From 981c6a751679143d6fde06a5cd495bbf1061194e Mon Sep 17 00:00:00 2001
From: Karol Sobczak
Date: Tue, 29 Oct 2024 14:59:05 +0100
Subject: [PATCH] Only allow whitelisted extra properties

---
 docs/src/main/sphinx/connector/iceberg.md     |  3 ++
 .../trino/plugin/iceberg/IcebergConfig.java   | 16 ++++++++++
 .../trino/plugin/iceberg/IcebergMetadata.java |  7 +++--
 .../iceberg/IcebergMetadataFactory.java       |  7 ++++-
 .../io/trino/plugin/iceberg/IcebergUtil.java  | 30 +++++++++++--------
 .../iceberg/catalog/AbstractTrinoCatalog.java |  4 +--
 .../iceberg/BaseIcebergConnectorTest.java     |  5 ++++
 .../plugin/iceberg/TestIcebergConfig.java     |  4 +++
 .../iceberg/catalog/BaseTrinoCatalogTest.java |  3 +-
 .../catalog/glue/TestTrinoGlueCatalog.java    |  3 +-
 .../nessie/TestTrinoNessieCatalog.java        |  3 +-
 .../catalog/rest/TestTrinoRestCatalog.java    |  3 +-
 .../snowflake/TestTrinoSnowflakeCatalog.java  |  3 +-
 .../iceberg.properties                        |  1 +
 .../iceberg.properties                        |  1 +
 .../iceberg.properties                        |  1 +
 16 files changed, 72 insertions(+), 22 deletions(-)

diff --git a/docs/src/main/sphinx/connector/iceberg.md b/docs/src/main/sphinx/connector/iceberg.md
index cbe3dc6b19fd34..bc29ed5884d8b3 100644
--- a/docs/src/main/sphinx/connector/iceberg.md
+++ b/docs/src/main/sphinx/connector/iceberg.md
@@ -216,6 +216,9 @@ implementation is used:
   - Enable [sorted writing](iceberg-sorted-files) to tables with a specified
     sort order. Equivalent session property is `sorted_writing_enabled`.
   - `true`
+* - `iceberg.allowed-extra-properties`
+  - List of extra properties that are allowed to be set on Iceberg tables.
+  - `[]`
 * - `iceberg.split-manager-threads`
   - Number of threads to use for generating splits.
   - Double the number of processors on the coordinator node.
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergConfig.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergConfig.java
index b7793aead74d8d..2b799314b34a31 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergConfig.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergConfig.java
@@ -13,6 +13,7 @@
  */
 package io.trino.plugin.iceberg;
 
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
 import io.airlift.configuration.Config;
 import io.airlift.configuration.ConfigDescription;
@@ -28,6 +29,7 @@
 import jakarta.validation.constraints.Min;
 import jakarta.validation.constraints.NotNull;
 
+import java.util.List;
 import java.util.Optional;
 import java.util.Set;
 
@@ -85,6 +87,7 @@ public class IcebergConfig
     private boolean queryPartitionFilterRequired;
     private Set<String> queryPartitionFilterRequiredSchemas = ImmutableSet.of();
     private int splitManagerThreads = Runtime.getRuntime().availableProcessors() * 2;
+    private List<String> allowedExtraProperties = ImmutableList.of();
     private boolean incrementalRefreshEnabled = true;
     private boolean metadataCacheEnabled = true;
 
@@ -469,6 +472,19 @@ public IcebergConfig setSplitManagerThreads(int splitManagerThreads)
         return this;
     }
 
+    public List<String> getAllowedExtraProperties()
+    {
+        return allowedExtraProperties;
+    }
+
+    @Config("iceberg.allowed-extra-properties")
+    @ConfigDescription("List of extra properties that are allowed to be set on Iceberg tables")
+    public IcebergConfig setAllowedExtraProperties(List<String> allowedExtraProperties)
+    {
+        this.allowedExtraProperties = ImmutableList.copyOf(allowedExtraProperties);
+        return this;
+    }
+
     public boolean isIncrementalRefreshEnabled()
     {
         return incrementalRefreshEnabled;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java
index f04de6ff404169..26534e7c1828e3 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java
@@ -385,6 +385,7 @@ public class IcebergMetadata
     private final TableStatisticsWriter tableStatisticsWriter;
     private final Optional<HiveMetastoreFactory> metastoreFactory;
     private final boolean addFilesProcedureEnabled;
+    private final Predicate<String> allowedExtraProperties;
 
     private final Map> tableStatisticsCache = new ConcurrentHashMap<>();
 
@@ -399,7 +400,8 @@ public IcebergMetadata(
             IcebergFileSystemFactory fileSystemFactory,
             TableStatisticsWriter tableStatisticsWriter,
             Optional<HiveMetastoreFactory> metastoreFactory,
-            boolean addFilesProcedureEnabled)
+            boolean addFilesProcedureEnabled,
+            Predicate<String> allowedExtraProperties)
     {
         this.typeManager = requireNonNull(typeManager, "typeManager is null");
         this.trinoCatalogHandle = requireNonNull(trinoCatalogHandle, "trinoCatalogHandle is null");
@@ -409,6 +411,7 @@ public IcebergMetadata(
         this.tableStatisticsWriter = requireNonNull(tableStatisticsWriter, "tableStatisticsWriter is null");
         this.metastoreFactory = requireNonNull(metastoreFactory, "metastoreFactory is null");
         this.addFilesProcedureEnabled = addFilesProcedureEnabled;
+        this.allowedExtraProperties = requireNonNull(allowedExtraProperties, "allowedExtraProperties is null");
     }
 
     @Override
@@ -1059,7 +1062,7 @@ public ConnectorOutputTableHandle beginCreateTable(ConnectorSession session, Con
             tableLocation = getTableLocation(tableMetadata.getProperties())
                     .orElseGet(() -> catalog.defaultTableLocation(session, tableMetadata.getTable()));
         }
-        transaction = newCreateTableTransaction(catalog, tableMetadata, session, replace, tableLocation);
+        transaction = newCreateTableTransaction(catalog, tableMetadata, session, replace, tableLocation, allowedExtraProperties);
         Location location = Location.of(transaction.table().location());
         TrinoFileSystem fileSystem = fileSystemFactory.create(session.getIdentity(), transaction.table().io().properties());
         try {
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadataFactory.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadataFactory.java
index 88c66d1e490848..ac12fe02b1aaeb 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadataFactory.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadataFactory.java
@@ -13,6 +13,7 @@
  */
 package io.trino.plugin.iceberg;
 
+import com.google.common.collect.ImmutableSet;
 import com.google.inject.Inject;
 import io.airlift.json.JsonCodec;
 import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
@@ -23,6 +24,7 @@
 import io.trino.spi.type.TypeManager;
 
 import java.util.Optional;
+import java.util.function.Predicate;
 
 import static java.util.Objects.requireNonNull;
 
@@ -36,6 +38,7 @@ public class IcebergMetadataFactory
     private final TableStatisticsWriter tableStatisticsWriter;
     private final Optional<HiveMetastoreFactory> metastoreFactory;
     private final boolean addFilesProcedureEnabled;
+    private final Predicate<String> allowedExtraProperties;
 
     @Inject
     public IcebergMetadataFactory(
@@ -56,6 +59,7 @@ public IcebergMetadataFactory(
         this.tableStatisticsWriter = requireNonNull(tableStatisticsWriter, "tableStatisticsWriter is null");
         this.metastoreFactory = requireNonNull(metastoreFactory, "metastoreFactory is null");
         this.addFilesProcedureEnabled = config.isAddFilesProcedureEnabled();
+        this.allowedExtraProperties = ImmutableSet.copyOf(requireNonNull(config.getAllowedExtraProperties(), "allowedExtraProperties is null"))::contains;
     }
 
     public IcebergMetadata create(ConnectorIdentity identity)
@@ -68,6 +72,7 @@ public IcebergMetadata create(ConnectorIdentity identity)
                 fileSystemFactory,
                 tableStatisticsWriter,
                 metastoreFactory,
-                addFilesProcedureEnabled);
+                addFilesProcedureEnabled,
+                allowedExtraProperties);
     }
 }
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java
index 54fd3b5d54cb0e..4430f167e9bf9d 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergUtil.java
@@ -92,6 +92,7 @@
 import java.util.Set;
 import java.util.UUID;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Predicate;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.stream.Stream;
@@ -792,7 +793,7 @@ public static List viewColumnsFromSchema(TypeManager typeManager, Sc
             .toList();
     }
 
-    public static Transaction newCreateTableTransaction(TrinoCatalog catalog, ConnectorTableMetadata tableMetadata, ConnectorSession session, boolean replace, String tableLocation)
+    public static Transaction newCreateTableTransaction(TrinoCatalog catalog, ConnectorTableMetadata tableMetadata, ConnectorSession session, boolean replace, String tableLocation, Predicate<String> allowedExtraProperties)
     {
         SchemaTableName schemaTableName = tableMetadata.getTable();
         Schema schema = schemaFromMetadata(tableMetadata.getColumns());
@@ -800,12 +801,12 @@ public static Transaction newCreateTableTransaction(TrinoCatalog catalog, Connec
         SortOrder sortOrder = parseSortFields(schema, getSortOrder(tableMetadata.getProperties()));
 
         if (replace) {
-            return catalog.newCreateOrReplaceTableTransaction(session, schemaTableName, schema, partitionSpec, sortOrder, tableLocation, createTableProperties(tableMetadata));
+            return catalog.newCreateOrReplaceTableTransaction(session, schemaTableName, schema, partitionSpec, sortOrder, tableLocation, createTableProperties(tableMetadata, allowedExtraProperties));
         }
-        return catalog.newCreateTableTransaction(session, schemaTableName, schema, partitionSpec, sortOrder, tableLocation, createTableProperties(tableMetadata));
+        return catalog.newCreateTableTransaction(session, schemaTableName, schema, partitionSpec, sortOrder, tableLocation, createTableProperties(tableMetadata, allowedExtraProperties));
     }
 
-    public static Map<String, String> createTableProperties(ConnectorTableMetadata tableMetadata)
+    public static Map<String, String> createTableProperties(ConnectorTableMetadata tableMetadata, Predicate<String> allowedExtraProperties)
     {
         ImmutableMap.Builder<String, String> propertiesBuilder = ImmutableMap.builder();
         IcebergFileFormat fileFormat = IcebergTableProperties.getFileFormat(tableMetadata.getProperties());
@@ -838,14 +839,19 @@ public static Map<String, String> createTableProperties(ConnectorTableMetadata t
         Map<String, String> baseProperties = propertiesBuilder.buildOrThrow();
         Map<String, String> extraProperties = IcebergTableProperties.getExtraProperties(tableMetadata.getProperties()).orElseGet(ImmutableMap::of);
 
-        Set<String> illegalExtraProperties = Sets.intersection(
-                ImmutableSet.builder()
-                        .add(TABLE_COMMENT)
-                        .addAll(baseProperties.keySet())
-                        .addAll(SUPPORTED_PROPERTIES)
-                        .addAll(PROTECTED_ICEBERG_NATIVE_PROPERTIES)
-                        .build(),
-                extraProperties.keySet());
+        Set<String> illegalExtraProperties = ImmutableSet.builder()
+                .addAll(Sets.intersection(
+                        ImmutableSet.builder()
+                                .add(TABLE_COMMENT)
+                                .addAll(baseProperties.keySet())
+                                .addAll(SUPPORTED_PROPERTIES)
+                                .addAll(PROTECTED_ICEBERG_NATIVE_PROPERTIES)
+                                .build(),
+                        extraProperties.keySet()))
+                .addAll(extraProperties.keySet().stream()
+                        .filter(name -> !allowedExtraProperties.test(name))
+                        .collect(toImmutableSet()))
+                .build();
 
         if (!illegalExtraProperties.isEmpty()) {
             throw new TrinoException(
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/AbstractTrinoCatalog.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/AbstractTrinoCatalog.java
index 2b257e085a8ed9..4daa639b8ce5ec 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/AbstractTrinoCatalog.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/AbstractTrinoCatalog.java
@@ -309,7 +309,7 @@ protected Location createMaterializedViewStorage(
         Schema schema = schemaFromMetadata(columns);
         PartitionSpec partitionSpec = parsePartitionFields(schema, getPartitioning(materializedViewProperties));
         SortOrder sortOrder = parseSortFields(schema, getSortOrder(materializedViewProperties));
-        Map<String, String> properties = createTableProperties(new ConnectorTableMetadata(storageTableName, columns, materializedViewProperties, Optional.empty()));
+        Map<String, String> properties = createTableProperties(new ConnectorTableMetadata(storageTableName, columns, materializedViewProperties, Optional.empty()), _ -> false);
 
         TableMetadata metadata = newTableMetadata(schema, partitionSpec, sortOrder, tableLocation, properties);
 
@@ -347,7 +347,7 @@ protected SchemaTableName createMaterializedViewStorageTable(
         ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(storageTable, columns, materializedViewProperties, Optional.empty());
         String tableLocation = getTableLocation(tableMetadata.getProperties())
                 .orElseGet(() -> defaultTableLocation(session, tableMetadata.getTable()));
-        Transaction transaction = IcebergUtil.newCreateTableTransaction(this, tableMetadata, session, false, tableLocation);
+        Transaction transaction = IcebergUtil.newCreateTableTransaction(this, tableMetadata, session, false, tableLocation, _ -> false);
         AppendFiles appendFiles = transaction.newAppend();
         commit(appendFiles, session);
         transaction.commitTransaction();
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java
index c2ed48e21a3781..29dc3cbc455aa8 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java
@@ -193,6 +193,7 @@ protected IcebergQueryRunner.Builder createQueryRunnerBuilder()
                         .put("iceberg.file-format", format.name())
                         // Allows testing the sorting writer flushing to the file system with smaller tables
                         .put("iceberg.writer-sort-buffer-size", "1MB")
+                        .put("iceberg.allowed-extra-properties", "extra.property.one,extra.property.two,extra.property.three,sorted_by")
                         .buildOrThrow())
                 .setInitialTables(REQUIRED_TPCH_TABLES);
     }
@@ -8515,6 +8516,10 @@ public void testIllegalExtraPropertyKey()
         assertQueryFails(
                 "CREATE TABLE test_create_table_with_as_illegal_extra_properties WITH (extra_properties = MAP(ARRAY['comment'], ARRAY['some comment'])) AS SELECT 1 as c1",
                 "\\QIllegal keys in extra_properties: [comment]");
+
+        assertQueryFails(
+                "CREATE TABLE test_create_table_with_as_illegal_extra_properties WITH (extra_properties = MAP(ARRAY['not_allowed_property'], ARRAY['foo'])) AS SELECT 1 as c1",
+                "\\QIllegal keys in extra_properties: [not_allowed_property]");
     }
 
     @Override
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergConfig.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergConfig.java
index 9334299ba4a164..1ec49403515e47 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergConfig.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergConfig.java
@@ -13,6 +13,7 @@
  */
 package io.trino.plugin.iceberg;
 
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import io.airlift.units.DataSize;
@@ -71,6 +72,7 @@ public void testDefaults()
                 .setQueryPartitionFilterRequired(false)
                 .setQueryPartitionFilterRequiredSchemas(ImmutableSet.of())
                 .setSplitManagerThreads(Runtime.getRuntime().availableProcessors() * 2)
+                .setAllowedExtraProperties(ImmutableList.of())
                 .setIncrementalRefreshEnabled(true)
                 .setMetadataCacheEnabled(true));
     }
@@ -106,6 +108,7 @@ public void testExplicitPropertyMappings()
                 .put("iceberg.query-partition-filter-required", "true")
                 .put("iceberg.query-partition-filter-required-schemas", "bronze,silver")
                 .put("iceberg.split-manager-threads", "42")
+                .put("iceberg.allowed-extra-properties", "propX,propY")
                 .put("iceberg.incremental-refresh-enabled", "false")
                 .put("iceberg.metadata-cache.enabled", "false")
                 .buildOrThrow();
@@ -138,6 +141,7 @@ public void testExplicitPropertyMappings()
                 .setQueryPartitionFilterRequired(true)
                 .setQueryPartitionFilterRequiredSchemas(ImmutableSet.of("bronze", "silver"))
                 .setSplitManagerThreads(42)
+                .setAllowedExtraProperties(ImmutableList.of("propX", "propY"))
                 .setIncrementalRefreshEnabled(false)
                 .setMetadataCacheEnabled(false);
 
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/BaseTrinoCatalogTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/BaseTrinoCatalogTest.java
index 82b927808c63ce..aca7e0d4492d66 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/BaseTrinoCatalogTest.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/BaseTrinoCatalogTest.java
@@ -119,7 +119,8 @@ public void testNonLowercaseNamespace()
                 },
                 new TableStatisticsWriter(new NodeVersion("test-version")),
                 Optional.empty(),
-                false);
+                false,
+                _ -> false);
         assertThat(icebergMetadata.schemaExists(SESSION, namespace)).as("icebergMetadata.schemaExists(namespace)")
                 .isFalse();
         assertThat(icebergMetadata.schemaExists(SESSION, schema)).as("icebergMetadata.schemaExists(schema)")
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestTrinoGlueCatalog.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestTrinoGlueCatalog.java
index a944c6dd002649..8d380edd3cc118 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestTrinoGlueCatalog.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestTrinoGlueCatalog.java
@@ -136,7 +136,8 @@ public void testNonLowercaseGlueDatabase()
                 },
                 new TableStatisticsWriter(new NodeVersion("test-version")),
                 Optional.empty(),
-                false);
+                false,
+                _ -> false);
         assertThat(icebergMetadata.schemaExists(SESSION, databaseName)).as("icebergMetadata.schemaExists(databaseName)")
                 .isFalse();
         assertThat(icebergMetadata.schemaExists(SESSION, trinoSchemaName)).as("icebergMetadata.schemaExists(trinoSchemaName)")
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/nessie/TestTrinoNessieCatalog.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/nessie/TestTrinoNessieCatalog.java
index 306d7522f48f5a..38328611e8d476 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/nessie/TestTrinoNessieCatalog.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/nessie/TestTrinoNessieCatalog.java
@@ -188,7 +188,8 @@ public void testNonLowercaseNamespace()
                 },
                 new TableStatisticsWriter(new NodeVersion("test-version")),
                 Optional.empty(),
-                false);
+                false,
+                _ -> false);
         assertThat(icebergMetadata.schemaExists(SESSION, namespace)).as("icebergMetadata.schemaExists(namespace)")
                 .isTrue();
         assertThat(icebergMetadata.schemaExists(SESSION, schema)).as("icebergMetadata.schemaExists(schema)")
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/rest/TestTrinoRestCatalog.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/rest/TestTrinoRestCatalog.java
index cfe8c363f2c2c8..6ad4a2792efea8 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/rest/TestTrinoRestCatalog.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/rest/TestTrinoRestCatalog.java
@@ -122,7 +122,8 @@ public void testNonLowercaseNamespace()
                 },
                 new TableStatisticsWriter(new NodeVersion("test-version")),
                 Optional.empty(),
-                false);
+                false,
+                _ -> false);
         assertThat(icebergMetadata.schemaExists(SESSION, namespace)).as("icebergMetadata.schemaExists(namespace)")
                 .isTrue();
         assertThat(icebergMetadata.schemaExists(SESSION, schema)).as("icebergMetadata.schemaExists(schema)")
diff --git a/plugin/trino-iceberg/src/test/java/org/apache/iceberg/snowflake/TestTrinoSnowflakeCatalog.java b/plugin/trino-iceberg/src/test/java/org/apache/iceberg/snowflake/TestTrinoSnowflakeCatalog.java
index c8f803f4bcf564..91b3b492c40141 100644
--- a/plugin/trino-iceberg/src/test/java/org/apache/iceberg/snowflake/TestTrinoSnowflakeCatalog.java
+++ b/plugin/trino-iceberg/src/test/java/org/apache/iceberg/snowflake/TestTrinoSnowflakeCatalog.java
@@ -218,7 +218,8 @@ public void testNonLowercaseNamespace()
                 },
                 new TableStatisticsWriter(new NodeVersion("test-version")),
                 Optional.empty(),
-                false);
+                false,
+                _ -> false);
         assertThat(icebergMetadata.schemaExists(SESSION, namespace)).as("icebergMetadata.schemaExists(namespace)")
                 .isTrue();
         assertThat(icebergMetadata.schemaExists(SESSION, schema)).as("icebergMetadata.schemaExists(schema)")
diff --git a/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/singlenode-spark-iceberg-jdbc-catalog/iceberg.properties b/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/singlenode-spark-iceberg-jdbc-catalog/iceberg.properties
index 931c63cc46e91d..d4b97df8cdc91d 100644
--- a/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/singlenode-spark-iceberg-jdbc-catalog/iceberg.properties
+++ b/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/singlenode-spark-iceberg-jdbc-catalog/iceberg.properties
@@ -7,4 +7,5 @@ iceberg.jdbc-catalog.connection-password=test
 iceberg.jdbc-catalog.catalog-name=iceberg_test
 iceberg.jdbc-catalog.default-warehouse-dir=hdfs://hadoop-master:9000/user/hive/warehouse
 hive.hdfs.socks-proxy=hadoop-master:1180
+iceberg.allowed-extra-properties=custom.table-property
 fs.hadoop.enabled=true
diff --git a/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/singlenode-spark-iceberg-rest/iceberg.properties b/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/singlenode-spark-iceberg-rest/iceberg.properties
index 8df4036baa1433..9f88080be70e60 100644
--- a/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/singlenode-spark-iceberg-rest/iceberg.properties
+++ b/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/singlenode-spark-iceberg-rest/iceberg.properties
@@ -1,4 +1,5 @@
 connector.name=iceberg
 iceberg.catalog.type=rest
 iceberg.rest-catalog.uri=http://iceberg-with-rest:8181/
+iceberg.allowed-extra-properties=custom.table-property
 fs.hadoop.enabled=true
diff --git a/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/singlenode-spark-iceberg/iceberg.properties b/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/singlenode-spark-iceberg/iceberg.properties
index de03d40b223972..e6d9e6d509f49d 100644
--- a/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/singlenode-spark-iceberg/iceberg.properties
+++ b/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/singlenode-spark-iceberg/iceberg.properties
@@ -1,4 +1,5 @@
 connector.name=iceberg
 hive.metastore.uri=thrift://hadoop-master:9083
 iceberg.register-table-procedure.enabled=true
+iceberg.allowed-extra-properties=custom.table-property
 fs.hadoop.enabled=true
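
Usage sketch, not part of the patch: with a catalog configured like the product-test setups above, for example iceberg.allowed-extra-properties=custom.table-property, the existing extra_properties table property accepts only allowlisted keys. The catalog, schema, and table names below are made up for illustration; the error message is the one asserted in BaseIcebergConnectorTest.

    CREATE TABLE iceberg.example_schema.example_allowed
    WITH (extra_properties = MAP(ARRAY['custom.table-property'], ARRAY['some value']))
    AS SELECT 1 AS c1;
    -- succeeds: 'custom.table-property' is on the allowlist

    CREATE TABLE iceberg.example_schema.example_rejected
    WITH (extra_properties = MAP(ARRAY['other.property'], ARRAY['some value']))
    AS SELECT 1 AS c1;
    -- fails: Illegal keys in extra_properties: [other.property]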