Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -26,10 +26,12 @@
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.ConnectorSplitManager;
import io.trino.spi.connector.ConnectorTransactionHandle;
import io.trino.spi.function.FunctionProvider;
import io.trino.spi.session.PropertyMetadata;
import io.trino.spi.transaction.IsolationLevel;

import java.util.List;
import java.util.Optional;
import java.util.Set;

import static com.google.common.collect.Sets.immutableEnumSet;
Expand All @@ -51,6 +53,7 @@ public class LakehouseConnector
private final LakehouseSessionProperties sessionProperties;
private final LakehouseTableProperties tableProperties;
private final IcebergMaterializedViewProperties materializedViewProperties;
private final FunctionProvider functionProvider;

@Inject
public LakehouseConnector(
Expand All @@ -62,7 +65,8 @@ public LakehouseConnector(
LakehouseNodePartitioningProvider nodePartitioningProvider,
LakehouseSessionProperties sessionProperties,
LakehouseTableProperties tableProperties,
IcebergMaterializedViewProperties materializedViewProperties)
IcebergMaterializedViewProperties materializedViewProperties,
FunctionProvider functionProvider)
{
this.lifeCycleManager = requireNonNull(lifeCycleManager, "lifeCycleManager is null");
this.transactionManager = requireNonNull(transactionManager, "transactionManager is null");
Expand All @@ -73,6 +77,7 @@ public LakehouseConnector(
this.sessionProperties = requireNonNull(sessionProperties, "sessionProperties is null");
this.tableProperties = requireNonNull(tableProperties, "tableProperties is null");
this.materializedViewProperties = requireNonNull(materializedViewProperties, "materializedViewProperties is null");
this.functionProvider = requireNonNull(functionProvider, "functionProvider is null");
}

@Override
Expand Down Expand Up @@ -159,4 +164,10 @@ public Set<ConnectorCapabilities> getCapabilities()
{
return immutableEnumSet(NOT_NULL_COLUMN_CONSTRAINT, MATERIALIZED_VIEW_GRACE_PERIOD);
}

@Override
public Optional<FunctionProvider> getFunctionProvider()
{
    // Expose the injected provider so the engine can resolve connector
    // functions (e.g. system.bucket) for this connector. The field is
    // null-checked in the constructor, so the Optional is always present.
    return Optional.ofNullable(functionProvider);
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.lakehouse;

import com.google.inject.Inject;
import io.trino.plugin.iceberg.functions.IcebergFunctionProvider;
import io.trino.spi.function.BoundSignature;
import io.trino.spi.function.FunctionDependencies;
import io.trino.spi.function.FunctionId;
import io.trino.spi.function.FunctionProvider;
import io.trino.spi.function.InvocationConvention;
import io.trino.spi.function.ScalarFunctionImplementation;

import static java.util.Objects.requireNonNull;

/**
 * {@link FunctionProvider} for the lakehouse connector. It exposes exactly one
 * scalar function — Iceberg's {@code system.bucket} partition transform — and
 * delegates its implementation to the Iceberg function provider.
 */
public class LakehouseFunctionProvider
        implements FunctionProvider
{
    private final IcebergFunctionProvider icebergFunctionProvider;

    @Inject
    public LakehouseFunctionProvider(
            IcebergFunctionProvider icebergFunctionProvider)
    {
        this.icebergFunctionProvider = requireNonNull(icebergFunctionProvider, "icebergFunctionProvider is null");
    }

    @Override
    public ScalarFunctionImplementation getScalarFunctionImplementation(
            FunctionId functionId,
            BoundSignature boundSignature,
            FunctionDependencies functionDependencies,
            InvocationConvention invocationConvention)
    {
        // Guard clause: anything other than system.bucket is not provided here.
        var name = boundSignature.getName();
        boolean isSystemBucket = "system".equals(name.getSchemaName()) && "bucket".equals(name.getFunctionName());
        if (!isSystemBucket) {
            throw new UnsupportedOperationException("%s does not provide %s scalar function".formatted(getClass().getName(), name));
        }
        // Delegate to Iceberg, which owns the bucket transform implementation.
        return icebergFunctionProvider.getScalarFunctionImplementation(functionId, boundSignature, functionDependencies, invocationConvention);
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
import io.trino.plugin.iceberg.catalog.glue.IcebergGlueCatalogModule;
import io.trino.plugin.iceberg.catalog.hms.IcebergHiveMetastoreCatalogModule;
import io.trino.plugin.iceberg.fileio.ForwardingFileIoFactory;
import io.trino.plugin.iceberg.functions.IcebergFunctionProvider;

import static com.google.inject.multibindings.OptionalBinder.newOptionalBinder;
import static io.airlift.configuration.ConfigBinder.configBinder;
Expand Down Expand Up @@ -75,6 +76,8 @@ protected void setup(Binder binder)

binder.bind(ForwardingFileIoFactory.class).in(Scopes.SINGLETON);

binder.bind(IcebergFunctionProvider.class).in(Scopes.SINGLETON);

install(switch (buildConfigObject(MetastoreTypeConfig.class).getMetastoreType()) {
case THRIFT -> new IcebergHiveMetastoreCatalogModule();
case FILE -> new IcebergFileMetastoreCatalogModule();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,10 @@
import io.trino.spi.connector.WriterScalingOptions;
import io.trino.spi.expression.ConnectorExpression;
import io.trino.spi.expression.Constant;
import io.trino.spi.function.BoundSignature;
import io.trino.spi.function.FunctionDependencyDeclaration;
import io.trino.spi.function.FunctionId;
import io.trino.spi.function.FunctionMetadata;
import io.trino.spi.function.LanguageFunction;
import io.trino.spi.function.SchemaFunctionName;
import io.trino.spi.security.GrantInfo;
Expand Down Expand Up @@ -173,6 +177,30 @@ public ConnectorTableHandle getTableHandle(ConnectorSession session, SchemaTable
return hiveMetadata.getTableHandle(session, tableName, startVersion, endVersion);
}

@Override
public Collection<FunctionMetadata> listFunctions(ConnectorSession session, String schemaName)
{
    // Connector-provided functions are delegated to the Iceberg metadata layer.
    return icebergMetadata.listFunctions(session, schemaName);
}

@Override
public Collection<FunctionMetadata> getFunctions(ConnectorSession session, SchemaFunctionName name)
{
    // Function resolution by name is delegated to the Iceberg metadata layer.
    return icebergMetadata.getFunctions(session, name);
}

@Override
public FunctionMetadata getFunctionMetadata(ConnectorSession session, FunctionId functionId)
{
    // Metadata lookup by function id is delegated to the Iceberg metadata layer.
    return icebergMetadata.getFunctionMetadata(session, functionId);
}

@Override
public FunctionDependencyDeclaration getFunctionDependencies(ConnectorSession session, FunctionId functionId, BoundSignature boundSignature)
{
    // Dependency declarations are delegated to the Iceberg metadata layer.
    return icebergMetadata.getFunctionDependencies(session, functionId, boundSignature);
}

@Override
public Optional<ConnectorTableExecuteHandle> getTableHandleForExecute(ConnectorSession session, ConnectorAccessControl accessControl, ConnectorTableHandle tableHandle, String procedureName, Map<String, Object> executeProperties, RetryMode retryMode)
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@
import io.trino.plugin.hive.orc.OrcWriterConfig;
import io.trino.plugin.hive.parquet.ParquetReaderConfig;
import io.trino.plugin.hive.parquet.ParquetWriterConfig;
import io.trino.plugin.iceberg.functions.tablechanges.TableChangesFunctionProcessorProviderFactory;
import io.trino.spi.function.FunctionProvider;

import static io.airlift.configuration.ConfigBinder.configBinder;
import static org.weakref.jmx.guice.ExportBinder.newExporter;
Expand Down Expand Up @@ -53,6 +55,9 @@ protected void setup(Binder binder)
binder.bind(FileFormatDataSourceStats.class).in(Scopes.SINGLETON);
newExporter(binder).export(FileFormatDataSourceStats.class).withGeneratedName();

binder.bind(FunctionProvider.class).to(LakehouseFunctionProvider.class).in(Scopes.SINGLETON);
binder.bind(TableChangesFunctionProcessorProviderFactory.class).in(Scopes.SINGLETON);

binder.bind(Key.get(boolean.class, HideDeltaLakeTables.class)).toInstance(false);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -365,4 +365,17 @@ public void testShowCreateTable()
type = 'ICEBERG'
)\\E""");
}

@Test
void testBucketFunction()
{
    // Iceberg's system.bucket partition transform is exposed through the
    // lakehouse catalog and is callable like a regular scalar function.
    assertThat(query("SELECT lakehouse.system.bucket('trino', 16)"))
            .matches("VALUES 10");

    // Only "bucket" is provided in the system schema; other names must not resolve.
    assertThat(query("SELECT lakehouse.system.other_function('trino', 16)"))
            .failure().hasMessageMatching("line .:.: Function 'lakehouse.system.other_function' not registered");

    // The function is only registered in the system schema, not in data schemas.
    assertThat(query("SELECT lakehouse.tpch.bucket('trino', 16)"))
            .failure().hasMessageMatching("line .:.: Function 'lakehouse.tpch.bucket' not registered");
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,7 @@
import io.trino.spi.connector.SaveMode;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.expression.ConnectorExpression;
import io.trino.spi.function.BoundSignature;
import io.trino.spi.function.FunctionId;
import io.trino.spi.function.SchemaFunctionName;
import io.trino.spi.function.table.ConnectorTableFunctionHandle;
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.security.TrinoPrincipal;
Expand All @@ -48,11 +46,7 @@ public class TestLakehouseMetadata
.add(ConnectorMetadata.class.getMethod("supportsMissingColumnsOnInsert"))
.add(ConnectorMetadata.class.getMethod("refreshMaterializedView", ConnectorSession.class, SchemaTableName.class))
.add(ConnectorMetadata.class.getMethod("resolveIndex", ConnectorSession.class, ConnectorTableHandle.class, Set.class, Set.class, TupleDomain.class))
.add(ConnectorMetadata.class.getMethod("listFunctions", ConnectorSession.class, String.class))
.add(ConnectorMetadata.class.getMethod("getFunctions", ConnectorSession.class, SchemaFunctionName.class))
.add(ConnectorMetadata.class.getMethod("getFunctionMetadata", ConnectorSession.class, FunctionId.class))
.add(ConnectorMetadata.class.getMethod("getAggregationFunctionMetadata", ConnectorSession.class, FunctionId.class))
.add(ConnectorMetadata.class.getMethod("getFunctionDependencies", ConnectorSession.class, FunctionId.class, BoundSignature.class))
.add(ConnectorMetadata.class.getMethod("applyJoin", ConnectorSession.class, JoinType.class, ConnectorTableHandle.class, ConnectorTableHandle.class, ConnectorExpression.class, Map.class, Map.class, JoinStatistics.class))
.add(ConnectorMetadata.class.getMethod("applyJoin", ConnectorSession.class, JoinType.class, ConnectorTableHandle.class, ConnectorTableHandle.class, List.class, Map.class, Map.class, JoinStatistics.class))
.add(ConnectorMetadata.class.getMethod("applyTableFunction", ConnectorSession.class, ConnectorTableFunctionHandle.class))
Expand Down