Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ services:
- ./data/output/spark-notebooks:/home/iceberg/notebooks/notebooks
- ./data:/mnt/data
- ./spark-init.sql:/mnt/spark-init.sql
- ./spark-defaults.conf:/opt/spark/conf/spark-defaults.conf
environment:
- AWS_ACCESS_KEY_ID=admin
- AWS_SECRET_ACCESS_KEY=password
Expand Down
34 changes: 34 additions & 0 deletions docker/thirdparties/docker-compose/iceberg/spark-defaults.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Default system properties included when running spark-submit.
# This is useful for supplying default values for environment-specific settings.

# Iceberg REST catalog + MinIO (S3) settings for the dockerized Spark test environment:
spark.sql.session.timeZone Asia/Shanghai
spark.sql.extensions org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions
spark.sql.catalog.demo org.apache.iceberg.spark.SparkCatalog
spark.sql.catalog.demo.type rest
spark.sql.catalog.demo.uri http://rest:8181
spark.sql.catalog.demo.io-impl org.apache.iceberg.aws.s3.S3FileIO
spark.sql.catalog.demo.warehouse s3://warehouse/wh/
spark.sql.catalog.demo.s3.endpoint http://minio:9000
spark.sql.defaultCatalog demo
spark.eventLog.enabled true
spark.eventLog.dir /home/iceberg/spark-events
spark.history.fs.logDirectory /home/iceberg/spark-events
spark.sql.catalogImplementation in-memory

This file was deleted.

5 changes: 4 additions & 1 deletion docker/thirdparties/docker-compose/iceberg/spark-init.sql
Original file line number Diff line number Diff line change
Expand Up @@ -23,4 +23,7 @@ tblproperties (
'write.update.mode'='merge-on-read'
);
insert into demo.test_db.location_s3_table values (1,'a');
update demo.test_db.location_s3_table set val='b' where id=1;
update demo.test_db.location_s3_table set val='b' where id=1;

create table demo.test_db.tb_ts_ntz_filter (ts timestamp_ntz) using iceberg;
insert into demo.test_db.tb_ts_ntz_filter values (timestamp_ntz '2024-06-11 12:34:56.123456');
2 changes: 0 additions & 2 deletions docker/thirdparties/run-thirdparties-docker.sh
Original file line number Diff line number Diff line change
Expand Up @@ -363,10 +363,8 @@ if [[ "${RUN_ICEBERG}" -eq 1 ]]; then
# iceberg
cp "${ROOT}"/docker-compose/iceberg/iceberg.yaml.tpl "${ROOT}"/docker-compose/iceberg/iceberg.yaml
cp "${ROOT}"/docker-compose/iceberg/entrypoint.sh.tpl "${ROOT}"/docker-compose/iceberg/entrypoint.sh
cp "${ROOT}"/docker-compose/iceberg/spark-defaults.conf.tpl "${ROOT}"/docker-compose/iceberg/spark-defaults.conf
sed -i "s/doris--/${CONTAINER_UID}/g" "${ROOT}"/docker-compose/iceberg/iceberg.yaml
sed -i "s/doris--/${CONTAINER_UID}/g" "${ROOT}"/docker-compose/iceberg/entrypoint.sh
sed -i "s/doris--/${CONTAINER_UID}/g" "${ROOT}"/docker-compose/iceberg/spark-defaults.conf
sudo docker compose -f "${ROOT}"/docker-compose/iceberg/iceberg.yaml --env-file "${ROOT}"/docker-compose/iceberg/iceberg.env down
sudo rm -rf "${ROOT}"/docker-compose/iceberg/data
if [[ "${STOP}" -ne 1 ]]; then
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@

// TODO(dhc) add nanosecond timer for coordinator's root profile
public class TimeUtils {
public static final String UTC_TIME_ZONE = "Europe/London"; // This is just a Country to represent UTC offset +00:00
public static final String UTC_TIME_ZONE = "UTC"; // Canonical zone ID for UTC offset +00:00
public static final String DEFAULT_TIME_ZONE = "Asia/Shanghai";
public static final ZoneId TIME_ZONE;
public static final ImmutableMap<String, String> timeZoneAliasMap;
Expand Down Expand Up @@ -148,6 +148,10 @@ public static TimeZone getTimeZone() {
return TimeZone.getTimeZone(ZoneId.of(timezone, timeZoneAliasMap));
}

public static TimeZone getUTCTimeZone() {
return TimeZone.getTimeZone(UTC_TIME_ZONE);
}

// return the time zone of current system
public static TimeZone getSystemTimeZone() {
return TimeZone.getTimeZone(ZoneId.of(ZoneId.systemDefault().getId(), timeZoneAliasMap));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -319,7 +319,11 @@ public static Object extractDorisLiteral(org.apache.iceberg.types.Type icebergTy
case DATE:
return dateLiteral.getStringValue();
case TIMESTAMP:
return dateLiteral.getUnixTimestampWithMicroseconds(TimeUtils.getTimeZone());
if (((Types.TimestampType) icebergType).shouldAdjustToUTC()) {
return dateLiteral.getUnixTimestampWithMicroseconds(TimeUtils.getTimeZone());
} else {
return dateLiteral.getUnixTimestampWithMicroseconds(TimeUtils.getUTCTimeZone());
}
default:
return null;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,3 +52,16 @@
6 2024-05-30T20:34:56.123450
7 2024-05-30T20:34:56.123456

-- !qt14 --
2024-06-11T12:34:56.123456

-- !qt15 --

-- !qt16 --

-- !qt17 --
2024-06-11T12:34:56.123456

-- !qt18 --
2024-06-11T12:34:56.123456

Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,13 @@ suite("test_iceberg_filter", "p0,external,doris,external_docker,external_docker_
qt_qt12 """ select * from ${tb_ts_filter} where ts < '2024-05-30 20:34:56.1200' order by id """
qt_qt13 """ select * from ${tb_ts_filter} where ts > '2024-05-30 20:34:56.1200' order by id """

String tb_ts_ntz_filter = "${catalog_name}.test_db.tb_ts_ntz_filter";
qt_qt14 """ select * from ${tb_ts_ntz_filter} where ts = '2024-06-11 12:34:56.123456' """
qt_qt15 """ select * from ${tb_ts_ntz_filter} where ts > '2024-06-11 12:34:56.123456' """
qt_qt16 """ select * from ${tb_ts_ntz_filter} where ts < '2024-06-11 12:34:56.123456' """
qt_qt17 """ select * from ${tb_ts_ntz_filter} where ts > '2024-06-11 12:34:56.12345' """
qt_qt18 """ select * from ${tb_ts_ntz_filter} where ts < '2024-06-11 12:34:56.123466' """

// TODO support filter
// explain {
// sql("select * from ${tb_ts_filter} where ts < '2024-05-30 20:34:56'")
Expand Down