Notes on creating and connecting tables with SparkSQL and FlinkSQL
2022-07-28 08:34:00 【Lu Xinhang】
Start the Flink SQL client: bin/sql-client.sh
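The table names below are qualified with an iceberg catalog, so that catalog has to be registered in the SQL client first. A minimal sketch, assuming a Hive-metastore-backed Iceberg catalog (the metastore URI and warehouse path are placeholders, not taken from the original setup):
-- register an Iceberg catalog named `iceberg`
CREATE CATALOG iceberg WITH (
'type' = 'iceberg',
'catalog-type' = 'hive',
'uri' = 'thrift://xx.xx.xx.xx:9083', -- placeholder Hive metastore address
'warehouse' = 'hdfs://ns1/lakehouse' -- placeholder warehouse root
);
USE CATALOG iceberg;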
Creating tables
Flink: create a table
create table iceberg.xxx.xxx
(
id STRING comment 'id',
dt STRING comment 'partition field'
)
PARTITIONED BY (dt)
with (
'write.format.default' = 'parquet', -- file storage format, default parquet
'write.parquet.compression-codec' = 'gzip', -- parquet compression codec
'commit.manifest-merge.enabled' = 'true', -- automatically merge manifests on write
'history.expire.max-snapshot-age-ms' = '43200000', -- maximum snapshot age (ms); default is 5 days, here 12 hours
'engine.hive.enabled' = 'true', -- allow the table to be queried from Hive
'write.metadata.delete-after-commit.enabled' = 'true', -- delete old metadata files after each commit
'write.metadata.previous-versions-max' = '20', -- maximum number of previous metadata files to keep
'write.metadata.compression-codec' = 'gzip', -- compress metadata files with gzip
'location' = 'hdfs://ns1/lakehouse/schema_name/table_name' -- HDFS location of the table
);
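Once the table exists it can be written and read from the same SQL client. A minimal sketch (the values are illustrative; the streaming-read hint is an Iceberg connector option and may require dynamic table options to be enabled, depending on the Flink version):
-- illustrative write
INSERT INTO iceberg.xxx.xxx VALUES ('1', '20220728');
-- batch read
SELECT * FROM iceberg.xxx.xxx;
-- incremental read of new snapshots
SELECT * FROM iceberg.xxx.xxx /*+ OPTIONS('streaming'='true', 'monitor-interval'='30s') */;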
Spark: create a table
create table iceberg.xxx.xxxx
(
id STRING comment 'id',
dt STRING comment 'partition field'
)
using iceberg
partitioned by (dt)
location 'hdfs://xxx/lakehouse/schema_name/table_name'
tblproperties (
'write.format.default' = 'parquet', -- file storage format, default parquet
'write.parquet.compression-codec' = 'gzip', -- parquet compression codec
'commit.manifest-merge.enabled' = 'true', -- automatically merge manifests on write
'history.expire.max-snapshot-age-ms' = '43200000', -- maximum snapshot age (ms); default is 5 days, here 12 hours
'engine.hive.enabled' = 'true', -- allow the table to be queried from Hive
'write.metadata.delete-after-commit.enabled' = 'true', -- delete old metadata files after each commit
'write.metadata.previous-versions-max' = '20', -- maximum number of previous metadata files to keep
'write.metadata.compression-codec' = 'gzip' -- compress metadata files with gzip
);
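For the iceberg. prefix to resolve, spark-sql needs the catalog configured at start-up. A minimal sketch of the launch, assuming the iceberg-spark-runtime jar is already on the classpath and the catalog is backed by a Hive metastore (the URI is a placeholder):
bin/spark-sql \
  --conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions \
  --conf spark.sql.catalog.iceberg=org.apache.iceberg.spark.SparkCatalog \
  --conf spark.sql.catalog.iceberg.type=hive \
  --conf spark.sql.catalog.iceberg.uri=thrift://xx.xx.xx.xx:9083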
Create a target table with the schema of an existing source table (CTAS; the where 1=2 predicate returns no rows, so only the schema is copied)
CREATE TABLE iceberg.schema_name.table_name
using iceberg
partitioned by (dt)
location 'hdfs://ns1/lakehouse/schema_name/table_name'
tblproperties (
'write.format.default' = 'parquet',
'write.parquet.compression-codec' = 'gzip',
'commit.manifest-merge.enabled' = 'true',
'engine.hive.enabled' = 'true',
'write.metadata.delete-after-commit.enabled' = 'true',
'write.metadata.previous-versions-max' = '20',
'write.metadata.compression-codec' = 'gzip'
) AS SELECT * from iceberg.schema_name.original_table_name where 1=2;
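Because the CTAS above copies only the schema, loading the data is a separate step; a minimal sketch:
INSERT INTO iceberg.schema_name.table_name
SELECT * FROM iceberg.schema_name.original_table_name;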
Connecting to MySQL and SQL Server
Spark
-- mysql
CREATE TEMPORARY VIEW tb_order_group
USING org.apache.spark.sql.jdbc
OPTIONS (
url 'jdbc:mysql://xx.xxx.xx.xx:4909/db_name?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&zeroDateTimeBehavior=convertToNull',
dbtable 'xxx',
user 'xxx',
password 'xxx'
);
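The view above reads the MySQL table through a single JDBC connection. For large tables, Spark's standard JDBC partitioning options can be added; a sketch with a hypothetical split column and bounds (url and credentials as above):
CREATE TEMPORARY VIEW tb_order_group_parallel
USING org.apache.spark.sql.jdbc
OPTIONS (
url 'jdbc:mysql://xx.xxx.xx.xx:4909/db_name?serverTimezone=GMT%2B8',
dbtable 'xxx',
user 'xxx',
password 'xxx',
partitionColumn 'id', -- numeric column to split on (hypothetical)
lowerBound '1',
upperBound '1000000',
numPartitions '8'
);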
-- sqlserver
CREATE TEMPORARY VIEW gxywhz
USING org.apache.spark.sql.jdbc
OPTIONS (
url 'jdbc:sqlserver://192.168.1.xx:xxx;DatabaseName=xxx',
dbtable 'dbo.xxx',
user 'xxx',
password 'xxxx'
);
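Once registered, the temporary views behave like ordinary tables, so loading them into the Iceberg tables created earlier is a plain INSERT ... SELECT. A sketch, assuming the view exposes matching id and dt columns:
INSERT INTO iceberg.schema_name.table_name
SELECT id, dt FROM tb_order_group;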
Flink
-- Create a mapping table with the JDBC connector
CREATE TABLE mysql_source
(
id int comment 'id',
primary key (id) NOT ENFORCED
) WITH (
'connector' = 'jdbc',
'url' = 'jdbc:mysql://192.168.xx.xx:3306/xxx',
'table-name' = 'tableName',
'driver' = 'com.mysql.jdbc.Driver',
'username' = 'root',
'password' = 'xxx'
);
CREATE TABLE sqlserver_source
(
id STRING comment 'id'
) WITH (
'connector' = 'jdbc',
'url' = 'jdbc:jtds:sqlserver://192.168.xx.xxx:10009;databaseName=xxx;useLOBs=false',
'table-name' = 'schema.tableName',
'driver' = 'net.sourceforge.jtds.jdbc.Driver',
'username' = 'xx',
'password' = 'xxxxx'
);
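The JDBC mapping tables can then be read as bounded sources and written into the Iceberg tables. The Iceberg sink only commits files on checkpoints, so a checkpoint interval has to be set in the SQL client; a sketch with an illustrative column mapping:
SET 'execution.checkpointing.interval' = '60s';
INSERT INTO iceberg.xxx.xxx
SELECT CAST(id AS STRING) AS id, '20220728' AS dt
FROM mysql_source;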