Search before asking
- I had searched in the issues and found no similar issues.
What happened
I was trying to synchronize data from MySQL to ClickHouse using the Jdbc source and the Clickhouse sink (MySQL-CDC doesn't support real-time data synchronization, right?). After working through many errors, this is the final one I cannot solve.
SeaTunnel Version
2.3.2
SeaTunnel Config
env {
job.name = "SeaTunnel"
spark.executor.instances = 1
spark.executor.cores = 1
spark.executor.memory = "1g"
spark.master = local
spark.streaming.batchDuration = 5
spark.app.name = "seatunnel"
}
# Configure the data source in the source block
source {
Jdbc {
driver = "com.mysql.cj.jdbc.Driver"
username = "dt"
password = "123456"
table-names = ["water.device"]
base-url = "jdbc:mysql://192.168.0.100:53100/water?useUnicode=true&characterEncoding=utf8&useSSL=false"
query = "select * from device"
}
}
transform{
}
# Declare the output target in the sink block
sink {
Clickhouse {
host = "192.168.0.101:8123"
database = "default"
table = "testsea"
username = "default"
password = "abc2020"
# query = "INSERT INTO default.testsea (id, imei, brand, model, simcard_id, is_monitor, person_id, person_name, location_name, longitude, latitude, open_id, `type`, project_id, owner_id, owner_name, owner_phone_number, id_card_type, family_number, id_card_number, tenant_id, opt_by, create_at, update_at, active_at, status, state, wx_flag, device_name, debug_flag, mud_flag, tank_state, distribution_cabinet_state, indoor_state, other_state, debug_man, debug_time) VALUES(0, '', '', '', '', '', 0, '', '', '', '', '', '', 0, 0, '', '', '', 0, '', 0, 0, '', '', '', 0, '', '', '', '', '', '', '', '', '', '', '');
#"
# cdc options
primary_key = "id"
support_upsert = true
allow_experimental_lightweight_delete = true
}
}
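Note: the NullPointerException below is thrown from DB2DialectFactory.acceptsURL while JdbcDialectLoader iterates the dialect factories, which suggests the Jdbc source was handed a null connection URL. Assuming the Jdbc connector in 2.3.2 reads url / user / password / query, and that base-url, username and table-names are MySQL-CDC style keys it does not pick up (an assumption on my part, not verified), a minimal source block would look like this:

source {
  Jdbc {
    # url/user below are assumed Jdbc connector keys, replacing base-url/username/table-names
    driver = "com.mysql.cj.jdbc.Driver"
    url = "jdbc:mysql://192.168.0.100:53100/water?useUnicode=true&characterEncoding=utf8&useSSL=false"
    user = "dt"
    password = "123456"
    query = "select * from device"
  }
}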
Running Command
/home/apache-seatunnel-2.3.2/bin/start-seatunnel-spark-2-connector-v2.sh --master local[2] --deploy-mode client --config /home/apache-seatunnel-2.3.2/config/test-spark.conf
Error Exception
23/07/17 02:46:34 INFO AbstractPluginDiscovery: Load SeaTunnelSource Plugin from /home/apache-seatunnel-2.3.2/connectors/seatunnel
23/07/17 02:46:34 INFO AbstractPluginDiscovery: Discovery plugin jar: Jdbc at: file:/home/apache-seatunnel-2.3.2/connectors/seatunnel/connector-jdbc-2.3.2.jar
23/07/17 02:46:34 INFO AbstractPluginDiscovery: Load plugin: PluginIdentifier{engineType='seatunnel', pluginType='source', pluginName='Jdbc'} from classpath
23/07/17 02:46:34 ERROR SparkTaskExecuteCommand: Run SeaTunnel on spark failed.
java.lang.NullPointerException
at org.apache.seatunnel.connectors.seatunnel.jdbc.internal.dialect.db2.DB2DialectFactory.acceptsURL(DB2DialectFactory.java:31)
at org.apache.seatunnel.connectors.seatunnel.jdbc.internal.dialect.JdbcDialectLoader.lambda$load$0(JdbcDialectLoader.java:60)
at java.base/java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:176)
at java.base/java.util.LinkedList$LLSpliterator.forEachRemaining(LinkedList.java:1239)
at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:484)
at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474)
at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:913)
at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
at java.base/java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:578)
at org.apache.seatunnel.connectors.seatunnel.jdbc.internal.dialect.JdbcDialectLoader.load(JdbcDialectLoader.java:60)
at org.apache.seatunnel.connectors.seatunnel.jdbc.source.JdbcSource.prepare(JdbcSource.java:102)
at org.apache.seatunnel.core.starter.spark.execution.SourceExecuteProcessor.initializePlugins(SourceExecuteProcessor.java:104)
at org.apache.seatunnel.core.starter.spark.execution.SparkAbstractPluginExecuteProcessor.<init>(SparkAbstractPluginExecuteProcessor.java:49)
at org.apache.seatunnel.core.starter.spark.execution.SourceExecuteProcessor.<init>(SourceExecuteProcessor.java:51)
at org.apache.seatunnel.core.starter.spark.execution.SparkExecution.<init>(SparkExecution.java:57)
at org.apache.seatunnel.core.starter.spark.command.SparkTaskExecuteCommand.execute(SparkTaskExecuteCommand.java:59)
at org.apache.seatunnel.core.starter.SeaTunnel.run(SeaTunnel.java:40)
at org.apache.seatunnel.core.starter.spark.SeaTunnelSpark.main(SeaTunnelSpark.java:35)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:855)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:161)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:184)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:930)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:939)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
23/07/17 02:46:34 ERROR SeaTunnel:
===============================================================================
23/07/17 02:46:34 ERROR SeaTunnel: Fatal Error,
23/07/17 02:46:34 ERROR SeaTunnel: Please submit bug report in https://github.com/apache/seatunnel/issues
23/07/17 02:46:34 ERROR SeaTunnel: Reason:null
23/07/17 02:46:34 ERROR SeaTunnel: Exception StackTrace:org.apache.seatunnel.core.starter.exception.CommandExecuteException
at org.apache.seatunnel.core.starter.spark.command.SparkTaskExecuteCommand.execute(SparkTaskExecuteCommand.java:63)
at org.apache.seatunnel.core.starter.SeaTunnel.run(SeaTunnel.java:40)
at org.apache.seatunnel.core.starter.spark.SeaTunnelSpark.main(SeaTunnelSpark.java:35)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:855)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:161)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:184)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:930)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:939)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
23/07/17 02:46:34 ERROR SeaTunnel:
===============================================================================
Exception in thread "main" org.apache.seatunnel.core.starter.exception.CommandExecuteException
at org.apache.seatunnel.core.starter.spark.command.SparkTaskExecuteCommand.execute(SparkTaskExecuteCommand.java:63)
at org.apache.seatunnel.core.starter.SeaTunnel.run(SeaTunnel.java:40)
at org.apache.seatunnel.core.starter.spark.SeaTunnelSpark.main(SeaTunnelSpark.java:35)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:855)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:161)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:184)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:930)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:939)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Flink or Spark Version
flink 1.15
spark 2.4.8
Java or Scala Version
java 11.0.12
Screenshots
No response
Are you willing to submit PR?
- Yes I am willing to submit a PR!
Code of Conduct
- I agree to follow this project's Code of Conduct