File tree Expand file tree Collapse file tree 1 file changed +3
-3
lines changed Expand file tree Collapse file tree 1 file changed +3
-3
lines changed Original file line number Diff line number Diff line change @@ -26,7 +26,7 @@ Importing data into a Delta Lake table is as easy as
26
26
27
27
``` shell script
28
28
spark-submit /
29
- --class "com.scribd.importer.spark.ImportRunner" sql-delta-import_2.12-0.2.1-SNAPSHOT.jar /
29
+ --class "io.delta.connectors.spark.JDBC.ImportRunner" sql-delta-import_2.12-0.2.1-SNAPSHOT.jar /
30
30
--jdbc-url jdbc:mysql://hostName:port/database /
31
31
--source source.table
32
32
--destination destination.table
@@ -49,7 +49,7 @@ optimize data storage for best performance on reads by just adding a couple of c
49
49
spark-submit /
50
50
--conf spark.databricks.delta.optimizeWrite.enabled=true /
51
51
--conf spark.databricks.delta.autoCompact.enabled=true /
52
- --class "com.scribd.importer.spark.ImportRunner" sql-delta-import_2.12-0.2.1-SNAPSHOT.jar /
52
+ --class "io.delta.connectors.spark.JDBC.ImportRunner" sql-delta-import_2.12-0.2.1-SNAPSHOT.jar /
53
53
--jdbc-url jdbc:mysql://hostName:port/database /
54
54
--source source.table
55
55
--destination destination.table
@@ -72,7 +72,7 @@ concurrency thus allowing you to tune those parameters independently
72
72
spark-submit --num-executors 15 --executor-cores 4 /
73
73
--conf spark.databricks.delta.optimizeWrite.enabled=true /
74
74
--conf spark.databricks.delta.autoCompact.enabled=true /
75
- --class "com.scribd.importer.spark.ImportRunner" sql-delta-import_2.12-0.2.1-SNAPSHOT.jar /
75
+ --class "io.delta.connectors.spark.JDBC.ImportRunner" sql-delta-import_2.12-0.2.1-SNAPSHOT.jar /
76
76
--jdbc-url jdbc:mysql://hostName:port/database /
77
77
--source source.table
78
78
--destination destination.table
You can’t perform that action at this time.
0 commit comments