<dependency>
<groupId>MySQL</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.48</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-jdbc_2.12</artifactId>
<version>1.8.0</version>
</dependency>
CREATE TABLE `temp` (
`id` bigint(20) NOT NULL AUTO_INCREMENT,
`name` varchar(255) DEFAULT NULL,
`time` varchar(255) DEFAULT NULL,
`type` bigint(20) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8
package com.fwmagic.flink.batch;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.jdbc.JDBCInputFormat;
import org.apache.flink.api.java.io.jdbc.JDBCOutputFormat;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.types.Row;
import java.util.concurrent.TimeUnit;
public class BatchDemoOperatorMysql {
public static void main(String[] args) throws Exception {
String driverClass = "com.mysql.jdbc.Driver";
String dbUrl = "jdbc:mysql://localhost:3306/test";
String userNmae = "root";
String passWord = "123456";
String sql = "insert into test.temp (name,time,type) values (?,?,?)";
ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
/**
* 文件內(nèi)容:
* 關(guān)羽,2019-10-14 00:00:01,1
* 張飛,2019-10-14 00:00:02,2
* 趙云,2019-10-14 00:00:03,3
*/
String filePath = "/Users/temp/data.csv";
//讀csv文件內(nèi)容,轉(zhuǎn)成Row對(duì)象
DataSet<Row> outputData = env.readCsvFile(filePath).fieldDelimiter(",").types(String.class, String.class, Long.class).map(new MapFunction<Tuple3<String, String, Long>, Row>() {
@Override
public Row map(Tuple3<String, String, Long> t) throws Exception {
Row row = new Row(3);
row.setField(0, t.f0.getBytes("UTF-8"));
row.setField(1, t.f1.getBytes("UTF-8"));
row.setField(2, t.f2.longValue());
return row;
}
});
//將Row對(duì)象寫到mysql
outputData.output(JDBCOutputFormat.buildJDBCOutputFormat()
.setDrivername(driverClass)
.setDBUrl(dbUrl)
.setUsername(userNmae)
.setPassword(passWord)
.setQuery(sql)
.finish());
//觸發(fā)執(zhí)行
env.execute("insert data to mysql");
System.out.println("mysql寫入成功!");
TimeUnit.SECONDS.sleep(6);
//讀mysql
DataSource<Row> dataSource = env.createInput(JDBCInputFormat.buildJDBCInputFormat()
.setDrivername(driverClass)
.setDBUrl(dbUrl)
.setUsername(userNmae)
.setPassword(passWord)
.setQuery("select * from temp")
.setRowTypeInfo(new RowTypeInfo(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO))
.finish());
//獲取數(shù)據(jù)并打印
dataSource.map(new MapFunction<Row, String>() {
@Override
public String map(Row value) throws Exception {
System.out.println(value);
return value.toString();
}
}).print();
}
}
文章名稱:Flink批處理之讀寫Mysql
當(dāng)前網(wǎng)址:http://muchs.cn/article28/jsopjp.html
成都網(wǎng)站建設(shè)公司_創(chuàng)新互聯(lián),為您提供響應(yīng)式網(wǎng)站、網(wǎng)站制作、電子商務(wù)、自適應(yīng)網(wǎng)站、微信小程序、商城網(wǎng)站
聲明:本網(wǎng)站發(fā)布的內(nèi)容(圖片、視頻和文字)以用戶投稿、用戶轉(zhuǎn)載內(nèi)容為主,如果涉及侵權(quán)請(qǐng)盡快告知,我們將會(huì)在第一時(shí)間刪除。文章觀點(diǎn)不代表本網(wǎng)站立場(chǎng),如需處理請(qǐng)聯(lián)系客服。電話:028-86922220;郵箱:631063699@qq.com。內(nèi)容未經(jīng)允許不得轉(zhuǎn)載,或轉(zhuǎn)載時(shí)需注明來源: 創(chuàng)新互聯(lián)