# Define the schema (header) first, then build the content as Rows, convert the
# Rows to a DataFrame, attach the schema, and finally insert the result into MySQL.
#!/usr/bin/env python3
"""Build a student DataFrame from raw text records and append it to MySQL via JDBC."""
from pyspark.sql import Row
from pyspark.sql.types import *
from pyspark import SparkContext, SparkConf
from pyspark.sql import SparkSession

spark = SparkSession.builder.config(conf=SparkConf()).getOrCreate()

# Explicit schema; the trailing True marks each column as nullable.
schema = StructType([
    StructField("id", IntegerType(), True),
    StructField("name", StringType(), True),
    StructField("gender", StringType(), True),
    StructField("age", IntegerType(), True),  # fixed: was IntegerType without ()
])

# Parse each space-separated record into a list of fields.
# fixed: spark.saprkContext -> spark.sparkContext
studentRDD = spark.sparkContext.parallelize(["3 HuangYukai M 26"]).map(lambda x: x.split(" "))

# Convert each field list into a Row matching the schema.
# fixed: "lamda" -> "lambda", "strip[]" -> "strip()", and the "." that should
# have been a "," between the gender and age arguments (age was being dropped).
rowRDD = studentRDD.map(
    lambda x: Row(int(x[0].strip()), x[1].strip(), x[2].strip(), int(x[3].strip()))
)
studentDF = spark.createDataFrame(rowRDD, schema)

# JDBC connection properties for the MySQL sink.
prop = {
    'user': 'hadoop',
    'password': 'hadoop',
    'driver': "com.mysql.jdbc.Driver",
}
# Append the rows to the `student` table of the `spark` database.
studentDF.write.jdbc("jdbc:mysql://localhost:3306/spark", 'student', 'append', prop)
# --- CSDN page footer (not part of the script) ---
# qq_45371603
# 13 original posts · 0 likes · 78 views
# (private message / follow links)