
Developing Spark SQL DataFrames with Java and Scala in Local Mode

2016-03-14 21:34
Source file (people.json). Note that Spark's JSON data source expects one complete JSON object per line (the JSON Lines format), not a single pretty-printed JSON document:

{"name":"Michael"}

{"name":"Andy", "age":30}

{"name":"Justin", "age":19}

Java version:

package com.dt.sparkApps.sql;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SQLContext;

public class DataFrameOps {

    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("DataFrameOps").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);
        SQLContext sqlContext = new SQLContext(sc);

        // DataFrame df = sqlContext.read().json("hdfs://master:9000/library/people.json");
        DataFrame df = sqlContext.read().json("G://IMFBigDataSpark2016//tesdata//people.json");

        df.show();
        df.printSchema();

        df.select("name").show();

        // select name, age + 10 from table;
        df.select(df.col("name"), df.col("age").plus(10)).show();

        // select * from table where age > 10;
        df.filter(df.col("age").gt(10)).show();

        // select age, count(*) from table group by age;
        df.groupBy(df.col("age")).count().show();

        sc.stop();
    }
}
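The same queries can also be written as plain SQL by registering the DataFrame as a temporary table, which the Spark 1.x SQLContext used above supports. A minimal sketch that could be added to the main method before sc.stop() (the table name people is an arbitrary choice):

// register the DataFrame under a table name so SQL statements can reference it
df.registerTempTable("people");

// equivalent to df.filter(df.col("age").gt(10)).show()
DataFrame adults = sqlContext.sql("SELECT name, age FROM people WHERE age > 10");
adults.show();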

Result:
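For the three-record JSON file above, df.show() and df.printSchema() should produce output along these lines: Spark infers the schema from the JSON (numbers become long), sorts the inferred columns alphabetically, and fills Michael's missing age with null:

+----+-------+
| age|   name|
+----+-------+
|null|Michael|
|  30|   Andy|
|  19| Justin|
+----+-------+

root
 |-- age: long (nullable = true)
 |-- name: string (nullable = true)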

Scala version:

package com.dt.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext

object DataFrameOps {

  def main(args: Array[String]) {
    val conf = new SparkConf() // create the SparkConf object
    conf.setAppName("Wow,My First Spark DataFrame App!") // set the application name; it appears in the monitoring UI while the program runs
    conf.setMaster("local") // run locally, so no Spark cluster needs to be installed

    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // val df = sqlContext.read.json("hdfs://master:9000/library/people.json")
    val df = sqlContext.read.json("G://IMFBigDataSpark2016//tesdata//people.json")

    df.show()
    df.printSchema()

    df.select("name").show()
    df.select(df("name"), df("age") + 10).show()
    df.filter(df("age") > 10).show()
    df.groupBy(df("age")).count().show() // same group-by count as in the Java version

    sc.stop()
  }
}
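Once the queries look right, a result DataFrame can be persisted back to storage. A minimal Java sketch using the DataFrameWriter API, which is available from Spark 1.4 onward (the output directory below is a hypothetical path and must not already exist):

// write the rows with age > 10 back out as JSON; Spark creates the output directory
df.filter(df.col("age").gt(10))
  .write()
  .json("G://IMFBigDataSpark2016//output//adults");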