
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from pyspark import SparkContext, SparkConf

# Optionally point Spark at a specific JDK before creating the context:
# import os
# os.environ['JAVA_HOME'] = '/opt/usr/lib/jdk1.8.0_211'  # directory containing the JDK's bin/

# To run against a standalone cluster instead of local mode, use a master URL like:
# conf = SparkConf().setAppName("lg").setMaster("spark://192.168.10.182:7077")
conf = SparkConf().setAppName("miniProject").setMaster("local[4]")  # local mode, 4 worker threads
sc = SparkContext(conf=conf)

con = {"es.resource": "index/type"}  # the index/type to read; assumes Elasticsearch at its localhost defaults
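
# Optional es-hadoop settings for when Elasticsearch is not local (these are
# standard es-hadoop configuration keys; the values below are illustrative):
# con["es.nodes"] = "192.168.10.182"  # comma-separated list of ES nodes (default "localhost")
# con["es.port"] = "9200"             # HTTP port (default "9200")
# con["es.query"] = '{"query": {"match_all": {}}}'  # limit which documents are read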

# Read the index as an RDD of (doc_id, fields) pairs using
# elasticsearch-hadoop's EsInputFormat.
rdd = sc.newAPIHadoopRDD(
    inputFormatClass="org.elasticsearch.hadoop.mr.EsInputFormat",
    keyClass="org.apache.hadoop.io.NullWritable",
    valueClass="org.elasticsearch.hadoop.mr.LinkedMapWritable",
    conf=con)
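
# A minimal sketch of consuming the result (assumes the index above exists):
# each element is a (document id, fields) pair, with the LinkedMapWritable
# value converted to a Python dict.
for doc_id, fields in rdd.take(5):
    print(doc_id, fields)

sc.stop()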