-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy path0064_rdd_transformation_multiple.py
57 lines (50 loc) · 1.5 KB
/
0064_rdd_transformation_multiple.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
"""
PySpark RDD Operations
https://sparkbyexamples.com/pyspark-rdd
RDD transformations –
Transformations are lazy operations,(Until you call action on RDD)
instead of updating an RDD, these operations return another RDD.
RDD actions –
operations that trigger computation and return RDD values.
"""
from pyspark.sql import SparkSession
spark = SparkSession.builder.appName("SparkByExamples.com").getOrCreate()
data = [
"Project Gutenberg’s",
"Alice’s Adventures in Wonderland",
"Project Gutenberg’s",
"Adventures in Wonderland",
"Project Gutenberg’s",
]
rdd = spark.sparkContext.parallelize(data)
for element in rdd.collect():
print(element)
# Flatmap
rdd2 = rdd.flatMap(lambda x: x.split(" "))
for element in rdd2.collect():
print(element)
# map
rdd3 = rdd2.map(lambda x: (x, 1))
for element in rdd3.collect():
print(element)
# reduceByKey
rdd4 = rdd3.reduceByKey(lambda a, b: a + b)
for element in rdd4.collect():
print(element)
# map
rdd5 = rdd4.map(lambda x: (x[1], x[0])).sortByKey()
for element in rdd5.collect():
print(element)
# filter
rdd6 = rdd5.filter(lambda x: "a" in x[1])
for element in rdd6.collect():
print(element)
from pyspark.sql.functions import col, expr

# add_months demo: shift each date forward by its own "increment" months.
rows = [("2019-01-23", 1), ("2019-06-24", 2), ("2019-09-20", 3)]
df = spark.createDataFrame(rows).toDF("date", "increment")
inc_date = expr(
    "add_months(to_date(date,'yyyy-MM-dd'),cast(increment as int))"
).alias("inc_date")
df.select(col("date"), col("increment"), inc_date).show()