
Commit b706696

Added sample data for testing local PySpark installation
1 parent 808b17d commit b706696

2 files changed: +423 −3 lines changed

sql/PySpark/PySpark_Local_Example.ipynb

Lines changed: 16 additions & 3 deletions
@@ -31,7 +31,20 @@
     "execution_count": 1,
     "id": "664504f6-bbac-466e-982d-42c62fb82a37",
     "metadata": {},
-    "outputs": [],
+    "outputs": [
+     {
+      "name": "stderr",
+      "output_type": "stream",
+      "text": [
+       "22/01/02 20:21:59 WARN Utils: Your hostname, asus-laptop resolves to a loopback address: 127.0.1.1; using 172.30.59.173 instead (on interface eth0)\n",
+       "22/01/02 20:21:59 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address\n",
+       "Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties\n",
+       "Setting default log level to \"WARN\".\n",
+       "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n",
+       "22/01/02 20:22:00 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n"
+      ]
+     }
+    ],
     "source": [
      "from pyspark.sql import SparkSession\n",
      "spark = SparkSession.builder.master(\"local[*]\").getOrCreate()\n",
@@ -79,7 +92,7 @@
    ],
   "metadata": {
    "kernelspec": {
-    "display_name": "Py3.9 (pyspark_dev)",
+    "display_name": "Py3.8 (pyspark_dev)",
     "language": "python",
     "name": "pyspark_dev"
    },
@@ -93,7 +106,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.9.8"
+   "version": "3.8.10"
   }
  },
 "nbformat": 4,
