Skip to content

Commit 8b7528c

Browse files
Merge pull request #105 from databricks-industry-solutions/serverless-support
added notebooks for serverless
2 parents 5354d03 + c6e430b commit 8b7528c

24 files changed (+2281, −47 lines)

examples/daily/foundation_daily.ipynb

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,6 @@
102102
"outputs": [],
103103
"source": [
104104
"import logging\n",
105-
"logger = spark._jvm.org.apache.log4j\n",
106105
"logging.getLogger(\"py4j.java_gateway\").setLevel(logging.ERROR)\n",
107106
"logging.getLogger(\"py4j.clientserver\").setLevel(logging.ERROR)"
108107
]

examples/daily/global_daily.ipynb

Lines changed: 14 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,6 @@
102102
"outputs": [],
103103
"source": [
104104
"import logging\n",
105-
"logger = spark._jvm.org.apache.log4j\n",
106105
"logging.getLogger(\"py4j.java_gateway\").setLevel(logging.ERROR)\n",
107106
"logging.getLogger(\"py4j.clientserver\").setLevel(logging.ERROR)"
108107
]
@@ -622,9 +621,21 @@
622621
],
623622
"metadata": {
624623
"application/vnd.databricks.v1+notebook": {
625-
"computePreferences": null,
624+
"computePreferences": {
625+
"hardware": {
626+
"accelerator": null,
627+
"gpuPoolId": null,
628+
"memory": null
629+
}
630+
},
626631
"dashboards": [],
627-
"environmentMetadata": null,
632+
"environmentMetadata": {
633+
"base_environment": "",
634+
"dependencies": [
635+
"/Workspace/Users/[email protected]/many-model-forecasting"
636+
],
637+
"environment_version": "3"
638+
},
628639
"inputWidgetPreferences": null,
629640
"language": "python",
630641
"notebookMetadata": {

examples/daily/local_univariate_daily.ipynb

Lines changed: 26 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,6 @@
102102
"outputs": [],
103103
"source": [
104104
"import logging\n",
105-
"logger = spark._jvm.org.apache.log4j\n",
106105
"logging.getLogger(\"py4j.java_gateway\").setLevel(logging.ERROR)\n",
107106
"logging.getLogger(\"py4j.clientserver\").setLevel(logging.ERROR)"
108107
]
@@ -349,8 +348,18 @@
349348
},
350349
"outputs": [],
351350
"source": [
352-
"if n > sc.defaultParallelism:\n",
353-
" sqlContext.setConf(\"spark.sql.shuffle.partitions\", sc.defaultParallelism)"
351+
"# Get the current value of shuffle partitions\n",
352+
"current = spark.conf.get(\"spark.sql.shuffle.partitions\")\n",
353+
"\n",
354+
"# If not set to 'auto' (serverless), convert to int; otherwise, use default 200\n",
355+
"if current != \"auto\":\n",
356+
" current_val = int(current)\n",
357+
"else:\n",
358+
" current_val = 200 \n",
359+
"\n",
360+
"# If n is greater than the current value, update the shuffle partitions setting\n",
361+
"if n > current_val: \n",
362+
" spark.conf.set(\"spark.sql.shuffle.partitions\", str(n))"
354363
]
355364
},
356365
{
@@ -682,9 +691,21 @@
682691
],
683692
"metadata": {
684693
"application/vnd.databricks.v1+notebook": {
685-
"computePreferences": null,
694+
"computePreferences": {
695+
"hardware": {
696+
"accelerator": null,
697+
"gpuPoolId": null,
698+
"memory": null
699+
}
700+
},
686701
"dashboards": [],
687-
"environmentMetadata": null,
702+
"environmentMetadata": {
703+
"base_environment": "",
704+
"dependencies": [
705+
"/Workspace/Users/[email protected]/many-model-forecasting"
706+
],
707+
"environment_version": "3"
708+
},
688709
"inputWidgetPreferences": null,
689710
"language": "python",
690711
"notebookMetadata": {

examples/external_regressors/foundation_external_regressors_daily.ipynb

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,6 @@
103103
"source": [
104104
"import logging\n",
105105
"from tqdm.autonotebook import tqdm\n",
106-
"logger = spark._jvm.org.apache.log4j\n",
107106
"logging.getLogger(\"py4j.java_gateway\").setLevel(logging.ERROR)\n",
108107
"logging.getLogger(\"py4j.clientserver\").setLevel(logging.ERROR)\n",
109108
"import uuid"

examples/external_regressors/global_external_regressors_daily.ipynb

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,6 @@
103103
"source": [
104104
"import logging\n",
105105
"from tqdm.autonotebook import tqdm\n",
106-
"logger = spark._jvm.org.apache.log4j\n",
107106
"logging.getLogger(\"py4j.java_gateway\").setLevel(logging.ERROR)\n",
108107
"logging.getLogger(\"py4j.clientserver\").setLevel(logging.ERROR)\n",
109108
"import uuid"

examples/external_regressors/local_univariate_external_regressors_daily.ipynb

Lines changed: 26 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,6 @@
8484
"source": [
8585
"import logging\n",
8686
"from tqdm.autonotebook import tqdm\n",
87-
"logger = spark._jvm.org.apache.log4j\n",
8887
"logging.getLogger(\"py4j.java_gateway\").setLevel(logging.ERROR)\n",
8988
"logging.getLogger(\"py4j.clientserver\").setLevel(logging.ERROR)"
9089
]
@@ -333,8 +332,18 @@
333332
},
334333
"outputs": [],
335334
"source": [
336-
"if sample and size > sc.defaultParallelism:\n",
337-
" sqlContext.setConf(\"spark.sql.shuffle.partitions\", sc.defaultParallelism)"
335+
"# Get the current value of shuffle partitions\n",
336+
"current = spark.conf.get(\"spark.sql.shuffle.partitions\")\n",
337+
"\n",
338+
"# If not set to 'auto' (serverless), convert to int; otherwise, use default 200\n",
339+
"if current != \"auto\":\n",
340+
" current_val = int(current)\n",
341+
"else:\n",
342+
" current_val = 200 \n",
343+
"\n",
344+
"# If sample is true and size is greater than the current value, update the shuffle partitions setting\n",
345+
"if sample and size > current_val: \n",
346+
" spark.conf.set(\"spark.sql.shuffle.partitions\", str(size))"
338347
]
339348
},
340349
{
@@ -636,9 +645,21 @@
636645
],
637646
"metadata": {
638647
"application/vnd.databricks.v1+notebook": {
639-
"computePreferences": null,
648+
"computePreferences": {
649+
"hardware": {
650+
"accelerator": null,
651+
"gpuPoolId": null,
652+
"memory": null
653+
}
654+
},
640655
"dashboards": [],
641-
"environmentMetadata": null,
656+
"environmentMetadata": {
657+
"base_environment": "",
658+
"dependencies": [
659+
"/Workspace/Users/[email protected]/many-model-forecasting"
660+
],
661+
"environment_version": "3"
662+
},
642663
"inputWidgetPreferences": null,
643664
"language": "python",
644665
"notebookMetadata": {

examples/hourly/foundation_hourly.ipynb

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,6 @@
102102
"outputs": [],
103103
"source": [
104104
"import logging\n",
105-
"logger = spark._jvm.org.apache.log4j\n",
106105
"logging.getLogger(\"py4j.java_gateway\").setLevel(logging.ERROR)\n",
107106
"logging.getLogger(\"py4j.clientserver\").setLevel(logging.ERROR)"
108107
]

examples/hourly/global_hourly.ipynb

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,6 @@
102102
"outputs": [],
103103
"source": [
104104
"import logging\n",
105-
"logger = spark._jvm.org.apache.log4j\n",
106105
"logging.getLogger(\"py4j.java_gateway\").setLevel(logging.ERROR)\n",
107106
"logging.getLogger(\"py4j.clientserver\").setLevel(logging.ERROR)"
108107
]
@@ -608,7 +607,13 @@
608607
],
609608
"metadata": {
610609
"application/vnd.databricks.v1+notebook": {
611-
"computePreferences": null,
610+
"computePreferences": {
611+
"hardware": {
612+
"accelerator": null,
613+
"gpuPoolId": null,
614+
"memory": null
615+
}
616+
},
612617
"dashboards": [],
613618
"environmentMetadata": null,
614619
"inputWidgetPreferences": null,

examples/hourly/local_univariate_hourly.ipynb

Lines changed: 27 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,6 @@
102102
"outputs": [],
103103
"source": [
104104
"import logging\n",
105-
"logger = spark._jvm.org.apache.log4j\n",
106105
"logging.getLogger(\"py4j.java_gateway\").setLevel(logging.ERROR)\n",
107106
"logging.getLogger(\"py4j.clientserver\").setLevel(logging.ERROR)"
108107
]
@@ -341,16 +340,26 @@
341340
"rowLimit": 10000
342341
},
343342
"inputWidgets": {},
344-
"nuid": "2408e75a-7cbe-47c2-8d5a-6c710e1db5fe",
343+
"nuid": "ab4663c8-9b4b-4945-8317-dd4a5d8afc2e",
345344
"showTitle": false,
346345
"tableResultSettingsMap": {},
347346
"title": ""
348347
}
349348
},
350349
"outputs": [],
351350
"source": [
352-
"if n > sc.defaultParallelism:\n",
353-
" sqlContext.setConf(\"spark.sql.shuffle.partitions\", sc.defaultParallelism)"
351+
"# Get the current value of shuffle partitions\n",
352+
"current = spark.conf.get(\"spark.sql.shuffle.partitions\")\n",
353+
"\n",
354+
"# If not set to 'auto' (serverless), convert to int; otherwise, use default 200\n",
355+
"if current != \"auto\":\n",
356+
" current_val = int(current)\n",
357+
"else:\n",
358+
" current_val = 200 \n",
359+
"\n",
360+
"# If n is greater than the current value, update the shuffle partitions setting\n",
361+
"if n > current_val: \n",
362+
" spark.conf.set(\"spark.sql.shuffle.partitions\", str(n))"
354363
]
355364
},
356365
{
@@ -649,9 +658,21 @@
649658
],
650659
"metadata": {
651660
"application/vnd.databricks.v1+notebook": {
652-
"computePreferences": null,
661+
"computePreferences": {
662+
"hardware": {
663+
"accelerator": null,
664+
"gpuPoolId": null,
665+
"memory": null
666+
}
667+
},
653668
"dashboards": [],
654-
"environmentMetadata": null,
669+
"environmentMetadata": {
670+
"base_environment": "",
671+
"dependencies": [
672+
"/Workspace/Users/[email protected]/many-model-forecasting"
673+
],
674+
"environment_version": "3"
675+
},
655676
"inputWidgetPreferences": null,
656677
"language": "python",
657678
"notebookMetadata": {

examples/monthly/foundation_monthly.ipynb

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,6 @@
102102
"outputs": [],
103103
"source": [
104104
"import logging\n",
105-
"logger = spark._jvm.org.apache.log4j\n",
106105
"logging.getLogger(\"py4j.java_gateway\").setLevel(logging.ERROR)\n",
107106
"logging.getLogger(\"py4j.clientserver\").setLevel(logging.ERROR)"
108107
]

0 commit comments

Comments (0)