14 changes: 12 additions & 2 deletions Makefile
@@ -103,14 +103,23 @@ dev-binder: .binder-image
--workdir /home/main/notebooks $(BINDER_IMAGE) \
/home/main/start-notebook.sh --ip=0.0.0.0

SPARK_MONITOR_JAR:=toree-spark-monitor-plugin-assembly-$(VERSION)$(SNAPSHOT).jar

target/scala-$(SCALA_VERSION)/$(ASSEMBLY_JAR): VM_WORKDIR=/src/toree-kernel
target/scala-$(SCALA_VERSION)/$(ASSEMBLY_JAR): ${shell find ./*/src/main/**/*}
target/scala-$(SCALA_VERSION)/$(ASSEMBLY_JAR): ${shell find ./*/build.sbt}
target/scala-$(SCALA_VERSION)/$(ASSEMBLY_JAR): ${shell find ./project/*.scala} ${shell find ./project/*.sbt}
target/scala-$(SCALA_VERSION)/$(ASSEMBLY_JAR): dist/toree-legal project/build.properties build.sbt
$(call RUN,$(ENV_OPTS) sbt root/assembly)

build: target/scala-$(SCALA_VERSION)/$(ASSEMBLY_JAR)
spark-monitor-plugin/target/scala-$(SCALA_VERSION)/$(SPARK_MONITOR_JAR): VM_WORKDIR=/src/toree-kernel
spark-monitor-plugin/target/scala-$(SCALA_VERSION)/$(SPARK_MONITOR_JAR): ${shell find ./spark-monitor-plugin/src/main/**/*}
spark-monitor-plugin/target/scala-$(SCALA_VERSION)/$(SPARK_MONITOR_JAR): spark-monitor-plugin/build.sbt
spark-monitor-plugin/target/scala-$(SCALA_VERSION)/$(SPARK_MONITOR_JAR): ${shell find ./project/*.scala} ${shell find ./project/*.sbt}
spark-monitor-plugin/target/scala-$(SCALA_VERSION)/$(SPARK_MONITOR_JAR): project/build.properties build.sbt
$(call RUN,$(ENV_OPTS) sbt sparkMonitorPlugin/assembly)

build: target/scala-$(SCALA_VERSION)/$(ASSEMBLY_JAR) spark-monitor-plugin/target/scala-$(SCALA_VERSION)/$(SPARK_MONITOR_JAR)

test: VM_WORKDIR=/src/toree-kernel
test:
@@ -119,9 +128,10 @@ test:
sbt-%:
$(call RUN,$(ENV_OPTS) sbt $(subst sbt-,,$@) )

dist/toree/lib: target/scala-$(SCALA_VERSION)/$(ASSEMBLY_JAR)
dist/toree/lib: target/scala-$(SCALA_VERSION)/$(ASSEMBLY_JAR) spark-monitor-plugin/target/scala-$(SCALA_VERSION)/$(SPARK_MONITOR_JAR)
@mkdir -p dist/toree/lib
@cp target/scala-$(SCALA_VERSION)/$(ASSEMBLY_JAR) dist/toree/lib/.
@cp spark-monitor-plugin/target/scala-$(SCALA_VERSION)/$(SPARK_MONITOR_JAR) dist/toree/lib/.

dist/toree/bin: ${shell find ./etc/bin/*}
@mkdir -p dist/toree/bin
53 changes: 53 additions & 0 deletions README.md
@@ -80,6 +80,59 @@ This results in 2 packages.

NOTE: `make release` uses `docker`. Please refer to `docker` installation instructions for your system.

## Building Individual Components

### Main Toree Assembly
To build just the main Toree assembly jar (without the spark-monitor-plugin):
```
sbt assembly
```
This creates: `target/scala-2.12/toree-assembly-<VERSION>.jar`

### Spark Monitor Plugin
To build the spark-monitor-plugin as a separate jar:
```
sbt sparkMonitorPlugin/assembly
```
This creates: `spark-monitor-plugin/target/scala-2.12/spark-monitor-plugin-<VERSION>.jar`

### Build All Components
To compile all projects, including both the main assembly and the spark-monitor-plugin:
```
sbt compile
```

**Note**: The spark-monitor-plugin is built as a separate jar and is not included in the main Toree assembly.
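
As a rough sketch of how this separation is wired in the root `build.sbt` (the actual change appears later in this diff): the root project aggregates the plugin, so `sbt compile` builds it, but does not depend on it, so `root/assembly` leaves its classes out of the main jar. Module names below are abbreviated for illustration:

```scala
// Sketch of the build.sbt wiring (module list abbreviated):
lazy val sparkMonitorPlugin = (project in file("spark-monitor-plugin"))
  .settings(name := "toree-spark-monitor-plugin")

lazy val root = (project in file("."))
  .settings(name := "toree")
  .aggregate(sparkMonitorPlugin /*, other Toree modules */)          // built and tested together
  .dependsOn(/* other Toree modules; sparkMonitorPlugin omitted */)  // kept out of toree-assembly
```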

## Using the Spark Monitor Plugin

To enable the Spark Monitor Plugin, specify the path to the plugin JAR when starting Toree:

### Option 1: Command Line Parameter
```bash
# Start Toree with spark-monitor-plugin enabled
java -jar target/scala-2.12/toree-assembly-<VERSION>.jar --magic-url file:///path/to/spark-monitor-plugin/target/scala-2.12/spark-monitor-plugin-<VERSION>.jar [other-options]
```

### Option 2: Jupyter Kernel Installation
When installing Toree as a Jupyter kernel, you can specify the plugin:
```bash
jupyter toree install --spark_home=<YOUR_SPARK_PATH> --kernel_name=toree_with_monitor --toree_opts="--magic-url file:///path/to/spark-monitor-plugin-<VERSION>.jar"
```

### Option 3: Configuration File
You can also specify the plugin in a configuration file and use the `--profile` option:
```json
{
"magic_urls": ["file:///path/to/spark-monitor-plugin-<VERSION>.jar"]
}
```
Then start with: `java -jar toree-assembly.jar --profile config.json`

**Important**:
- Use an absolute path to the spark-monitor-plugin JAR and make sure the JAR is accessible from the location where Toree is running.
- The JAR file name does not contain the `toree` prefix, so it is not loaded automatically as an internal plugin; this lets you control when the SparkMonitorPlugin is enabled via the `--magic-url` parameter (see the sketch below).
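
The jar name itself comes from sbt-assembly. A minimal, illustrative sketch of how a non-prefixed name could be configured in `spark-monitor-plugin/build.sbt`, assuming you want to override the default `<name>-assembly-<version>.jar` pattern (this setting is not part of the build files shown in this diff):

```scala
// Illustrative only: emit the plugin jar without the "toree" prefix.
assembly / assemblyJarName := s"spark-monitor-plugin-${version.value}.jar"
```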

Run Examples
============
To play with the example notebooks, run
9 changes: 8 additions & 1 deletion build.sbt
@@ -126,7 +126,7 @@ ThisBuild / credentials += Credentials(Path.userHome / ".ivy2" / ".credentials")
lazy val root = (project in file("."))
.settings(name := "toree")
.aggregate(
macros,protocol,plugins,communication,kernelApi,client,scalaInterpreter,sqlInterpreter,kernel
macros,protocol,plugins,sparkMonitorPlugin,communication,kernelApi,client,scalaInterpreter,sqlInterpreter,kernel
)
.dependsOn(
macros,protocol,communication,kernelApi,client,scalaInterpreter,sqlInterpreter,kernel
@@ -154,6 +154,13 @@ lazy val plugins = (project in file("plugins"))
.settings(name := "toree-plugins")
.dependsOn(macros)

/**
* Project representing the SparkMonitor plugin for Toree.
*/
lazy val sparkMonitorPlugin = (project in file("spark-monitor-plugin"))
.settings(name := "toree-spark-monitor-plugin")
.dependsOn(macros, protocol, plugins, kernel, kernelApi)

/**
* Project representing forms of communication used as input/output for the
* client/kernel.
2 changes: 1 addition & 1 deletion plugins/build.sbt
@@ -21,7 +21,7 @@ Test / fork := true
libraryDependencies ++= Seq(
Dependencies.scalaReflect.value,
Dependencies.clapper,
Dependencies.slf4jApi
Dependencies.slf4jApi,
)

// Test dependencies
2 changes: 2 additions & 0 deletions project/Dependencies.scala
@@ -84,4 +84,6 @@ object Dependencies {
)
}

// Provided scope: a Spark distribution bundles its own copy of py4j at runtime.
val py4j = "net.sf.py4j" % "py4j" % "0.10.7" % "provided"

}
46 changes: 46 additions & 0 deletions spark-monitor-plugin/build.sbt
@@ -0,0 +1,46 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/

Test / fork := true

// Needed for SparkMonitor plugin
libraryDependencies ++= Dependencies.sparkAll.value
libraryDependencies ++= Seq(
Dependencies.playJson,
Dependencies.py4j
)

// Test dependencies
libraryDependencies += Dependencies.scalaCompiler.value % "test"

// Assembly configuration for separate jar
enablePlugins(AssemblyPlugin)

assembly / assemblyMergeStrategy := {
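// Discard duplicate module metadata (module-info.class and META-INF entries) that
// would otherwise collide when the Spark and Play dependencies are merged into one jar.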
case "module-info.class" => MergeStrategy.discard
case PathList("META-INF", "versions", "9", "module-info.class") => MergeStrategy.discard
case PathList("META-INF", xs @ _*) => MergeStrategy.discard
case x =>
val oldStrategy = (assembly / assemblyMergeStrategy).value
oldStrategy(x)
}

assembly / assemblyOption ~= {
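// Leave the Scala library out of the plugin jar; the Toree/Spark runtime already provides it.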
_.withIncludeScala(false)
}

// Skip running tests when building the assembly jar.
assembly / test := {}