Commit 4a8e6d0: Updated archetypes.

Parent: d475809

12 files changed, +684 -6 lines
peel-archetypes/peel-flink-bundle/src/main/resources/archetype-resources/__rootArtifactId__-bundle/src/main/resources/config/experiments.xml

Lines changed: 21 additions & 1 deletion
@@ -12,11 +12,31 @@
     <context:annotation-config/>

     <!-- Scan for annotated Peel components in the '${package}' package -->
-    <context:component-scan base-package="${package}"/>
+    <context:component-scan base-package="${package}" use-default-filters="false">
+        <context:include-filter type="annotation" expression="org.springframework.stereotype.Service"/>
+        <context:include-filter type="annotation" expression="org.springframework.stereotype.Component"/>
+    </context:component-scan>

+    <!-- Experiment definitions can be written in one of two ways -->
+
+    <!-- Option A: XML-based definitions -->
+
+    <!-- Peel configurations -->
+    <import resource="classpath:peel-core.xml"/>
+    <import resource="classpath:peel-extensions.xml"/>
+
+    <!-- XML-based experiment definitions -->
     <!-- custom system beans -->
     <import resource="systems.xml"/>
     <!-- wordcount experiment beans -->
     <import resource="experiments.wordcount.xml"/>

+    <!-- Option B: Code-based definitions -->
+
+    <!-- Peel configurations -->
+    <!--<import resource="classpath:peel-core.xml"/>-->
+
+    <!-- Code-based experiment definitions -->
+    <!--<bean class="${package}.ExperimentsDefinitions"/>-->

 </beans>
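Option A and Option B above are mutually exclusive wirings of the same experiments: Option A assembles them from the imported XML files (systems.xml, experiments.wordcount.xml), while Option B, left commented out by default, registers the ${package}.ExperimentsDefinitions @Configuration class introduced later in this commit. Exactly one of the two should be enabled at a time.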

peel-archetypes/peel-flink-bundle/src/main/resources/archetype-resources/__rootArtifactId__-peelextensions/pom.xml

Lines changed: 4 additions & 0 deletions
@@ -42,6 +42,10 @@
             <groupId>org.peelframework</groupId>
             <artifactId>peel-core</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.peelframework</groupId>
+            <artifactId>peel-extensions</artifactId>
+        </dependency>
     </dependencies>

     <build>
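The added peel-extensions dependency is what lets the code-based definitions compile in this module: the Flink, Spark, and HDFS2 system beans referenced below (org.peelframework.flink.*, org.peelframework.spark.*, org.peelframework.hadoop.*) ship in peel-extensions, while peel-core provides only the core abstractions.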
peel-archetypes/peel-flink-bundle/src/main/resources/archetype-resources/__rootArtifactId__-peelextensions/src/main/scala/ExperimentsDefinitions.scala

Lines changed: 202 additions & 0 deletions

@@ -0,0 +1,202 @@
+#set( $symbol_pound = '#' )
+#set( $symbol_dollar = '$' )
+#set( $symbol_escape = '\' )
+package ${package}
+
+import com.samskivert.mustache.Mustache
+import com.typesafe.config.ConfigFactory
+import org.peelframework.core.beans.data.{CopiedDataSet, DataSet, ExperimentOutput, GeneratedDataSet}
+import org.peelframework.core.beans.experiment.ExperimentSequence.SimpleParameters
+import org.peelframework.core.beans.experiment.{ExperimentSequence, ExperimentSuite}
+import org.peelframework.core.beans.system.Lifespan
+import org.peelframework.flink.beans.experiment.FlinkExperiment
+import org.peelframework.flink.beans.job.FlinkJob
+import org.peelframework.flink.beans.system.Flink
+import org.peelframework.hadoop.beans.system.HDFS2
+import org.peelframework.spark.beans.experiment.SparkExperiment
+import org.peelframework.spark.beans.system.Spark
+import org.springframework.context.annotation.{Bean, Configuration}
+import org.springframework.context.{ApplicationContext, ApplicationContextAware}
+
+/** Experiment definitions for the '${parentArtifactId}' bundle. */
+@Configuration
+class ExperimentsDefinitions extends ApplicationContextAware {
+
+  /* The enclosing application context. */
+  var ctx: ApplicationContext = null
+
+  def setApplicationContext(ctx: ApplicationContext): Unit = {
+    this.ctx = ctx
+  }
+
+  // ---------------------------------------------------
+  // Systems
+  // ---------------------------------------------------
+
+  @Bean(name = Array("hdfs-2.7.1"))
+  def `hdfs-2.7.1`: HDFS2 = new HDFS2(
+    version = "2.7.1",
+    configKey = "hadoop-2",
+    lifespan = Lifespan.SUITE,
+    mc = ctx.getBean(classOf[Mustache.Compiler])
+  )
+
+  @Bean(name = Array("flink-0.9.0"))
+  def `flink-0.9.0`: Flink = new Flink(
+    version = "0.9.0",
+    configKey = "flink",
+    lifespan = Lifespan.EXPERIMENT,
+    dependencies = Set(ctx.getBean("hdfs-2.7.1", classOf[HDFS2])),
+    mc = ctx.getBean(classOf[Mustache.Compiler])
+  )
+
+  @Bean(name = Array("spark-1.3.1"))
+  def `spark-1.3.1`: Spark = new Spark(
+    version = "1.3.1",
+    configKey = "spark",
+    lifespan = Lifespan.EXPERIMENT,
+    dependencies = Set(ctx.getBean("hdfs-2.7.1", classOf[HDFS2])),
+    mc = ctx.getBean(classOf[Mustache.Compiler])
+  )
+
+  // ---------------------------------------------------
+  // Data Generators
+  // ---------------------------------------------------
+
+  @Bean(name = Array("datagen.words"))
+  def `datagen.words`: FlinkJob = new FlinkJob(
+    runner = ctx.getBean("flink-0.9.0", classOf[Flink]),
+    command =
+      """
+        |-v -c ${package}.datagen.flink.WordGenerator ${symbol_escape}
+        |${symbol_dollar}{app.path.datagens}/${parentArtifactId}-datagens-${version}.jar ${symbol_escape}
+        |${symbol_dollar}{datagen.dictionary.dize} ${symbol_escape}
+        |${symbol_dollar}{system.default.config.parallelism.total} ${symbol_escape}
+        |${symbol_dollar}{datagen.tuples.per.task} ${symbol_escape}
+        |${symbol_dollar}{system.hadoop-2.path.input}/rubbish.txt
+      """.stripMargin.trim
+  )
+
+  // ---------------------------------------------------
+  // Data Sets
+  // ---------------------------------------------------
+
+  @Bean(name = Array("dataset.words.static"))
+  def `dataset.words.static`: DataSet = new CopiedDataSet(
+    src = "${symbol_dollar}{app.path.datasets}/rubbish.txt",
+    dst = "${symbol_dollar}{system.hadoop-2.path.input}/rubbish.txt",
+    fs = ctx.getBean("hdfs-2.7.1", classOf[HDFS2])
+  )
+
+  @Bean(name = Array("dataset.words.generated"))
+  def `dataset.words.generated`: DataSet = new GeneratedDataSet(
+    src = ctx.getBean("datagen.words", classOf[FlinkJob]),
+    dst = "${symbol_dollar}{system.hadoop-2.path.input}/rubbish.txt",
+    fs = ctx.getBean("hdfs-2.7.1", classOf[HDFS2])
+  )
+
+  @Bean(name = Array("wordcount.output"))
+  def `wordcount.output`: ExperimentOutput = new ExperimentOutput(
+    path = "${symbol_dollar}{system.hadoop-2.path.output}/wordcount",
+    fs = ctx.getBean("hdfs-2.7.1", classOf[HDFS2])
+  )
+
+  // ---------------------------------------------------
+  // Experiments
+  // ---------------------------------------------------
+
+  @Bean(name = Array("wordcount.default"))
+  def `wordcount.default`: ExperimentSuite = {
+    val `wordcount.flink.default` = new FlinkExperiment(
+      name = "wordcount.flink.default",
+      command =
+        """
+          |-v -c ${package}.flink.FlinkWC ${symbol_escape}
+          |${symbol_dollar}{app.path.apps}/${parentArtifactId}-flink-jobs-${version}.jar ${symbol_escape}
+          |${symbol_dollar}{system.hadoop-2.path.input}/rubbish.txt ${symbol_escape}
+          |${symbol_dollar}{system.hadoop-2.path.output}/wordcount
+        """.stripMargin.trim,
+      config = ConfigFactory.parseString(""),
+      runs = 3,
+      runner = ctx.getBean("flink-0.9.0", classOf[Flink]),
+      inputs = Set(ctx.getBean("dataset.words.static", classOf[DataSet])),
+      outputs = Set(ctx.getBean("wordcount.output", classOf[ExperimentOutput]))
+    )
+
+    val `wordcount.spark.default` = new SparkExperiment(
+      name = "wordcount.spark.default",
+      command =
+        """
+          |--class ${package}.spark.SparkWC ${symbol_escape}
+          |${symbol_dollar}{app.path.apps}/${parentArtifactId}-spark-jobs-${version}.jar ${symbol_escape}
+          |${symbol_dollar}{system.hadoop-2.path.input}/rubbish.txt ${symbol_escape}
+          |${symbol_dollar}{system.hadoop-2.path.output}/wordcount
+        """.stripMargin.trim,
+      config = ConfigFactory.parseString(""),
+      runs = 3,
+      runner = ctx.getBean("spark-1.3.1", classOf[Spark]),
+      inputs = Set(ctx.getBean("dataset.words.static", classOf[DataSet])),
+      outputs = Set(ctx.getBean("wordcount.output", classOf[ExperimentOutput]))
+    )
+
+    new ExperimentSuite(Seq(
+      `wordcount.flink.default`,
+      `wordcount.spark.default`))
+  }
+
+  @Bean(name = Array("wordcount.scale-out"))
+  def `wordcount.scale-out`: ExperimentSuite = {
+    val `wordcount.flink.prototype` = new FlinkExperiment(
+      name = "wordcount.flink.__topXXX__",
+      command =
+        """
+          |-v -c ${package}.flink.FlinkWC ${symbol_escape}
+          |${symbol_dollar}{app.path.apps}/${parentArtifactId}-flink-jobs-${version}.jar ${symbol_escape}
+          |${symbol_dollar}{system.hadoop-2.path.input}/rubbish.txt ${symbol_escape}
+          |${symbol_dollar}{system.hadoop-2.path.output}/wordcount
+        """.stripMargin.trim,
+      config = ConfigFactory.parseString(
+        """
+          |system.default.config.slaves = ${symbol_dollar}{env.slaves.__topXXX__.hosts}
+          |system.default.config.parallelism.total = ${symbol_dollar}{env.slaves.__topXXX__.total.parallelism}
+          |datagen.dictionary.dize = 10000
+          |datagen.tuples.per.task = 10000000 ${symbol_pound} ~ 100 MB
+        """.stripMargin.trim),
+      runs = 3,
+      runner = ctx.getBean("flink-0.9.0", classOf[Flink]),
+      inputs = Set(ctx.getBean("dataset.words.generated", classOf[DataSet])),
+      outputs = Set(ctx.getBean("wordcount.output", classOf[ExperimentOutput]))
+    )
+
+    val `wordcount.spark.prototype` = new SparkExperiment(
+      name = "wordcount.spark.__topXXX__",
+      command =
+        """
+          |--class ${package}.spark.SparkWC ${symbol_escape}
+          |${symbol_dollar}{app.path.apps}/${parentArtifactId}-spark-jobs-${version}.jar ${symbol_escape}
+          |${symbol_dollar}{system.hadoop-2.path.input}/rubbish.txt ${symbol_escape}
+          |${symbol_dollar}{system.hadoop-2.path.output}/wordcount
+        """.stripMargin.trim,
+      config = ConfigFactory.parseString(
+        """
+          |system.default.config.slaves = ${symbol_dollar}{env.slaves.__topXXX__.hosts}
+          |system.default.config.parallelism.total = ${symbol_dollar}{env.slaves.__topXXX__.total.parallelism}
+          |datagen.dictionary.dize = 10000
+          |datagen.tuples.per.task = 10000000 ${symbol_pound} ~ 100 MB
+        """.stripMargin.trim),
+      runs = 3,
+      runner = ctx.getBean("spark-1.3.1", classOf[Spark]),
+      inputs = Set(ctx.getBean("dataset.words.generated", classOf[DataSet])),
+      outputs = Set(ctx.getBean("wordcount.output", classOf[ExperimentOutput]))
+    )
+
+    new ExperimentSuite(
+      new ExperimentSequence(
+        parameters = new SimpleParameters(
+          paramName = "topXXX",
+          paramVals = Seq("top005", "top010", "top020")),
+        prototypes = Seq(
+          `wordcount.flink.prototype`,
+          `wordcount.spark.prototype`)))
+  }
+}
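The wordcount.scale-out suite relies on ExperimentSequence with SimpleParameters to stamp out one concrete experiment per parameter value from each prototype. The following is a minimal, self-contained sketch of that expansion; the substitution rule is inferred from the __topXXX__ placeholders above, and all names in the snippet are illustrative rather than Peel API:

// Sketch: how a parameterized prototype expands into concrete experiments.
object SequenceExpansionSketch extends App {
  val paramName  = "topXXX"
  val paramVals  = Seq("top005", "top010", "top020")
  val prototypes = Seq("wordcount.flink.__topXXX__", "wordcount.spark.__topXXX__")

  // Every `__paramName__` occurrence in a prototype's name (and config) is
  // replaced once per value, yielding |paramVals| * |prototypes| experiments.
  val expanded = for {
    v <- paramVals
    p <- prototypes
  } yield p.replace(s"__${paramName}__", v)

  expanded.foreach(println)
  // wordcount.flink.top005, wordcount.spark.top005, wordcount.flink.top010, ...
}

Once wired in through either option, a suite can then be launched by name from the Peel command line, e.g. something like ./peel.sh suite:run wordcount.scale-out (command name assumed from the standard Peel launcher).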

peel-archetypes/peel-flink-bundle/src/main/resources/archetype-resources/__rootArtifactId__-peelextensions/src/main/scala/cli/command/QueryRuntimes.scala

Lines changed: 1 addition & 1 deletion
@@ -94,7 +94,7 @@ class QueryRuntimes extends Command {
     logger.info(s"------------------------------------------------------------------------------------------------")
     logger.info(s"| suite | name | min | max | median |")
     logger.info(s"------------------------------------------------------------------------------------------------")
-    for ((suite, name, median, min, max) <- runtimes) {
+    for ((suite, name, min, max, median) <- runtimes) {
       logger.info(f"| ${symbol_dollar}suite%-25s | ${symbol_dollar}name%-25s | ${symbol_dollar}min%10d | ${symbol_dollar}max%10d | ${symbol_dollar}median%10d | ")
     }
     logger.info(s"------------------------------------------------------------------------------------------------")
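The one-line change above fixes a positional-destructuring mismatch: the tuple was unpacked as (suite, name, median, min, max) while each row carries (suite, name, min, max, median), so the three runtime columns were printed under the wrong headers; because min, max, and median share one type, the compiler cannot catch this. A small self-contained sketch of the failure mode (row shape assumed):

object DestructuringOrderSketch extends App {
  // Assumed row shape: (suite, name, min, max, median)
  val runtimes = Seq(("suite-a", "exp-1", 10L, 30L, 20L))

  // Buggy binding: `median` gets 10 (the min), `min` gets 30, `max` gets 20.
  for ((suite, name, median, min, max) <- runtimes)
    println(f"| $suite%-10s | $name%-10s | $min%10d | $max%10d | $median%10d |")

  // Fixed binding: the names line up with the tuple's actual order.
  for ((suite, name, min, max, median) <- runtimes)
    println(f"| $suite%-10s | $name%-10s | $min%10d | $max%10d | $median%10d |")
}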

peel-archetypes/peel-flinkspark-bundle/src/main/resources/archetype-resources/__rootArtifactId__-bundle/src/main/resources/config/experiments.xml

Lines changed: 21 additions & 1 deletion
@@ -12,11 +12,31 @@
     <context:annotation-config/>

     <!-- Scan for annotated Peel components in the '${package}' package -->
-    <context:component-scan base-package="${package}"/>
+    <context:component-scan base-package="${package}" use-default-filters="false">
+        <context:include-filter type="annotation" expression="org.springframework.stereotype.Service"/>
+        <context:include-filter type="annotation" expression="org.springframework.stereotype.Component"/>
+    </context:component-scan>

+    <!-- Experiment definitions can be written in one of two ways -->
+
+    <!-- Option A: XML-based definitions -->
+
+    <!-- Peel configurations -->
+    <import resource="classpath:peel-core.xml"/>
+    <import resource="classpath:peel-extensions.xml"/>
+
+    <!-- XML-based experiment definitions -->
     <!-- custom system beans -->
     <import resource="systems.xml"/>
     <!-- wordcount experiment beans -->
     <import resource="experiments.wordcount.xml"/>

+    <!-- Option B: Code-based definitions -->
+
+    <!-- Peel configurations -->
+    <!--<import resource="classpath:peel-core.xml"/>-->
+
+    <!-- Code-based experiment definitions -->
+    <!--<bean class="${package}.ExperimentsDefinitions"/>-->

 </beans>

peel-archetypes/peel-flinkspark-bundle/src/main/resources/archetype-resources/__rootArtifactId__-peelextensions/pom.xml

Lines changed: 4 additions & 0 deletions
@@ -42,6 +42,10 @@
             <groupId>org.peelframework</groupId>
             <artifactId>peel-core</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.peelframework</groupId>
+            <artifactId>peel-extensions</artifactId>
+        </dependency>
     </dependencies>

     <build>
