#!/usr/bin/python2.7

# Copyright 2016 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+
17
+ import logging
18
+
19
+ import yaml
20
+ from pygenie .client import Genie
21
+ from pygenie .conf import GenieConf
22
+
23
# Keep third-party library chatter quiet; this script emits its own
# progress messages at warning level so they are still visible.
logging.basicConfig(level=logging.WARNING)

# Module-level logger shared by the whole setup script.
LOGGER = logging.getLogger(__name__)
26
+
27
+
28
def load_yaml(yaml_file):
    """Parse a YAML spec file and return its deserialized contents.

    Args:
        yaml_file: Path to the YAML file to read.

    Returns:
        The deserialized document (a dict for the spec files in this repo).
    """
    with open(yaml_file) as _file:
        # safe_load only constructs plain Python objects; yaml.load without
        # an explicit Loader is deprecated and can execute arbitrary
        # constructors on malicious input.
        return yaml.safe_load(_file)
31
+
32
+
33
genie_conf = GenieConf()
genie_conf.genie.url = "http://genie:8080"  # Genie endpoint inside the demo docker network

genie = Genie(genie_conf)


def _register_application(spec_file, label):
    """Create a Genie application from a YAML spec, log it, and return its id."""
    application_id = genie.create_application(load_yaml(spec_file))
    # Lazy %-args: the logging module formats only if the record is emitted.
    LOGGER.warning("Created %s application with id = [%s]", label, application_id)
    return application_id


def _register_command(spec_file, label):
    """Create a Genie command from a YAML spec, log it, and return its id."""
    command_id = genie.create_command(load_yaml(spec_file))
    LOGGER.warning("Created %s command with id = [%s]", label, command_id)
    return command_id


def _register_cluster(spec_file, label):
    """Create a Genie cluster from a YAML spec, log it, and return its id."""
    cluster_id = genie.create_cluster(load_yaml(spec_file))
    LOGGER.warning("Created %s cluster with id = [%s]", label, cluster_id)
    return cluster_id


# Applications: one Hadoop install plus the two supported Spark versions.
hadoop_application_id = _register_application("applications/hadoop271.yml", "Hadoop 2.7.1")
spark_163_application_id = _register_application("applications/spark163.yml", "Spark 1.6.3")
spark_201_application_id = _register_application("applications/spark201.yml", "Spark 2.0.1")

# Commands: the Hadoop CLIs plus the Spark shells and submitters.
hadoop_command_id = _register_command("commands/hadoop271.yml", "Hadoop")
hdfs_command_id = _register_command("commands/hdfs271.yml", "HDFS")
yarn_command_id = _register_command("commands/yarn271.yml", "Yarn")
spark_163_shell_command_id = _register_command("commands/sparkShell163.yml", "Spark 1.6.3 Shell")
spark_163_submit_command_id = _register_command("commands/sparkSubmit163.yml", "Spark 1.6.3 Submit")
spark_201_shell_command_id = _register_command("commands/sparkShell201.yml", "Spark 2.0.1 Shell")
spark_201_submit_command_id = _register_command("commands/sparkSubmit201.yml", "Spark 2.0.1 Submit")

# Attach to each command the application stack it needs at job-launch time:
# the plain Hadoop CLIs need only Hadoop, the Spark commands need Hadoop plus
# their matching Spark version.
for command_id, label, application_ids in (
    (hadoop_command_id, "Hadoop", [hadoop_application_id]),
    (hdfs_command_id, "HDFS", [hadoop_application_id]),
    (yarn_command_id, "Yarn", [hadoop_application_id]),
    (spark_163_shell_command_id, "Spark 1.6.3 Shell",
     [hadoop_application_id, spark_163_application_id]),
    (spark_163_submit_command_id, "Spark 1.6.3 Submit",
     [hadoop_application_id, spark_163_application_id]),
    (spark_201_shell_command_id, "Spark 2.0.1 Shell",
     [hadoop_application_id, spark_201_application_id]),
    (spark_201_submit_command_id, "Spark 2.0.1 Submit",
     [hadoop_application_id, spark_201_application_id]),
):
    genie.set_application_for_command(command_id, application_ids)
    LOGGER.warning("Set applications for %s command to = %s", label, application_ids)

# Clusters: both demo clusters expose the full command set.
prod_cluster_id = _register_cluster("clusters/prod.yml", "prod")
test_cluster_id = _register_cluster("clusters/test.yml", "test")

all_command_ids = [
    hadoop_command_id,
    hdfs_command_id,
    yarn_command_id,
    spark_163_shell_command_id,
    spark_201_shell_command_id,
    spark_163_submit_command_id,
    spark_201_submit_command_id,
]
for cluster_label, cluster_id in (("prod", prod_cluster_id), ("test", test_cluster_id)):
    genie.set_commands_for_cluster(cluster_id, all_command_ids)
    LOGGER.warning("Added all commands to the %s cluster with id = [%s]",
                   cluster_label, cluster_id)