azkaban-aplcache
Changes
eclipse-styles.xml 48(+28 -20)
src/main/java/azkaban/alert/Alerter.java 26(+22 -4)
src/main/java/azkaban/database/AbstractJdbcLoader.java 208(+105 -103)
src/main/java/azkaban/database/AzkabanDatabaseSetup.java 708(+363 -345)
src/main/java/azkaban/database/DataSourceUtils.java 424(+217 -207)
src/main/java/azkaban/execapp/AzkabanExecutorServer.java 546(+276 -270)
src/main/java/azkaban/execapp/event/Event.java 115(+58 -57)
src/main/java/azkaban/execapp/event/FlowWatcher.java 176(+89 -87)
src/main/java/azkaban/execapp/ExecutorServlet.java 724(+364 -360)
src/main/java/azkaban/execapp/FlowRunner.java 2038(+1034 -1004)
src/main/java/azkaban/execapp/FlowRunnerManager.java 1344(+702 -642)
src/main/java/azkaban/execapp/JMXHttpServlet.java 102(+52 -50)
src/main/java/azkaban/execapp/JobRunner.java 1239(+625 -614)
src/main/java/azkaban/execapp/ProjectVersion.java 174(+89 -85)
src/main/java/azkaban/executor/ConnectorParams.java 143(+73 -70)
src/main/java/azkaban/executor/ExecutableFlow.java 416(+209 -207)
src/main/java/azkaban/executor/ExecutableFlowBase.java 802(+408 -394)
src/main/java/azkaban/executor/ExecutableJobInfo.java 253(+127 -126)
src/main/java/azkaban/executor/ExecutableNode.java 831(+422 -409)
src/main/java/azkaban/executor/ExecutionAttempt.java 122(+62 -60)
src/main/java/azkaban/executor/ExecutionOptions.java 444(+227 -217)
src/main/java/azkaban/executor/ExecutorLoader.java 107(+67 -40)
src/main/java/azkaban/executor/ExecutorManager.java 2454(+1280 -1174)
src/main/java/azkaban/executor/ExecutorManagerAdapter.java 290(+169 -121)
src/main/java/azkaban/executor/ExecutorManagerServlet.java 410(+205 -205)
src/main/java/azkaban/executor/JdbcExecutorLoader.java 2337(+1085 -1252)
src/main/java/azkaban/executor/mail/DefaultMailCreator.java 316(+178 -138)
src/main/java/azkaban/executor/Status.java 166(+83 -83)
src/main/java/azkaban/flow/CommonJobProperties.java 227(+116 -111)
src/main/java/azkaban/flow/Edge.java 271(+136 -135)
src/main/java/azkaban/flow/Flow.java 768(+387 -381)
src/main/java/azkaban/flow/FlowProps.java 97(+49 -48)
src/main/java/azkaban/flow/Node.java 322(+161 -161)
src/main/java/azkaban/jmx/DisplayName.java 20(+18 -2)
src/main/java/azkaban/jmx/JmxFlowRunnerManager.java 126(+71 -55)
src/main/java/azkaban/jmx/JmxJettyServer.java 260(+138 -122)
src/main/java/azkaban/jmx/JmxJettyServerMBean.java 152(+84 -68)
src/main/java/azkaban/jmx/JmxTriggerManager.java 118(+67 -51)
src/main/java/azkaban/jobExecutor/AbstractJob.java 160(+80 -80)
src/main/java/azkaban/jobExecutor/AbstractProcessJob.java 319(+162 -157)
src/main/java/azkaban/jobExecutor/JavaProcessJob.java 247(+125 -122)
src/main/java/azkaban/jobExecutor/Job.java 80(+39 -41)
src/main/java/azkaban/jobExecutor/LongArgJob.java 212(+107 -105)
src/main/java/azkaban/jobExecutor/NoopJob.java 72(+36 -36)
src/main/java/azkaban/jobExecutor/ProcessJob.java 350(+174 -176)
src/main/java/azkaban/jobExecutor/PythonJob.java 29(+15 -14)
src/main/java/azkaban/jobExecutor/RubyJob.java 29(+15 -14)
src/main/java/azkaban/jobExecutor/ScriptJob.java 33(+18 -15)
src/main/java/azkaban/jobtype/JobTypeManager.java 677(+348 -329)
src/main/java/azkaban/jobtype/JobTypePluginSet.java 244(+125 -119)
src/main/java/azkaban/migration/scheduler/Schedule.java 633(+304 -329)
src/main/java/azkaban/migration/sla/SLA.java 490(+255 -235)
src/main/java/azkaban/migration/sla/SlaOptions.java 102(+62 -40)
src/main/java/azkaban/project/JdbcProjectLoader.java 2663(+1403 -1260)
src/main/java/azkaban/project/Project.java 804(+406 -398)
src/main/java/azkaban/project/ProjectFileHandler.java 196(+94 -102)
src/main/java/azkaban/project/ProjectLoader.java 459(+243 -216)
src/main/java/azkaban/project/ProjectLogEvent.java 163(+82 -81)
src/main/java/azkaban/project/ProjectManager.java 769(+408 -361)
src/main/java/azkaban/scheduler/Schedule.java 692(+321 -371)
src/main/java/azkaban/scheduler/ScheduleManager.java 458(+226 -232)
src/main/java/azkaban/sla/SlaOption.java 309(+162 -147)
src/main/java/azkaban/trigger/ActionTypeLoader.java 307(+159 -148)
src/main/java/azkaban/trigger/builtin/BasicTimeChecker.java 342(+170 -172)
src/main/java/azkaban/trigger/builtin/ExecuteFlowAction.java 528(+275 -253)
src/main/java/azkaban/trigger/builtin/ExecutionChecker.java 221(+112 -109)
src/main/java/azkaban/trigger/builtin/SlaAlertAction.java 300(+159 -141)
src/main/java/azkaban/trigger/builtin/SlaChecker.java 584(+304 -280)
src/main/java/azkaban/trigger/CheckerTypeLoader.java 316(+164 -152)
src/main/java/azkaban/trigger/Condition.java 294(+150 -144)
src/main/java/azkaban/trigger/JdbcTriggerLoader.java 626(+318 -308)
src/main/java/azkaban/trigger/Trigger.java 853(+411 -442)
src/main/java/azkaban/trigger/TriggerAction.java 30(+15 -15)
src/main/java/azkaban/trigger/TriggerManager.java 964(+495 -469)
src/main/java/azkaban/trigger/TriggerStatus.java 44(+22 -22)
src/main/java/azkaban/user/Permission.java 350(+176 -174)
src/main/java/azkaban/user/Role.java 34(+17 -17)
src/main/java/azkaban/user/User.java 264(+132 -132)
src/main/java/azkaban/user/UserManager.java 74(+39 -35)
src/main/java/azkaban/user/XmlUserManager.java 606(+308 -298)
src/main/java/azkaban/utils/AbstractMailer.java 141(+73 -68)
src/main/java/azkaban/utils/cache/Cache.java 336(+169 -167)
src/main/java/azkaban/utils/cache/CacheManager.java 218(+109 -109)
src/main/java/azkaban/utils/cache/Element.java 46(+23 -23)
src/main/java/azkaban/utils/CircularBuffer.java 85(+43 -42)
src/main/java/azkaban/utils/DirectoryFlowLoader.java 706(+358 -348)
src/main/java/azkaban/utils/Emailer.java 319(+165 -154)
src/main/java/azkaban/utils/EmailMessage.java 361(+180 -181)
src/main/java/azkaban/utils/FileIOUtils.java 817(+413 -404)
src/main/java/azkaban/utils/GZIPUtils.java 69(+36 -33)
src/main/java/azkaban/utils/JSONUtils.java 453(+226 -227)
src/main/java/azkaban/utils/LogGobbler.java 107(+54 -53)
src/main/java/azkaban/utils/Md5Hasher.java 66(+33 -33)
src/main/java/azkaban/utils/Pair.java 94(+47 -47)
src/main/java/azkaban/utils/Props.java 1865(+924 -941)
src/main/java/azkaban/utils/PropsUtils.java 634(+318 -316)
src/main/java/azkaban/utils/StringUtils.java 87(+43 -44)
src/main/java/azkaban/utils/SwapQueue.java 107(+55 -52)
src/main/java/azkaban/utils/Triple.java 116(+58 -58)
src/main/java/azkaban/utils/TypedMapWrapper.java 262(+129 -133)
src/main/java/azkaban/utils/Utils.java 699(+351 -348)
src/main/java/azkaban/utils/WebUtils.java 283(+138 -145)
src/main/java/azkaban/webapp/AzkabanServer.java 210(+107 -103)
src/main/java/azkaban/webapp/AzkabanWebServer.java 2329(+1192 -1137)
src/main/java/azkaban/webapp/servlet/ExecutorServlet.java 1609(+833 -776)
src/main/java/azkaban/webapp/servlet/HistoryServlet.java 549(+286 -263)
src/main/java/azkaban/webapp/servlet/HttpRequestUtils.java 419(+214 -205)
src/main/java/azkaban/webapp/servlet/JMXHttpServlet.java 444(+228 -216)
src/main/java/azkaban/webapp/servlet/Page.java 96(+48 -48)
src/main/java/azkaban/webapp/servlet/ProjectManagerServlet.java 2612(+1344 -1268)
src/main/java/azkaban/webapp/servlet/ProjectServlet.java 211(+104 -107)
src/main/java/azkaban/webapp/servlet/ScheduleServlet.java 1265(+653 -612)
src/main/less/azkaban-graph.less 80(+40 -40)
src/main/less/base.less 10(+5 -5)
src/main/less/callout.less 4(+2 -2)
src/main/less/context-menu.less 4(+2 -2)
src/main/less/flow.less 94(+47 -47)
src/main/less/header.less 2(+1 -1)
src/main/less/log.less 2(+1 -1)
src/main/less/navbar.less 4(+2 -2)
src/main/less/project.less 34(+17 -17)
src/main/less/tables.less 54(+27 -27)
src/web/js/azkaban/model/job-log.js 10(+5 -5)
src/web/js/azkaban/model/svg-graph.js 114(+57 -57)
src/web/js/azkaban/util/ajax.js 350(+175 -175)
src/web/js/azkaban/util/common.js 34(+17 -17)
src/web/js/azkaban/util/date.js 108(+54 -54)
src/web/js/azkaban/util/flow-loader.js 246(+123 -123)
src/web/js/azkaban/util/job-status.js 34(+17 -17)
src/web/js/azkaban/util/layout.js 672(+336 -336)
src/web/js/azkaban/util/schedule.js 66(+33 -33)
src/web/js/azkaban/util/svg-navigate.js 784(+392 -392)
src/web/js/azkaban/util/svgutils.js 111(+54 -57)
src/web/js/azkaban/view/admin-setup.js 220(+113 -107)
src/web/js/azkaban/view/context-menu.js 102(+51 -51)
src/web/js/azkaban/view/flow.js 830(+415 -415)
src/web/js/azkaban/view/flow-execute-dialog.js 1238(+619 -619)
src/web/js/azkaban/view/flow-execution-list.js 736(+368 -368)
src/web/js/azkaban/view/flow-extended.js 122(+61 -61)
src/web/js/azkaban/view/history.js 100(+50 -50)
src/web/js/azkaban/view/history-day.js 159(+78 -81)
src/web/js/azkaban/view/jmx.js 252(+126 -126)
src/web/js/azkaban/view/job-details.js 64(+32 -32)
src/web/js/azkaban/view/job-edit.js 438(+219 -219)
src/web/js/azkaban/view/job-history.js 28(+14 -14)
src/web/js/azkaban/view/job-list.js 650(+325 -325)
src/web/js/azkaban/view/login.js 94(+47 -47)
src/web/js/azkaban/view/main.js 356(+178 -178)
src/web/js/azkaban/view/message-dialog.js 44(+22 -22)
src/web/js/azkaban/view/project.js 446(+223 -223)
src/web/js/azkaban/view/project-logs.js 102(+51 -51)
src/web/js/azkaban/view/project-modals.js 140(+70 -70)
src/web/js/azkaban/view/project-permissions.js 618(+309 -309)
src/web/js/azkaban/view/scheduled.js 20(+10 -10)
src/web/js/azkaban/view/schedule-options.js 1100(+550 -550)
src/web/js/azkaban/view/schedule-panel.js 124(+62 -62)
src/web/js/azkaban/view/schedule-sla.js 550(+275 -275)
src/web/js/azkaban/view/schedule-svg.js 1034(+517 -517)
src/web/js/azkaban/view/svg-graph.js 1312(+656 -656)
src/web/js/azkaban/view/table-sort.js 248(+124 -124)
src/web/js/azkaban/view/time-graph.js 32(+16 -16)
src/web/js/azkaban/view/triggers.js 362(+181 -181)
unit/java/azkaban/Scrubber.java 20(+10 -10)
unit/java/azkaban/test/execapp/event/BlockingStatusTest.java 207(+104 -103)
unit/java/azkaban/test/execapp/FlowRunnerPipelineTest.java 1199(+633 -566)
unit/java/azkaban/test/execapp/FlowRunnerTest.java 846(+439 -407)
unit/java/azkaban/test/execapp/JobRunnerTest.java 693(+355 -338)
unit/java/azkaban/test/execapp/MockExecutorLoader.java 420(+221 -199)
unit/java/azkaban/test/execapp/MockProjectLoader.java 422(+212 -210)
unit/java/azkaban/test/executor/ExecutableFlowTest.java 726(+374 -352)
unit/java/azkaban/test/executor/JavaJob.java 148(+74 -74)
unit/java/azkaban/test/executor/JavaJobRunnerMain.java 535(+280 -255)
unit/java/azkaban/test/executor/JdbcExecutorLoaderTest.java 909(+475 -434)
unit/java/azkaban/test/executor/SleepJavaJob.java 169(+84 -85)
unit/java/azkaban/test/jobExecutor/Utils.java 27(+13 -14)
unit/java/azkaban/test/jobtype/JobTypeManagerTest.java 555(+286 -269)
unit/java/azkaban/test/project/JdbcProjectLoaderTest.java 1040(+534 -506)
unit/java/azkaban/test/project/ProjectTest.java 35(+18 -17)
unit/java/azkaban/test/trigger/ConditionTest.java 146(+79 -67)
unit/java/azkaban/test/trigger/JdbcTriggerLoaderTest.java 355(+183 -172)
unit/java/azkaban/test/trigger/ThresholdChecker.java 183(+91 -92)
unit/java/azkaban/test/trigger/TriggerManagerTest.java 334(+173 -161)
unit/java/azkaban/test/trigger/TriggerTest.java 85(+46 -39)
unit/java/azkaban/test/user/PermissionTest.java 186(+94 -92)
unit/java/azkaban/test/user/XmlUserManagerTest.java 365(+188 -177)
unit/java/azkaban/test/utils/cache/CacheTest.java 268(+134 -134)
unit/java/azkaban/test/utils/FileIOUtilsTest.java 210(+109 -101)
unit/java/azkaban/test/utils/JsonUtilsTest.java 89(+46 -43)
unit/java/azkaban/test/utils/PropsUtilsTest.java 307(+149 -158)
Details
eclipse-styles.xml 48(+28 -20)
diff --git a/eclipse-styles.xml b/eclipse-styles.xml
index 07e74f7..dd64bfb 100644
--- a/eclipse-styles.xml
+++ b/eclipse-styles.xml
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<profiles version="12">
-<profile kind="CodeFormatterProfile" name="Azkaban" version="12">
+<profile kind="CodeFormatterProfile" name="Hadoop" version="12">
<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
@@ -18,11 +18,11 @@
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
@@ -48,13 +48,13 @@
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_binary_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="0"/>
-<setting id="org.eclipse.jdt.core.compiler.source" value="1.5"/>
+<setting id="org.eclipse.jdt.core.compiler.source" value="1.6"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
@@ -72,9 +72,9 @@
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="2000"/>
+<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
@@ -82,21 +82,22 @@
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/>
+<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="2"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="16"/>
<setting id="org.eclipse.jdt.core.compiler.problem.assertIdentifier" value="error"/>
-<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="tab"/>
+<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="space"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
@@ -104,6 +105,7 @@
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
@@ -116,6 +118,7 @@
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
@@ -127,6 +130,7 @@
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="true"/>
@@ -135,12 +139,13 @@
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="4"/>
+<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="2"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
@@ -148,15 +153,15 @@
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
@@ -167,7 +172,7 @@
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
-<setting id="org.eclipse.jdt.core.compiler.compliance" value="1.5"/>
+<setting id="org.eclipse.jdt.core.compiler.compliance" value="1.6"/>
<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="2"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
@@ -181,7 +186,7 @@
<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
-<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
@@ -191,6 +196,7 @@
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
<setting id="org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode" value="enabled"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/>
@@ -199,7 +205,7 @@
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="2000"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
@@ -215,7 +221,7 @@
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
-<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
@@ -242,7 +248,8 @@
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="insert"/>
-<setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="1.5"/>
+<setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="1.6"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try" value="80"/>
<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="0"/>
<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
@@ -276,6 +283,7 @@
<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
src/main/java/azkaban/alert/Alerter.java 26(+22 -4)
diff --git a/src/main/java/azkaban/alert/Alerter.java b/src/main/java/azkaban/alert/Alerter.java
index 1ba02a8..196cb0c 100644
--- a/src/main/java/azkaban/alert/Alerter.java
+++ b/src/main/java/azkaban/alert/Alerter.java
@@ -1,11 +1,29 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.alert;
import azkaban.executor.ExecutableFlow;
import azkaban.sla.SlaOption;
public interface Alerter {
- void alertOnSuccess(ExecutableFlow exflow) throws Exception;
- void alertOnError(ExecutableFlow exflow, String ... extraReasons) throws Exception;
- void alertOnFirstError(ExecutableFlow exflow) throws Exception;
- void alertOnSla(SlaOption slaOption, String slaMessage) throws Exception;
+ void alertOnSuccess(ExecutableFlow exflow) throws Exception;
+ void alertOnError(ExecutableFlow exflow, String ... extraReasons) throws Exception;
+
+ void alertOnFirstError(ExecutableFlow exflow) throws Exception;
+
+ void alertOnSla(SlaOption slaOption, String slaMessage) throws Exception;
}
src/main/java/azkaban/database/AbstractJdbcLoader.java 208(+105 -103)
diff --git a/src/main/java/azkaban/database/AbstractJdbcLoader.java b/src/main/java/azkaban/database/AbstractJdbcLoader.java
index faac467..b626fab 100644
--- a/src/main/java/azkaban/database/AbstractJdbcLoader.java
+++ b/src/main/java/azkaban/database/AbstractJdbcLoader.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -29,104 +29,106 @@ import org.apache.commons.dbutils.ResultSetHandler;
import azkaban.utils.Props;
public abstract class AbstractJdbcLoader {
- /**
- * Used for when we store text data. Plain uses UTF8 encoding.
- */
- public static enum EncodingType {
- PLAIN(1), GZIP(2);
-
- private int numVal;
-
- EncodingType(int numVal) {
- this.numVal = numVal;
- }
-
- public int getNumVal() {
- return numVal;
- }
-
- public static EncodingType fromInteger(int x) {
- switch (x) {
- case 1:
- return PLAIN;
- case 2:
- return GZIP;
- default:
- return PLAIN;
- }
- }
- }
-
- private AzkabanDataSource dataSource;
-
- public AbstractJdbcLoader(Props props) {
- dataSource = DataSourceUtils.getDataSource(props);
- }
-
- protected Connection getDBConnection(boolean autoCommit) throws IOException {
- Connection connection = null;
- try {
- connection = dataSource.getConnection();
- connection.setAutoCommit(autoCommit);
- } catch (Exception e) {
- DbUtils.closeQuietly(connection);
- throw new IOException("Error getting DB connection.", e);
- }
-
- return connection;
- }
-
- protected QueryRunner createQueryRunner() {
- return new QueryRunner(dataSource);
- }
-
- protected boolean allowsOnDuplicateKey() {
- return dataSource.allowsOnDuplicateKey();
- }
-
- public static class IntHandler implements ResultSetHandler<Integer> {
- @Override
- public Integer handle(ResultSet rs) throws SQLException {
- if (!rs.next()) {
- return 0;
- }
-
- return rs.getInt(1);
- }
- }
-
- public static class SingleStringHandler implements ResultSetHandler<String> {
- @Override
- public String handle(ResultSet rs) throws SQLException {
- if (!rs.next()) {
- return null;
- }
-
- return rs.getString(1);
- }
- }
-
- public static class IntListHandler implements ResultSetHandler<ArrayList<Integer>> {
- @Override
- public ArrayList<Integer> handle(ResultSet rs) throws SQLException {
- ArrayList<Integer> results = new ArrayList<Integer>();
- while(rs.next()) {
- results.add(rs.getInt(1));
- }
-
- return results;
- }
- }
-
- public static class StringListHandler implements ResultSetHandler<ArrayList<String>> {
- @Override
- public ArrayList<String> handle(ResultSet rs) throws SQLException {
- ArrayList<String> results = new ArrayList<String>();
- while(rs.next()) {
- results.add(rs.getString(1));
- }
-
- return results;
- }
- }
+ /**
+ * Used for when we store text data. Plain uses UTF8 encoding.
+ */
+ public static enum EncodingType {
+ PLAIN(1), GZIP(2);
+
+ private int numVal;
+
+ EncodingType(int numVal) {
+ this.numVal = numVal;
+ }
+
+ public int getNumVal() {
+ return numVal;
+ }
+
+ public static EncodingType fromInteger(int x) {
+ switch (x) {
+ case 1:
+ return PLAIN;
+ case 2:
+ return GZIP;
+ default:
+ return PLAIN;
+ }
+ }
+ }
+
+ private AzkabanDataSource dataSource;
+
+ public AbstractJdbcLoader(Props props) {
+ dataSource = DataSourceUtils.getDataSource(props);
+ }
+
+ protected Connection getDBConnection(boolean autoCommit) throws IOException {
+ Connection connection = null;
+ try {
+ connection = dataSource.getConnection();
+ connection.setAutoCommit(autoCommit);
+ } catch (Exception e) {
+ DbUtils.closeQuietly(connection);
+ throw new IOException("Error getting DB connection.", e);
+ }
+
+ return connection;
+ }
+
+ protected QueryRunner createQueryRunner() {
+ return new QueryRunner(dataSource);
+ }
+
+ protected boolean allowsOnDuplicateKey() {
+ return dataSource.allowsOnDuplicateKey();
+ }
+
+ public static class IntHandler implements ResultSetHandler<Integer> {
+ @Override
+ public Integer handle(ResultSet rs) throws SQLException {
+ if (!rs.next()) {
+ return 0;
+ }
+
+ return rs.getInt(1);
+ }
+ }
+
+ public static class SingleStringHandler implements ResultSetHandler<String> {
+ @Override
+ public String handle(ResultSet rs) throws SQLException {
+ if (!rs.next()) {
+ return null;
+ }
+
+ return rs.getString(1);
+ }
+ }
+
+ public static class IntListHandler implements
+ ResultSetHandler<ArrayList<Integer>> {
+ @Override
+ public ArrayList<Integer> handle(ResultSet rs) throws SQLException {
+ ArrayList<Integer> results = new ArrayList<Integer>();
+ while (rs.next()) {
+ results.add(rs.getInt(1));
+ }
+
+ return results;
+ }
+ }
+
+ public static class StringListHandler implements
+ ResultSetHandler<ArrayList<String>> {
+ @Override
+ public ArrayList<String> handle(ResultSet rs) throws SQLException {
+ ArrayList<String> results = new ArrayList<String>();
+ while (rs.next()) {
+ results.add(rs.getString(1));
+ }
+
+ return results;
+ }
+ }
}
src/main/java/azkaban/database/AzkabanDatabaseSetup.java 708(+363 -345)
diff --git a/src/main/java/azkaban/database/AzkabanDatabaseSetup.java b/src/main/java/azkaban/database/AzkabanDatabaseSetup.java
index bd292d9..b31e1eb 100644
--- a/src/main/java/azkaban/database/AzkabanDatabaseSetup.java
+++ b/src/main/java/azkaban/database/AzkabanDatabaseSetup.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -42,346 +42,364 @@ import azkaban.utils.FileIOUtils;
import azkaban.utils.Props;
public class AzkabanDatabaseSetup {
- private static final Logger logger = Logger.getLogger(AzkabanDatabaseSetup.class);
- public static final String DATABASE_CHECK_VERSION = "database.check.version";
- public static final String DATABASE_AUTO_UPDATE_TABLES = "database.auto.update.tables";
- public static final String DATABASE_SQL_SCRIPT_DIR = "database.sql.scripts.dir";
-
- private static final String DEFAULT_SCRIPT_PATH = "sql";
- private static final String CREATE_SCRIPT_PREFIX = "create.";
- private static final String UPDATE_SCRIPT_PREFIX = "update.";
- private static final String SQL_SCRIPT_SUFFIX = ".sql";
-
- private static String FETCH_PROPERTY_BY_TYPE = "SELECT name, value FROM properties WHERE type=?";
- private static final String INSERT_DB_PROPERTY = "INSERT INTO properties (name, type, value, modified_time) values (?,?,?,?)";
- private static final String UPDATE_DB_PROPERTY = "UPDATE properties SET value=?,modified_time=? WHERE name=? AND type=?";
-
- private AzkabanDataSource dataSource;
- private Map<String, String> tables;
- private Map<String, String> installedVersions;
- private Set<String> missingTables;
- private Map<String, List<String>> upgradeList;
- private Props dbProps;
- private String version;
- private boolean needsUpdating;
-
- private String scriptPath = null;
-
- public AzkabanDatabaseSetup(Props props) {
- this(DataSourceUtils.getDataSource(props));
- this.scriptPath = props.getString(DATABASE_SQL_SCRIPT_DIR, DEFAULT_SCRIPT_PATH);
- }
-
- public AzkabanDatabaseSetup(AzkabanDataSource ds) {
- this.dataSource = ds;
- if (scriptPath == null) {
- scriptPath = DEFAULT_SCRIPT_PATH;
- }
- }
-
- public void loadTableInfo() throws IOException, SQLException {
- tables = new HashMap<String, String>();
- installedVersions = new HashMap<String, String>();
- missingTables = new HashSet<String>();
- upgradeList = new HashMap<String, List<String>>();
-
- dbProps = loadDBProps();
- version = dbProps.getString("version");
-
- loadInstalledTables();
- loadTableVersion();
- findMissingTables();
- findOutOfDateTables();
-
- needsUpdating = !upgradeList.isEmpty() || !missingTables.isEmpty();
- }
-
- public boolean needsUpdating() {
- if (version == null) {
- throw new RuntimeException("Uninitialized. Call loadTableInfo first.");
- }
-
- return needsUpdating;
- }
-
- public void printUpgradePlan() {
- if (!tables.isEmpty()) {
- logger.info("The following are installed tables");
- for (Map.Entry<String, String> installedTable: tables.entrySet()) {
- logger.info(" " + installedTable.getKey() + " version:" + installedTable.getValue());
- }
- }
- else {
- logger.info("No installed tables found.");
- }
-
- if (!missingTables.isEmpty()) {
- logger.info("The following are missing tables that need to be installed");
- for (String table: missingTables) {
- logger.info(" " + table);
- }
- }
- else {
- logger.info("There are no missing tables.");
- }
-
- if (!upgradeList.isEmpty()) {
- logger.info("The following tables need to be updated.");
- for (Map.Entry<String, List<String>> upgradeTable: upgradeList.entrySet()) {
- String tableInfo = " " + upgradeTable.getKey() + " versions:";
- for (String upVersion: upgradeTable.getValue()) {
- tableInfo += upVersion + ",";
- }
-
- logger.info(tableInfo);
- }
- }
- else {
- logger.info("No tables need to be updated.");
- }
- }
-
- public void updateDatabase(boolean createTable, boolean updateTable) throws SQLException, IOException {
- // We call this because it has an unitialize check.
- if (!needsUpdating()) {
- logger.info("Nothing to be done.");
- return;
- }
-
- if (createTable && !missingTables.isEmpty()) {
- createNewTables();
- }
- if (updateTable && !upgradeList.isEmpty()) {
- updateTables();
- }
- }
-
- private Props loadDBProps() throws IOException {
- File dbPropsFile = new File(this.scriptPath, "database.properties");
-
- if (!dbPropsFile.exists()) {
- throw new IOException("Cannot find 'database.properties' file in " + dbPropsFile.getPath());
- }
-
- Props props = new Props(null, dbPropsFile);
- return props;
- }
-
- private void loadTableVersion() throws SQLException {
- logger.info("Searching for table versions in the properties table");
- if (tables.containsKey("properties")) {
- // Load version from settings
- QueryRunner runner = new QueryRunner(dataSource);
- Map<String,String> map = runner.query(FETCH_PROPERTY_BY_TYPE, new PropertiesHandler(), PropertyType.DB.getNumVal());
- for (String key: map.keySet()) {
- String value = map.get(key);
- if (key.endsWith(".version")) {
- String tableName = key.substring(0, key.length() - ".version".length());
- installedVersions.put(tableName, value);
- if (tables.containsKey(tableName)) {
- tables.put(tableName, value);
- }
- }
- }
- }
- else {
- logger.info("Properties table doesn't exist.");
- }
- }
-
- private void loadInstalledTables() throws SQLException {
- logger.info("Searching for installed tables");
- Connection conn = null;
- try {
- conn = dataSource.getConnection();
- ResultSet rs = conn.getMetaData().getTables(conn.getCatalog(), null, null, new String[]{"TABLE"});
-
- while(rs.next()) {
- tables.put(rs.getString("TABLE_NAME").toLowerCase(), "2.1");
- }
- }
- finally {
- DbUtils.commitAndCloseQuietly(conn);
- }
- }
-
- private void findMissingTables() {
- File directory = new File(scriptPath);
- File[] createScripts = directory.listFiles(new FileIOUtils.PrefixSuffixFileFilter(CREATE_SCRIPT_PREFIX, SQL_SCRIPT_SUFFIX));
-
- for (File script: createScripts) {
- String name = script.getName();
- String[] nameSplit = name.split("\\.");
- String tableName = nameSplit[1];
-
- if (!tables.containsKey(tableName)) {
- missingTables.add(tableName);
- }
- }
- }
-
- private void findOutOfDateTables() {
- for (String key : tables.keySet()) {
- String version = tables.get(key);
-
- List<String> upgradeVersions = findOutOfDateTable(key, version);
- if (upgradeVersions != null && !upgradeVersions.isEmpty()) {
- upgradeList.put(key, upgradeVersions);
- }
- }
- }
-
- private List<String> findOutOfDateTable(String table, String version) {
- File directory = new File(scriptPath);
- ArrayList<String> versions = new ArrayList<String>();
-
- File[] createScripts = directory.listFiles(new FileIOUtils.PrefixSuffixFileFilter(UPDATE_SCRIPT_PREFIX + table, SQL_SCRIPT_SUFFIX));
- if (createScripts.length == 0) {
- return null;
- }
-
- String updateFileNameVersion = UPDATE_SCRIPT_PREFIX + table + "." + version;
- for (File file: createScripts) {
- String fileName = file.getName();
- if (fileName.compareTo(updateFileNameVersion) > 0) {
- if (fileName.startsWith(updateFileNameVersion)) {
- continue;
- }
-
- String[] split = fileName.split("\\.");
- String versionNum = "";
-
- for (int i = 2; i < split.length - 1; ++i) {
- try {
- Integer.parseInt(split[i]);
- versionNum += split[i] + ".";
- }
- catch (NumberFormatException e) {
- break;
- }
- }
- if (versionNum.endsWith(".")) {
- versionNum = versionNum.substring(0, versionNum.length() - 1);
-
- if (versionNum.compareTo(version) == 0) {
- versions.add(versionNum);
- }
- }
- }
- }
-
- Collections.sort(versions);
- return versions;
- }
-
- private void createNewTables() throws SQLException, IOException {
- Connection conn = dataSource.getConnection();
- conn.setAutoCommit(false);
- try {
- // Make sure that properties table is created first.
- if (missingTables.contains("properties")) {
- runTableScripts(conn, "properties", version, dataSource.getDBType(), false);
- }
- for (String table: missingTables) {
- if (!table.equals("properties")) {
- runTableScripts(conn, table, version, dataSource.getDBType(), false);
- }
- }
- }
- finally {
- conn.close();
- }
- }
-
- private void updateTables() throws SQLException, IOException {
- Connection conn = dataSource.getConnection();
- conn.setAutoCommit(false);
- try {
- // Make sure that properties table is created first.
- if (upgradeList.containsKey("properties")) {
- for (String version: upgradeList.get("properties")) {
- runTableScripts(conn, "properties", version, dataSource.getDBType(), true);
- }
- }
- for (String table: upgradeList.keySet()) {
- if (!table.equals("properties")) {
- for (String version: upgradeList.get(table)) {
- runTableScripts(conn, table, version, dataSource.getDBType(), true);
- }
- }
- }
- }
- finally {
- conn.close();
- }
- }
-
- private void runTableScripts(Connection conn, String table, String version, String dbType, boolean update) throws IOException, SQLException {
- String scriptName = "";
- if (update) {
- scriptName = "update." + table + "." + version;
- logger.info("Update table " + table + " to version " + version);
- }
- else {
- scriptName = "create." + table;
- logger.info("Creating new table " + table + " version " + version);
- }
-
- String dbSpecificScript = scriptName + "." + dbType + ".sql";
-
- File script = new File(scriptPath, dbSpecificScript);
- if (!script.exists()) {
- String dbScript = scriptName + ".sql";
- script = new File(scriptPath, dbScript);
-
- if (!script.exists()) {
- throw new IOException("Creation files do not exist for table " + table);
- }
- }
-
- BufferedInputStream buff = null;
- try {
- buff = new BufferedInputStream(new FileInputStream(script));
- String queryStr = IOUtils.toString(buff);
-
- String[] splitQuery = queryStr.split(";\\s*\n");
-
- QueryRunner runner = new QueryRunner();
-
- for (String query: splitQuery) {
- runner.update(conn, query);
- }
-
- // If it's properties, then we want to commit the table before we update it
- if (table.equals("properties")) {
- conn.commit();
- }
-
- String propertyName = table + ".version";
- if (!installedVersions.containsKey(table)) {
- runner.update(conn, INSERT_DB_PROPERTY, propertyName, DataSourceUtils.PropertyType.DB.getNumVal(), version, System.currentTimeMillis());
- }
- else {
- runner.update(conn, UPDATE_DB_PROPERTY, version, System.currentTimeMillis(), propertyName, DataSourceUtils.PropertyType.DB.getNumVal());
- }
- conn.commit();
- }
- finally {
- IOUtils.closeQuietly(buff);
- }
- }
-
- public static class PropertiesHandler implements ResultSetHandler<Map<String, String>> {
- @Override
- public Map<String, String> handle(ResultSet rs) throws SQLException {
- Map<String, String> results = new HashMap<String, String>();
- while(rs.next()) {
- String key = rs.getString(1);
- String value = rs.getString(2);
- results.put(key, value);
- }
-
- return results;
- }
- }
+ private static final Logger logger = Logger
+ .getLogger(AzkabanDatabaseSetup.class);
+ public static final String DATABASE_CHECK_VERSION = "database.check.version";
+ public static final String DATABASE_AUTO_UPDATE_TABLES =
+ "database.auto.update.tables";
+ public static final String DATABASE_SQL_SCRIPT_DIR =
+ "database.sql.scripts.dir";
+
+ private static final String DEFAULT_SCRIPT_PATH = "sql";
+ private static final String CREATE_SCRIPT_PREFIX = "create.";
+ private static final String UPDATE_SCRIPT_PREFIX = "update.";
+ private static final String SQL_SCRIPT_SUFFIX = ".sql";
+
+ private static String FETCH_PROPERTY_BY_TYPE =
+ "SELECT name, value FROM properties WHERE type=?";
+ private static final String INSERT_DB_PROPERTY =
+ "INSERT INTO properties (name, type, value, modified_time) values (?,?,?,?)";
+ private static final String UPDATE_DB_PROPERTY =
+ "UPDATE properties SET value=?,modified_time=? WHERE name=? AND type=?";
+
+ private AzkabanDataSource dataSource;
+ private Map<String, String> tables;
+ private Map<String, String> installedVersions;
+ private Set<String> missingTables;
+ private Map<String, List<String>> upgradeList;
+ private Props dbProps;
+ private String version;
+ private boolean needsUpdating;
+
+ private String scriptPath = null;
+
+ public AzkabanDatabaseSetup(Props props) {
+ this(DataSourceUtils.getDataSource(props));
+ this.scriptPath =
+ props.getString(DATABASE_SQL_SCRIPT_DIR, DEFAULT_SCRIPT_PATH);
+ }
+
+ public AzkabanDatabaseSetup(AzkabanDataSource ds) {
+ this.dataSource = ds;
+ if (scriptPath == null) {
+ scriptPath = DEFAULT_SCRIPT_PATH;
+ }
+ }
+
+ public void loadTableInfo() throws IOException, SQLException {
+ tables = new HashMap<String, String>();
+ installedVersions = new HashMap<String, String>();
+ missingTables = new HashSet<String>();
+ upgradeList = new HashMap<String, List<String>>();
+
+ dbProps = loadDBProps();
+ version = dbProps.getString("version");
+
+ loadInstalledTables();
+ loadTableVersion();
+ findMissingTables();
+ findOutOfDateTables();
+
+ needsUpdating = !upgradeList.isEmpty() || !missingTables.isEmpty();
+ }
+
+ public boolean needsUpdating() {
+ if (version == null) {
+ throw new RuntimeException("Uninitialized. Call loadTableInfo first.");
+ }
+
+ return needsUpdating;
+ }
+
+ public void printUpgradePlan() {
+ if (!tables.isEmpty()) {
+ logger.info("The following are installed tables");
+ for (Map.Entry<String, String> installedTable : tables.entrySet()) {
+ logger.info(" " + installedTable.getKey() + " version:"
+ + installedTable.getValue());
+ }
+ } else {
+ logger.info("No installed tables found.");
+ }
+
+ if (!missingTables.isEmpty()) {
+ logger.info("The following are missing tables that need to be installed");
+ for (String table : missingTables) {
+ logger.info(" " + table);
+ }
+ } else {
+ logger.info("There are no missing tables.");
+ }
+
+ if (!upgradeList.isEmpty()) {
+ logger.info("The following tables need to be updated.");
+ for (Map.Entry<String, List<String>> upgradeTable : upgradeList
+ .entrySet()) {
+ String tableInfo = " " + upgradeTable.getKey() + " versions:";
+ for (String upVersion : upgradeTable.getValue()) {
+ tableInfo += upVersion + ",";
+ }
+
+ logger.info(tableInfo);
+ }
+ } else {
+ logger.info("No tables need to be updated.");
+ }
+ }
+
+ public void updateDatabase(boolean createTable, boolean updateTable)
+ throws SQLException, IOException {
+    // We call this because it checks for uninitialized state.
+ if (!needsUpdating()) {
+ logger.info("Nothing to be done.");
+ return;
+ }
+
+ if (createTable && !missingTables.isEmpty()) {
+ createNewTables();
+ }
+ if (updateTable && !upgradeList.isEmpty()) {
+ updateTables();
+ }
+ }
+
+ private Props loadDBProps() throws IOException {
+ File dbPropsFile = new File(this.scriptPath, "database.properties");
+
+ if (!dbPropsFile.exists()) {
+ throw new IOException("Cannot find 'database.properties' file in "
+ + dbPropsFile.getPath());
+ }
+
+ Props props = new Props(null, dbPropsFile);
+ return props;
+ }
+
+ private void loadTableVersion() throws SQLException {
+ logger.info("Searching for table versions in the properties table");
+ if (tables.containsKey("properties")) {
+ // Load version from settings
+ QueryRunner runner = new QueryRunner(dataSource);
+ Map<String, String> map =
+ runner.query(FETCH_PROPERTY_BY_TYPE, new PropertiesHandler(),
+ PropertyType.DB.getNumVal());
+ for (String key : map.keySet()) {
+ String value = map.get(key);
+ if (key.endsWith(".version")) {
+ String tableName =
+ key.substring(0, key.length() - ".version".length());
+ installedVersions.put(tableName, value);
+ if (tables.containsKey(tableName)) {
+ tables.put(tableName, value);
+ }
+ }
+ }
+ } else {
+ logger.info("Properties table doesn't exist.");
+ }
+ }
+
+ private void loadInstalledTables() throws SQLException {
+ logger.info("Searching for installed tables");
+ Connection conn = null;
+ try {
+ conn = dataSource.getConnection();
+ ResultSet rs =
+ conn.getMetaData().getTables(conn.getCatalog(), null, null,
+ new String[] { "TABLE" });
+
+ while (rs.next()) {
+ tables.put(rs.getString("TABLE_NAME").toLowerCase(), "2.1");
+ }
+ } finally {
+ DbUtils.commitAndCloseQuietly(conn);
+ }
+ }
+
+ private void findMissingTables() {
+ File directory = new File(scriptPath);
+ File[] createScripts =
+ directory.listFiles(new FileIOUtils.PrefixSuffixFileFilter(
+ CREATE_SCRIPT_PREFIX, SQL_SCRIPT_SUFFIX));
+
+ for (File script : createScripts) {
+ String name = script.getName();
+ String[] nameSplit = name.split("\\.");
+ String tableName = nameSplit[1];
+
+ if (!tables.containsKey(tableName)) {
+ missingTables.add(tableName);
+ }
+ }
+ }
+
+ private void findOutOfDateTables() {
+ for (String key : tables.keySet()) {
+ String version = tables.get(key);
+
+ List<String> upgradeVersions = findOutOfDateTable(key, version);
+ if (upgradeVersions != null && !upgradeVersions.isEmpty()) {
+ upgradeList.put(key, upgradeVersions);
+ }
+ }
+ }
+
+ private List<String> findOutOfDateTable(String table, String version) {
+ File directory = new File(scriptPath);
+ ArrayList<String> versions = new ArrayList<String>();
+
+ File[] createScripts =
+ directory.listFiles(new FileIOUtils.PrefixSuffixFileFilter(
+ UPDATE_SCRIPT_PREFIX + table, SQL_SCRIPT_SUFFIX));
+ if (createScripts.length == 0) {
+ return null;
+ }
+
+ String updateFileNameVersion = UPDATE_SCRIPT_PREFIX + table + "." + version;
+ for (File file : createScripts) {
+ String fileName = file.getName();
+ if (fileName.compareTo(updateFileNameVersion) > 0) {
+ if (fileName.startsWith(updateFileNameVersion)) {
+ continue;
+ }
+
+ String[] split = fileName.split("\\.");
+ String versionNum = "";
+
+ for (int i = 2; i < split.length - 1; ++i) {
+ try {
+ Integer.parseInt(split[i]);
+ versionNum += split[i] + ".";
+ } catch (NumberFormatException e) {
+ break;
+ }
+ }
+ if (versionNum.endsWith(".")) {
+ versionNum = versionNum.substring(0, versionNum.length() - 1);
+
+ if (versionNum.compareTo(version) == 0) {
+ versions.add(versionNum);
+ }
+ }
+ }
+ }
+
+ Collections.sort(versions);
+ return versions;
+ }
+
+ private void createNewTables() throws SQLException, IOException {
+ Connection conn = dataSource.getConnection();
+ conn.setAutoCommit(false);
+ try {
+ // Make sure that properties table is created first.
+ if (missingTables.contains("properties")) {
+ runTableScripts(conn, "properties", version, dataSource.getDBType(),
+ false);
+ }
+ for (String table : missingTables) {
+ if (!table.equals("properties")) {
+ runTableScripts(conn, table, version, dataSource.getDBType(), false);
+ }
+ }
+ } finally {
+ conn.close();
+ }
+ }
+
+ private void updateTables() throws SQLException, IOException {
+ Connection conn = dataSource.getConnection();
+ conn.setAutoCommit(false);
+ try {
+ // Make sure that properties table is created first.
+ if (upgradeList.containsKey("properties")) {
+ for (String version : upgradeList.get("properties")) {
+ runTableScripts(conn, "properties", version, dataSource.getDBType(),
+ true);
+ }
+ }
+ for (String table : upgradeList.keySet()) {
+ if (!table.equals("properties")) {
+ for (String version : upgradeList.get(table)) {
+ runTableScripts(conn, table, version, dataSource.getDBType(), true);
+ }
+ }
+ }
+ } finally {
+ conn.close();
+ }
+ }
+
+ private void runTableScripts(Connection conn, String table, String version,
+ String dbType, boolean update) throws IOException, SQLException {
+ String scriptName = "";
+ if (update) {
+ scriptName = "update." + table + "." + version;
+ logger.info("Update table " + table + " to version " + version);
+ } else {
+ scriptName = "create." + table;
+ logger.info("Creating new table " + table + " version " + version);
+ }
+
+ String dbSpecificScript = scriptName + "." + dbType + ".sql";
+
+ File script = new File(scriptPath, dbSpecificScript);
+ if (!script.exists()) {
+ String dbScript = scriptName + ".sql";
+ script = new File(scriptPath, dbScript);
+
+ if (!script.exists()) {
+ throw new IOException("Creation files do not exist for table " + table);
+ }
+ }
+
+ BufferedInputStream buff = null;
+ try {
+ buff = new BufferedInputStream(new FileInputStream(script));
+ String queryStr = IOUtils.toString(buff);
+
+ String[] splitQuery = queryStr.split(";\\s*\n");
+
+ QueryRunner runner = new QueryRunner();
+
+ for (String query : splitQuery) {
+ runner.update(conn, query);
+ }
+
+ // If it's properties, then we want to commit the table before we update
+ // it
+ if (table.equals("properties")) {
+ conn.commit();
+ }
+
+ String propertyName = table + ".version";
+ if (!installedVersions.containsKey(table)) {
+ runner.update(conn, INSERT_DB_PROPERTY, propertyName,
+ DataSourceUtils.PropertyType.DB.getNumVal(), version,
+ System.currentTimeMillis());
+ } else {
+ runner.update(conn, UPDATE_DB_PROPERTY, version,
+ System.currentTimeMillis(), propertyName,
+ DataSourceUtils.PropertyType.DB.getNumVal());
+ }
+ conn.commit();
+ } finally {
+ IOUtils.closeQuietly(buff);
+ }
+ }
+
+ public static class PropertiesHandler implements
+ ResultSetHandler<Map<String, String>> {
+ @Override
+ public Map<String, String> handle(ResultSet rs) throws SQLException {
+ Map<String, String> results = new HashMap<String, String>();
+ while (rs.next()) {
+ String key = rs.getString(1);
+ String value = rs.getString(2);
+ results.put(key, value);
+ }
+
+ return results;
+ }
+ }
}
diff --git a/src/main/java/azkaban/database/AzkabanDatabaseUpdater.java b/src/main/java/azkaban/database/AzkabanDatabaseUpdater.java
index 4c620bf..6f410a3 100644
--- a/src/main/java/azkaban/database/AzkabanDatabaseUpdater.java
+++ b/src/main/java/azkaban/database/AzkabanDatabaseUpdater.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -31,67 +31,69 @@ import azkaban.utils.Props;
import azkaban.webapp.AzkabanServer;
public class AzkabanDatabaseUpdater {
- private static final Logger logger = Logger.getLogger(AzkabanDatabaseUpdater.class);
-
- public static void main(String[] args) throws Exception {
- OptionParser parser = new OptionParser();
-
- OptionSpec<String> scriptDirectory = parser
- .acceptsAll(Arrays.asList("s", "script"), "Directory of update scripts.")
- .withRequiredArg()
- .describedAs("script").ofType(String.class);
-
- OptionSpec<Void> updateOption =
- parser.acceptsAll(Arrays.asList("u", "update"), "Will update if necessary");
-
- Props props = AzkabanServer.loadProps(args, parser);
-
- if (props == null) {
- logger.error("Properties not found. Need it to connect to the db.");
- logger.error("Exiting...");
- return;
- }
-
- OptionSet options = parser.parse(args);
- boolean updateDB = false;
- if (options.has(updateOption)) {
- updateDB = true;
- }
- else {
- logger.info("Running DatabaseUpdater in test mode");
- }
-
- String scriptDir = "sql";
- if (options.has(scriptDirectory)) {
- scriptDir = options.valueOf(scriptDirectory);
- }
-
- runDatabaseUpdater(props, scriptDir, updateDB);
- }
-
- public static void runDatabaseUpdater(Props props, String sqlDir, boolean updateDB) throws IOException, SQLException {
- logger.info("Use scripting directory " + sqlDir);
-
- if (updateDB) {
- logger.info("Will auto update any changes.");
- }
- else {
- logger.info("Running DatabaseUpdater in test mode. Use -u to update");
- }
-
- AzkabanDatabaseSetup setup = new AzkabanDatabaseSetup(props);
- setup.loadTableInfo();
- if (!setup.needsUpdating()) {
- logger.info("Everything looks up to date.");
- return;
- }
-
- logger.info("Need to update the db.");
- setup.printUpgradePlan();
-
- if (updateDB) {
- logger.info("Updating DB");
- setup.updateDatabase(true,true);
- }
- }
+ private static final Logger logger = Logger
+ .getLogger(AzkabanDatabaseUpdater.class);
+
+ public static void main(String[] args) throws Exception {
+ OptionParser parser = new OptionParser();
+
+ OptionSpec<String> scriptDirectory =
+ parser
+ .acceptsAll(Arrays.asList("s", "script"),
+ "Directory of update scripts.").withRequiredArg()
+ .describedAs("script").ofType(String.class);
+
+ OptionSpec<Void> updateOption =
+ parser.acceptsAll(Arrays.asList("u", "update"),
+ "Will update if necessary");
+
+ Props props = AzkabanServer.loadProps(args, parser);
+
+ if (props == null) {
+ logger.error("Properties not found. Need it to connect to the db.");
+ logger.error("Exiting...");
+ return;
+ }
+
+ OptionSet options = parser.parse(args);
+ boolean updateDB = false;
+ if (options.has(updateOption)) {
+ updateDB = true;
+ } else {
+ logger.info("Running DatabaseUpdater in test mode");
+ }
+
+ String scriptDir = "sql";
+ if (options.has(scriptDirectory)) {
+ scriptDir = options.valueOf(scriptDirectory);
+ }
+
+ runDatabaseUpdater(props, scriptDir, updateDB);
+ }
+
+ public static void runDatabaseUpdater(Props props, String sqlDir,
+ boolean updateDB) throws IOException, SQLException {
+ logger.info("Use scripting directory " + sqlDir);
+
+ if (updateDB) {
+ logger.info("Will auto update any changes.");
+ } else {
+ logger.info("Running DatabaseUpdater in test mode. Use -u to update");
+ }
+
+ AzkabanDatabaseSetup setup = new AzkabanDatabaseSetup(props);
+ setup.loadTableInfo();
+ if (!setup.needsUpdating()) {
+ logger.info("Everything looks up to date.");
+ return;
+ }
+
+ logger.info("Need to update the db.");
+ setup.printUpgradePlan();
+
+ if (updateDB) {
+ logger.info("Updating DB");
+ setup.updateDatabase(true, true);
+ }
+ }
}
diff --git a/src/main/java/azkaban/database/AzkabanDataSource.java b/src/main/java/azkaban/database/AzkabanDataSource.java
index 501c801..19ce67c 100644
--- a/src/main/java/azkaban/database/AzkabanDataSource.java
+++ b/src/main/java/azkaban/database/AzkabanDataSource.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -19,7 +19,7 @@ package azkaban.database;
import org.apache.commons.dbcp.BasicDataSource;
public abstract class AzkabanDataSource extends BasicDataSource {
- public abstract boolean allowsOnDuplicateKey();
-
- public abstract String getDBType();
+ public abstract boolean allowsOnDuplicateKey();
+
+ public abstract String getDBType();
}
src/main/java/azkaban/database/DataSourceUtils.java 424(+217 -207)
diff --git a/src/main/java/azkaban/database/DataSourceUtils.java b/src/main/java/azkaban/database/DataSourceUtils.java
index a18f1cf..9983c25 100644
--- a/src/main/java/azkaban/database/DataSourceUtils.java
+++ b/src/main/java/azkaban/database/DataSourceUtils.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -26,208 +26,218 @@ import java.sql.Connection;
import azkaban.utils.Props;
public class DataSourceUtils {
-
- private static Logger logger = Logger.getLogger(DataSourceUtils.class);
-
- /**
- * Property types
- */
- public static enum PropertyType {
- DB(1);
-
- private int numVal;
-
- PropertyType(int numVal) {
- this.numVal = numVal;
- }
-
- public int getNumVal() {
- return numVal;
- }
-
- public static PropertyType fromInteger(int x) {
- switch (x) {
- case 1:
- return DB;
- default:
- return DB;
- }
- }
- }
-
- /**
- * Create Datasource from parameters in the properties
- *
- * @param props
- * @return
- */
- public static AzkabanDataSource getDataSource(Props props) {
- String databaseType = props.getString("database.type");
-
- AzkabanDataSource dataSource = null;
- if (databaseType.equals("mysql")) {
- int port = props.getInt("mysql.port");
- String host = props.getString("mysql.host");
- String database = props.getString("mysql.database");
- String user = props.getString("mysql.user");
- String password = props.getString("mysql.password");
- int numConnections = props.getInt("mysql.numconnections");
-
- dataSource = getMySQLDataSource(host, port, database, user, password, numConnections);
- }
- else if (databaseType.equals("h2")) {
- String path = props.getString("h2.path");
- dataSource = getH2DataSource(path);
- }
-
- return dataSource;
- }
-
- /**
- * Create a MySQL DataSource
- *
- * @param host
- * @param port
- * @param dbName
- * @param user
- * @param password
- * @param numConnections
- * @return
- */
- public static AzkabanDataSource getMySQLDataSource(String host, Integer port, String dbName, String user, String password, Integer numConnections) {
- return new MySQLBasicDataSource(host, port, dbName, user, password, numConnections);
- }
-
- /**
- * Create H2 DataSource
- * @param file
- * @return
- */
- public static AzkabanDataSource getH2DataSource(String file) {
- return new EmbeddedH2BasicDataSource(file);
- }
-
- /**
- * Hidden datasource
- */
- private DataSourceUtils() {
- }
-
- /**
- * MySQL data source based on AzkabanDataSource
- *
- */
- public static class MySQLBasicDataSource extends AzkabanDataSource {
-
- private static MonitorThread monitorThread = null;
-
- private MySQLBasicDataSource(String host, int port, String dbName, String user, String password, int numConnections) {
- super();
-
- String url = "jdbc:mysql://" + (host + ":" + port + "/" + dbName);
- addConnectionProperty("useUnicode","yes");
- addConnectionProperty("characterEncoding","UTF-8");
- setDriverClassName("com.mysql.jdbc.Driver");
- setUsername(user);
- setPassword(password);
- setUrl(url);
- setMaxActive(numConnections);
- setValidationQuery("/* ping */ select 1");
- setTestOnBorrow(true);
-
- if(monitorThread == null) {
- monitorThread = new MonitorThread(this);
- monitorThread.start();
- }
- }
-
- @Override
- public boolean allowsOnDuplicateKey() {
- return true;
- }
-
- @Override
- public String getDBType() {
- return "mysql";
- }
-
- private class MonitorThread extends Thread {
- private static final long MONITOR_THREAD_WAIT_INTERVAL_MS = 30*1000;
- private boolean shutdown = false;
- MySQLBasicDataSource dataSource;
-
- public MonitorThread(MySQLBasicDataSource mysqlSource) {
- this.setName("MySQL-DB-Monitor-Thread");
- dataSource = mysqlSource;
- }
-
- @SuppressWarnings("unused")
- public void shutdown() {
- shutdown = true;
- this.interrupt();
- }
-
- public void run() {
- while (!shutdown) {
- synchronized (this) {
- try {
- pingDB();
- wait(MONITOR_THREAD_WAIT_INTERVAL_MS);
- } catch (InterruptedException e) {
- logger.info("Interrupted. Probably to shut down.");
- }
- }
- }
- }
-
- private void pingDB() {
- Connection connection = null;
- try {
- connection = dataSource.getConnection();
- PreparedStatement query = connection.prepareStatement("SELECT 1");
- query.execute();
- } catch (SQLException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- logger.error("MySQL connection test failed. Please check MySQL connection health!");
- } finally {
- DbUtils.closeQuietly(connection);
- }
- }
- }
-
- }
-
- /**
- * H2 Datasource
- *
- */
- public static class EmbeddedH2BasicDataSource extends AzkabanDataSource {
- private EmbeddedH2BasicDataSource(String filePath) {
- super();
- String url = "jdbc:h2:file:" + filePath;
- setDriverClassName("org.h2.Driver");
- setUrl(url);
- }
-
- @Override
- public boolean allowsOnDuplicateKey() {
- return false;
- }
-
- @Override
- public String getDBType() {
- return "h2";
- }
- }
-
- public static void testConnection(DataSource ds) throws SQLException {
- QueryRunner runner = new QueryRunner(ds);
- runner.update("SHOW TABLES");
- }
-
- public static void testMySQLConnection(String host, Integer port, String dbName, String user, String password, Integer numConnections) throws SQLException {
- DataSource ds = new MySQLBasicDataSource(host, port, dbName, user, password, numConnections);
- testConnection(ds);
- }
+
+ private static Logger logger = Logger.getLogger(DataSourceUtils.class);
+
+ /**
+ * Property types
+ */
+ public static enum PropertyType {
+ DB(1);
+
+ private int numVal;
+
+ PropertyType(int numVal) {
+ this.numVal = numVal;
+ }
+
+ public int getNumVal() {
+ return numVal;
+ }
+
+ public static PropertyType fromInteger(int x) {
+ switch (x) {
+ case 1:
+ return DB;
+ default:
+ return DB;
+ }
+ }
+ }
+
+ /**
+ * Create Datasource from parameters in the properties
+ *
+ * @param props
+ * @return
+ */
+ public static AzkabanDataSource getDataSource(Props props) {
+ String databaseType = props.getString("database.type");
+
+ AzkabanDataSource dataSource = null;
+ if (databaseType.equals("mysql")) {
+ int port = props.getInt("mysql.port");
+ String host = props.getString("mysql.host");
+ String database = props.getString("mysql.database");
+ String user = props.getString("mysql.user");
+ String password = props.getString("mysql.password");
+ int numConnections = props.getInt("mysql.numconnections");
+
+ dataSource =
+ getMySQLDataSource(host, port, database, user, password,
+ numConnections);
+ } else if (databaseType.equals("h2")) {
+ String path = props.getString("h2.path");
+ dataSource = getH2DataSource(path);
+ }
+
+ return dataSource;
+ }
+
+ /**
+ * Create a MySQL DataSource
+ *
+ * @param host
+ * @param port
+ * @param dbName
+ * @param user
+ * @param password
+ * @param numConnections
+ * @return
+ */
+ public static AzkabanDataSource getMySQLDataSource(String host, Integer port,
+ String dbName, String user, String password, Integer numConnections) {
+ return new MySQLBasicDataSource(host, port, dbName, user, password,
+ numConnections);
+ }
+
+ /**
+ * Create H2 DataSource
+ *
+ * @param file
+ * @return
+ */
+ public static AzkabanDataSource getH2DataSource(String file) {
+ return new EmbeddedH2BasicDataSource(file);
+ }
+
+ /**
+ * Hidden datasource
+ */
+ private DataSourceUtils() {
+ }
+
+ /**
+ * MySQL data source based on AzkabanDataSource
+ *
+ */
+ public static class MySQLBasicDataSource extends AzkabanDataSource {
+
+ private static MonitorThread monitorThread = null;
+
+ private MySQLBasicDataSource(String host, int port, String dbName,
+ String user, String password, int numConnections) {
+ super();
+
+ String url = "jdbc:mysql://" + (host + ":" + port + "/" + dbName);
+ addConnectionProperty("useUnicode", "yes");
+ addConnectionProperty("characterEncoding", "UTF-8");
+ setDriverClassName("com.mysql.jdbc.Driver");
+ setUsername(user);
+ setPassword(password);
+ setUrl(url);
+ setMaxActive(numConnections);
+ setValidationQuery("/* ping */ select 1");
+ setTestOnBorrow(true);
+
+ if (monitorThread == null) {
+ monitorThread = new MonitorThread(this);
+ monitorThread.start();
+ }
+ }
+
+ @Override
+ public boolean allowsOnDuplicateKey() {
+ return true;
+ }
+
+ @Override
+ public String getDBType() {
+ return "mysql";
+ }
+
+ private class MonitorThread extends Thread {
+ private static final long MONITOR_THREAD_WAIT_INTERVAL_MS = 30 * 1000;
+ private boolean shutdown = false;
+ MySQLBasicDataSource dataSource;
+
+ public MonitorThread(MySQLBasicDataSource mysqlSource) {
+ this.setName("MySQL-DB-Monitor-Thread");
+ dataSource = mysqlSource;
+ }
+
+ @SuppressWarnings("unused")
+ public void shutdown() {
+ shutdown = true;
+ this.interrupt();
+ }
+
+ public void run() {
+ while (!shutdown) {
+ synchronized (this) {
+ try {
+ pingDB();
+ wait(MONITOR_THREAD_WAIT_INTERVAL_MS);
+ } catch (InterruptedException e) {
+ logger.info("Interrupted. Probably to shut down.");
+ }
+ }
+ }
+ }
+
+ private void pingDB() {
+ Connection connection = null;
+ try {
+ connection = dataSource.getConnection();
+ PreparedStatement query = connection.prepareStatement("SELECT 1");
+ query.execute();
+ } catch (SQLException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ logger
+ .error("MySQL connection test failed. Please check MySQL connection health!");
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+ }
+ }
+
+ }
+
+ /**
+ * H2 Datasource
+ *
+ */
+ public static class EmbeddedH2BasicDataSource extends AzkabanDataSource {
+ private EmbeddedH2BasicDataSource(String filePath) {
+ super();
+ String url = "jdbc:h2:file:" + filePath;
+ setDriverClassName("org.h2.Driver");
+ setUrl(url);
+ }
+
+ @Override
+ public boolean allowsOnDuplicateKey() {
+ return false;
+ }
+
+ @Override
+ public String getDBType() {
+ return "h2";
+ }
+ }
+
+ public static void testConnection(DataSource ds) throws SQLException {
+ QueryRunner runner = new QueryRunner(ds);
+ runner.update("SHOW TABLES");
+ }
+
+ public static void testMySQLConnection(String host, Integer port,
+ String dbName, String user, String password, Integer numConnections)
+ throws SQLException {
+ DataSource ds =
+ new MySQLBasicDataSource(host, port, dbName, user, password,
+ numConnections);
+ testConnection(ds);
+ }
}
src/main/java/azkaban/execapp/AzkabanExecutorServer.java 546(+276 -270)
diff --git a/src/main/java/azkaban/execapp/AzkabanExecutorServer.java b/src/main/java/azkaban/execapp/AzkabanExecutorServer.java
index 0cb7c55..c0f09b3 100644
--- a/src/main/java/azkaban/execapp/AzkabanExecutorServer.java
+++ b/src/main/java/azkaban/execapp/AzkabanExecutorServer.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -48,271 +48,277 @@ import azkaban.webapp.AzkabanServer;
import azkaban.webapp.servlet.AzkabanServletContextListener;
public class AzkabanExecutorServer {
- private static final Logger logger = Logger.getLogger(AzkabanExecutorServer.class);
- private static final int MAX_FORM_CONTENT_SIZE = 10*1024*1024;
-
- public static final String AZKABAN_HOME = "AZKABAN_HOME";
- public static final String DEFAULT_CONF_PATH = "conf";
- public static final String AZKABAN_PROPERTIES_FILE = "azkaban.properties";
- public static final String AZKABAN_PRIVATE_PROPERTIES_FILE = "azkaban.private.properties";
- public static final String JOBTYPE_PLUGIN_DIR = "azkaban.jobtype.plugin.dir";
- public static final int DEFAULT_PORT_NUMBER = 12321;
-
- private static final String DEFAULT_TIMEZONE_ID = "default.timezone.id";
- private static final int DEFAULT_THREAD_NUMBER = 50;
-
- private static AzkabanExecutorServer app;
-
- private ExecutorLoader executionLoader;
- private ProjectLoader projectLoader;
- private FlowRunnerManager runnerManager;
- private Props props;
- private Props executorGlobalProps;
- private Server server;
-
- private ArrayList<ObjectName> registeredMBeans = new ArrayList<ObjectName>();
- private MBeanServer mbeanServer;
-
- /**
- * Constructor
- *
- * @throws Exception
- */
- public AzkabanExecutorServer(Props props) throws Exception {
- this.props = props;
-
- int portNumber = props.getInt("executor.port", DEFAULT_PORT_NUMBER);
- int maxThreads = props.getInt("executor.maxThreads", DEFAULT_THREAD_NUMBER);
-
- server = new Server(portNumber);
- QueuedThreadPool httpThreadPool = new QueuedThreadPool(maxThreads);
- server.setThreadPool(httpThreadPool);
-
- boolean isStatsOn = props.getBoolean("executor.connector.stats", true);
- logger.info("Setting up connector with stats on: " + isStatsOn);
-
- for (Connector connector : server.getConnectors()) {
- connector.setStatsOn(isStatsOn);
- }
-
- Context root = new Context(server, "/", Context.SESSIONS);
- root.setMaxFormContentSize(MAX_FORM_CONTENT_SIZE);
-
- root.addServlet(new ServletHolder(new ExecutorServlet()), "/executor");
- root.addServlet(new ServletHolder(new JMXHttpServlet()), "/jmx");
- root.setAttribute(AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY, this);
-
-
- executionLoader = createExecLoader(props);
- projectLoader = createProjectLoader(props);
- runnerManager = new FlowRunnerManager(props, executionLoader, projectLoader, this.getClass().getClassLoader());
-
- configureMBeanServer();
-
- try {
- server.start();
- }
- catch (Exception e) {
- logger.warn(e);
- Utils.croak(e.getMessage(), 1);
- }
-
- logger.info("Azkaban Executor Server started on port " + portNumber);
- }
-
- private ExecutorLoader createExecLoader(Props props) {
- return new JdbcExecutorLoader(props);
- }
-
- private ProjectLoader createProjectLoader(Props props) {
- return new JdbcProjectLoader(props);
- }
-
- public void stopServer() throws Exception {
- server.stop();
- server.destroy();
- }
-
- public ProjectLoader getProjectLoader() {
- return projectLoader;
- }
-
- public ExecutorLoader getExecutorLoader() {
- return executionLoader;
- }
-
- /**
- * Returns the global azkaban properties
- *
- * @return
- */
- public Props getAzkabanProps() {
- return props;
- }
-
- public Props getExecutorGlobalProps() {
- return executorGlobalProps;
- }
-
- /**
- * Azkaban using Jetty
- *
- * @param args
- * @throws IOException
- */
- public static void main(String[] args) throws Exception {
- logger.error("Starting Jetty Azkaban Executor...");
- Props azkabanSettings = AzkabanServer.loadProps(args);
-
- if (azkabanSettings == null) {
- logger.error("Azkaban Properties not loaded.");
- logger.error("Exiting Azkaban Executor Server...");
- return;
- }
-
- // Setup time zone
- if (azkabanSettings.containsKey(DEFAULT_TIMEZONE_ID)) {
- String timezone = azkabanSettings.getString(DEFAULT_TIMEZONE_ID);
- System.setProperty("user.timezone", timezone);
- TimeZone.setDefault(TimeZone.getTimeZone(timezone));
- DateTimeZone.setDefault(DateTimeZone.forID(timezone));
-
- logger.info("Setting timezone to " + timezone);
- }
-
- app = new AzkabanExecutorServer(azkabanSettings);
-
- Runtime.getRuntime().addShutdownHook(new Thread() {
-
- public void run() {
- logger.info("Shutting down http server...");
- try {
- app.stopServer();
- } catch (Exception e) {
- logger.error("Error while shutting down http server.", e);
- }
- logger.info("kk thx bye.");
- }
- });
- }
-
- /**
- * Loads the Azkaban property file from the AZKABAN_HOME conf directory
- *
- * @return
- */
- /*package*/ static Props loadConfigurationFromAzkabanHome() {
- String azkabanHome = System.getenv("AZKABAN_HOME");
-
- if (azkabanHome == null) {
- logger.error("AZKABAN_HOME not set. Will try default.");
- return null;
- }
-
- if (!new File(azkabanHome).isDirectory()
- || !new File(azkabanHome).canRead()) {
- logger.error(azkabanHome + " is not a readable directory.");
- return null;
- }
-
- File confPath = new File(azkabanHome, DEFAULT_CONF_PATH);
- if (!confPath.exists() || !confPath.isDirectory()
- || !confPath.canRead()) {
- logger.error(azkabanHome
- + " does not contain a readable conf directory.");
- return null;
- }
-
- return loadAzkabanConfigurationFromDirectory(confPath);
- }
-
- public FlowRunnerManager getFlowRunnerManager() {
- return runnerManager;
- }
-
- /**
- * Loads the Azkaban conf file int a Props object
- *
- * @param path
- * @return
- */
- private static Props loadAzkabanConfigurationFromDirectory(File dir) {
- File azkabanPrivatePropsFile = new File(dir, AZKABAN_PRIVATE_PROPERTIES_FILE);
- File azkabanPropsFile = new File(dir, AZKABAN_PROPERTIES_FILE);
-
- Props props = null;
- try {
- // This is purely optional
- if (azkabanPrivatePropsFile.exists() && azkabanPrivatePropsFile.isFile()) {
- logger.info("Loading azkaban private properties file" );
- props = new Props(null, azkabanPrivatePropsFile);
- }
-
- if (azkabanPropsFile.exists() && azkabanPropsFile.isFile()) {
- logger.info("Loading azkaban properties file" );
- props = new Props(props, azkabanPropsFile);
- }
- } catch (FileNotFoundException e) {
- logger.error("File not found. Could not load azkaban config file", e);
- } catch (IOException e) {
- logger.error("File found, but error reading. Could not load azkaban config file", e);
- }
-
- return props;
- }
-
- private void configureMBeanServer() {
- logger.info("Registering MBeans...");
- mbeanServer = ManagementFactory.getPlatformMBeanServer();
-
- registerMbean("executorJetty", new JmxJettyServer(server));
- registerMbean("flowRunnerManager", new JmxFlowRunnerManager(runnerManager));
- }
-
- public void close() {
- try {
- for (ObjectName name : registeredMBeans) {
- mbeanServer.unregisterMBean(name);
- logger.info("Jmx MBean " + name.getCanonicalName() + " unregistered.");
- }
- } catch (Exception e) {
- logger.error("Failed to cleanup MBeanServer", e);
- }
- }
-
- private void registerMbean(String name, Object mbean) {
- Class<?> mbeanClass = mbean.getClass();
- ObjectName mbeanName;
- try {
- mbeanName = new ObjectName(mbeanClass.getName() + ":name=" + name);
- mbeanServer.registerMBean(mbean, mbeanName);
- logger.info("Bean " + mbeanClass.getCanonicalName() + " registered.");
- registeredMBeans.add(mbeanName);
- } catch (Exception e) {
- logger.error("Error registering mbean " + mbeanClass.getCanonicalName(), e);
- }
-
- }
-
- public List<ObjectName> getMbeanNames() {
- return registeredMBeans;
- }
-
- public MBeanInfo getMBeanInfo(ObjectName name) {
- try {
- return mbeanServer.getMBeanInfo(name);
- } catch (Exception e) {
- logger.error(e);
- return null;
- }
- }
-
- public Object getMBeanAttribute(ObjectName name, String attribute) {
- try {
- return mbeanServer.getAttribute(name, attribute);
- } catch (Exception e) {
- logger.error(e);
- return null;
- }
- }
+ private static final Logger logger = Logger
+ .getLogger(AzkabanExecutorServer.class);
+ private static final int MAX_FORM_CONTENT_SIZE = 10 * 1024 * 1024;
+
+ public static final String AZKABAN_HOME = "AZKABAN_HOME";
+ public static final String DEFAULT_CONF_PATH = "conf";
+ public static final String AZKABAN_PROPERTIES_FILE = "azkaban.properties";
+ public static final String AZKABAN_PRIVATE_PROPERTIES_FILE =
+ "azkaban.private.properties";
+ public static final String JOBTYPE_PLUGIN_DIR = "azkaban.jobtype.plugin.dir";
+ public static final int DEFAULT_PORT_NUMBER = 12321;
+
+ private static final String DEFAULT_TIMEZONE_ID = "default.timezone.id";
+ private static final int DEFAULT_THREAD_NUMBER = 50;
+
+ private static AzkabanExecutorServer app;
+
+ private ExecutorLoader executionLoader;
+ private ProjectLoader projectLoader;
+ private FlowRunnerManager runnerManager;
+ private Props props;
+ private Props executorGlobalProps;
+ private Server server;
+
+ private ArrayList<ObjectName> registeredMBeans = new ArrayList<ObjectName>();
+ private MBeanServer mbeanServer;
+
+ /**
+ * Constructor
+ *
+ * @throws Exception
+ */
+ public AzkabanExecutorServer(Props props) throws Exception {
+ this.props = props;
+
+ int portNumber = props.getInt("executor.port", DEFAULT_PORT_NUMBER);
+ int maxThreads = props.getInt("executor.maxThreads", DEFAULT_THREAD_NUMBER);
+
+ server = new Server(portNumber);
+ QueuedThreadPool httpThreadPool = new QueuedThreadPool(maxThreads);
+ server.setThreadPool(httpThreadPool);
+
+ boolean isStatsOn = props.getBoolean("executor.connector.stats", true);
+ logger.info("Setting up connector with stats on: " + isStatsOn);
+
+ for (Connector connector : server.getConnectors()) {
+ connector.setStatsOn(isStatsOn);
+ }
+
+ Context root = new Context(server, "/", Context.SESSIONS);
+ root.setMaxFormContentSize(MAX_FORM_CONTENT_SIZE);
+
+ root.addServlet(new ServletHolder(new ExecutorServlet()), "/executor");
+ root.addServlet(new ServletHolder(new JMXHttpServlet()), "/jmx");
+ root.setAttribute(
+ AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY, this);
+
+ executionLoader = createExecLoader(props);
+ projectLoader = createProjectLoader(props);
+ runnerManager =
+ new FlowRunnerManager(props, executionLoader, projectLoader, this
+ .getClass().getClassLoader());
+
+ configureMBeanServer();
+
+ try {
+ server.start();
+ } catch (Exception e) {
+ logger.warn(e);
+ Utils.croak(e.getMessage(), 1);
+ }
+
+ logger.info("Azkaban Executor Server started on port " + portNumber);
+ }
+
+ private ExecutorLoader createExecLoader(Props props) {
+ return new JdbcExecutorLoader(props);
+ }
+
+ private ProjectLoader createProjectLoader(Props props) {
+ return new JdbcProjectLoader(props);
+ }
+
+ public void stopServer() throws Exception {
+ server.stop();
+ server.destroy();
+ }
+
+ public ProjectLoader getProjectLoader() {
+ return projectLoader;
+ }
+
+ public ExecutorLoader getExecutorLoader() {
+ return executionLoader;
+ }
+
+ /**
+ * Returns the global azkaban properties
+ *
+ * @return
+ */
+ public Props getAzkabanProps() {
+ return props;
+ }
+
+ public Props getExecutorGlobalProps() {
+ return executorGlobalProps;
+ }
+
+ /**
+ * Azkaban using Jetty
+ *
+ * @param args
+ * @throws IOException
+ */
+ public static void main(String[] args) throws Exception {
+ logger.error("Starting Jetty Azkaban Executor...");
+ Props azkabanSettings = AzkabanServer.loadProps(args);
+
+ if (azkabanSettings == null) {
+ logger.error("Azkaban Properties not loaded.");
+ logger.error("Exiting Azkaban Executor Server...");
+ return;
+ }
+
+ // Setup time zone
+ if (azkabanSettings.containsKey(DEFAULT_TIMEZONE_ID)) {
+ String timezone = azkabanSettings.getString(DEFAULT_TIMEZONE_ID);
+ System.setProperty("user.timezone", timezone);
+ TimeZone.setDefault(TimeZone.getTimeZone(timezone));
+ DateTimeZone.setDefault(DateTimeZone.forID(timezone));
+
+ logger.info("Setting timezone to " + timezone);
+ }
+
+ app = new AzkabanExecutorServer(azkabanSettings);
+
+ Runtime.getRuntime().addShutdownHook(new Thread() {
+
+ public void run() {
+ logger.info("Shutting down http server...");
+ try {
+ app.stopServer();
+ } catch (Exception e) {
+ logger.error("Error while shutting down http server.", e);
+ }
+ logger.info("kk thx bye.");
+ }
+ });
+ }
+
+ /**
+ * Loads the Azkaban property file from the AZKABAN_HOME conf directory
+ *
+ * @return
+ */
+ /* package */static Props loadConfigurationFromAzkabanHome() {
+ String azkabanHome = System.getenv("AZKABAN_HOME");
+
+ if (azkabanHome == null) {
+ logger.error("AZKABAN_HOME not set. Will try default.");
+ return null;
+ }
+
+ if (!new File(azkabanHome).isDirectory()
+ || !new File(azkabanHome).canRead()) {
+ logger.error(azkabanHome + " is not a readable directory.");
+ return null;
+ }
+
+ File confPath = new File(azkabanHome, DEFAULT_CONF_PATH);
+ if (!confPath.exists() || !confPath.isDirectory() || !confPath.canRead()) {
+ logger
+ .error(azkabanHome + " does not contain a readable conf directory.");
+ return null;
+ }
+
+ return loadAzkabanConfigurationFromDirectory(confPath);
+ }
+
+ public FlowRunnerManager getFlowRunnerManager() {
+ return runnerManager;
+ }
+
+ /**
+ * Loads the Azkaban conf file int a Props object
+ *
+ * @param path
+ * @return
+ */
+ private static Props loadAzkabanConfigurationFromDirectory(File dir) {
+ File azkabanPrivatePropsFile =
+ new File(dir, AZKABAN_PRIVATE_PROPERTIES_FILE);
+ File azkabanPropsFile = new File(dir, AZKABAN_PROPERTIES_FILE);
+
+ Props props = null;
+ try {
+ // This is purely optional
+ if (azkabanPrivatePropsFile.exists() && azkabanPrivatePropsFile.isFile()) {
+ logger.info("Loading azkaban private properties file");
+ props = new Props(null, azkabanPrivatePropsFile);
+ }
+
+ if (azkabanPropsFile.exists() && azkabanPropsFile.isFile()) {
+ logger.info("Loading azkaban properties file");
+ props = new Props(props, azkabanPropsFile);
+ }
+ } catch (FileNotFoundException e) {
+ logger.error("File not found. Could not load azkaban config file", e);
+ } catch (IOException e) {
+ logger.error(
+ "File found, but error reading. Could not load azkaban config file",
+ e);
+ }
+
+ return props;
+ }
+
+ private void configureMBeanServer() {
+ logger.info("Registering MBeans...");
+ mbeanServer = ManagementFactory.getPlatformMBeanServer();
+
+ registerMbean("executorJetty", new JmxJettyServer(server));
+ registerMbean("flowRunnerManager", new JmxFlowRunnerManager(runnerManager));
+ }
+
+ public void close() {
+ try {
+ for (ObjectName name : registeredMBeans) {
+ mbeanServer.unregisterMBean(name);
+ logger.info("Jmx MBean " + name.getCanonicalName() + " unregistered.");
+ }
+ } catch (Exception e) {
+ logger.error("Failed to cleanup MBeanServer", e);
+ }
+ }
+
+ private void registerMbean(String name, Object mbean) {
+ Class<?> mbeanClass = mbean.getClass();
+ ObjectName mbeanName;
+ try {
+ mbeanName = new ObjectName(mbeanClass.getName() + ":name=" + name);
+ mbeanServer.registerMBean(mbean, mbeanName);
+ logger.info("Bean " + mbeanClass.getCanonicalName() + " registered.");
+ registeredMBeans.add(mbeanName);
+ } catch (Exception e) {
+ logger.error("Error registering mbean " + mbeanClass.getCanonicalName(),
+ e);
+ }
+
+ }
+
+ public List<ObjectName> getMbeanNames() {
+ return registeredMBeans;
+ }
+
+ public MBeanInfo getMBeanInfo(ObjectName name) {
+ try {
+ return mbeanServer.getMBeanInfo(name);
+ } catch (Exception e) {
+ logger.error(e);
+ return null;
+ }
+ }
+
+ public Object getMBeanAttribute(ObjectName name, String attribute) {
+ try {
+ return mbeanServer.getAttribute(name, attribute);
+ } catch (Exception e) {
+ logger.error(e);
+ return null;
+ }
+ }
}
diff --git a/src/main/java/azkaban/execapp/event/BlockingStatus.java b/src/main/java/azkaban/execapp/event/BlockingStatus.java
index 4bf638d..eb10a92 100644
--- a/src/main/java/azkaban/execapp/event/BlockingStatus.java
+++ b/src/main/java/azkaban/execapp/event/BlockingStatus.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -19,58 +19,58 @@ package azkaban.execapp.event;
import azkaban.executor.Status;
public class BlockingStatus {
- private static final long WAIT_TIME = 5*60*1000;
- private final int execId;
- private final String jobId;
- private Status status;
-
- public BlockingStatus(int execId, String jobId, Status initialStatus) {
- this.execId = execId;
- this.jobId = jobId;
- this.status = initialStatus;
- }
-
- public Status blockOnFinishedStatus() {
- if (status == null) {
- return null;
- }
-
- while (!Status.isStatusFinished(status)) {
- synchronized(this) {
- try {
- this.wait(WAIT_TIME);
- } catch (InterruptedException e) {
- }
- }
- }
-
- return status;
- }
+ private static final long WAIT_TIME = 5 * 60 * 1000;
+ private final int execId;
+ private final String jobId;
+ private Status status;
- public Status viewStatus() {
- return this.status;
- }
-
- public void unblock() {
- synchronized(this) {
- this.notifyAll();
- }
- }
-
- public void changeStatus(Status status) {
- synchronized(this) {
- this.status = status;
- if (Status.isStatusFinished(status)) {
- unblock();
- }
- }
- }
-
- public int getExecId() {
- return execId;
- }
+ public BlockingStatus(int execId, String jobId, Status initialStatus) {
+ this.execId = execId;
+ this.jobId = jobId;
+ this.status = initialStatus;
+ }
- public String getJobId() {
- return jobId;
- }
+ public Status blockOnFinishedStatus() {
+ if (status == null) {
+ return null;
+ }
+
+ while (!Status.isStatusFinished(status)) {
+ synchronized (this) {
+ try {
+ this.wait(WAIT_TIME);
+ } catch (InterruptedException e) {
+ }
+ }
+ }
+
+ return status;
+ }
+
+ public Status viewStatus() {
+ return this.status;
+ }
+
+ public void unblock() {
+ synchronized (this) {
+ this.notifyAll();
+ }
+ }
+
+ public void changeStatus(Status status) {
+ synchronized (this) {
+ this.status = status;
+ if (Status.isStatusFinished(status)) {
+ unblock();
+ }
+ }
+ }
+
+ public int getExecId() {
+ return execId;
+ }
+
+ public String getJobId() {
+ return jobId;
+ }
}
src/main/java/azkaban/execapp/event/Event.java 115(+58 -57)
diff --git a/src/main/java/azkaban/execapp/event/Event.java b/src/main/java/azkaban/execapp/event/Event.java
index 16fbb0a..261b211 100644
--- a/src/main/java/azkaban/execapp/event/Event.java
+++ b/src/main/java/azkaban/execapp/event/Event.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -17,59 +17,60 @@
package azkaban.execapp.event;
public class Event {
- public enum Type {
- FLOW_STARTED,
- FLOW_FINISHED,
- JOB_STARTED,
- JOB_FINISHED,
- JOB_STATUS_CHANGED,
- EXTERNAL_FLOW_UPDATED,
- EXTERNAL_JOB_UPDATED
- }
-
- private final Object runner;
- private final Type type;
- private final Object eventData;
- private final long time;
- private final boolean shouldUpdate;
-
- private Event(Object runner, Type type, Object eventData, boolean shouldUpdate) {
- this.runner = runner;
- this.type = type;
- this.eventData = eventData;
- this.time = System.currentTimeMillis();
- this.shouldUpdate = shouldUpdate;
- }
-
- public Object getRunner() {
- return runner;
- }
-
- public Type getType() {
- return type;
- }
-
- public long getTime() {
- return time;
- }
-
- public Object getData() {
- return eventData;
- }
-
- public static Event create(Object runner, Type type) {
- return new Event(runner, type, null, true);
- }
-
- public static Event create(Object runner, Type type, Object eventData) {
- return new Event(runner, type, eventData, true);
- }
-
- public static Event create(Object runner, Type type, Object eventData, boolean shouldUpdate) {
- return new Event(runner, type, eventData, shouldUpdate);
- }
+ public enum Type {
+ FLOW_STARTED,
+ FLOW_FINISHED,
+ JOB_STARTED,
+ JOB_FINISHED,
+ JOB_STATUS_CHANGED,
+ EXTERNAL_FLOW_UPDATED,
+ EXTERNAL_JOB_UPDATED
+ }
- public boolean isShouldUpdate() {
- return shouldUpdate;
- }
+ private final Object runner;
+ private final Type type;
+ private final Object eventData;
+ private final long time;
+ private final boolean shouldUpdate;
+
+ private Event(Object runner, Type type, Object eventData, boolean shouldUpdate) {
+ this.runner = runner;
+ this.type = type;
+ this.eventData = eventData;
+ this.time = System.currentTimeMillis();
+ this.shouldUpdate = shouldUpdate;
+ }
+
+ public Object getRunner() {
+ return runner;
+ }
+
+ public Type getType() {
+ return type;
+ }
+
+ public long getTime() {
+ return time;
+ }
+
+ public Object getData() {
+ return eventData;
+ }
+
+ public static Event create(Object runner, Type type) {
+ return new Event(runner, type, null, true);
+ }
+
+ public static Event create(Object runner, Type type, Object eventData) {
+ return new Event(runner, type, eventData, true);
+ }
+
+ public static Event create(Object runner, Type type, Object eventData,
+ boolean shouldUpdate) {
+ return new Event(runner, type, eventData, shouldUpdate);
+ }
+
+ public boolean isShouldUpdate() {
+ return shouldUpdate;
+ }
}
diff --git a/src/main/java/azkaban/execapp/event/EventHandler.java b/src/main/java/azkaban/execapp/event/EventHandler.java
index 9887176..13ecab6 100644
--- a/src/main/java/azkaban/execapp/event/EventHandler.java
+++ b/src/main/java/azkaban/execapp/event/EventHandler.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -20,23 +20,24 @@ import java.util.ArrayList;
import java.util.HashSet;
public class EventHandler {
- private HashSet<EventListener> listeners = new HashSet<EventListener>();
-
- public EventHandler() {
- }
+ private HashSet<EventListener> listeners = new HashSet<EventListener>();
- public void addListener(EventListener listener) {
- listeners.add(listener);
- }
-
- public void fireEventListeners(Event event) {
- ArrayList<EventListener> listeners = new ArrayList<EventListener>(this.listeners);
- for (EventListener listener: listeners) {
- listener.handleEvent(event);
- }
- }
-
- public void removeListener(EventListener listener) {
- listeners.remove(listener);
- }
+ public EventHandler() {
+ }
+
+ public void addListener(EventListener listener) {
+ listeners.add(listener);
+ }
+
+ public void fireEventListeners(Event event) {
+ ArrayList<EventListener> listeners =
+ new ArrayList<EventListener>(this.listeners);
+ for (EventListener listener : listeners) {
+ listener.handleEvent(event);
+ }
+ }
+
+ public void removeListener(EventListener listener) {
+ listeners.remove(listener);
+ }
}
diff --git a/src/main/java/azkaban/execapp/event/EventListener.java b/src/main/java/azkaban/execapp/event/EventListener.java
index 490ec36..0dd1dcf 100644
--- a/src/main/java/azkaban/execapp/event/EventListener.java
+++ b/src/main/java/azkaban/execapp/event/EventListener.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -17,5 +17,5 @@
package azkaban.execapp.event;
public interface EventListener {
- public void handleEvent(Event event);
+ public void handleEvent(Event event);
}
src/main/java/azkaban/execapp/event/FlowWatcher.java 176(+89 -87)
diff --git a/src/main/java/azkaban/execapp/event/FlowWatcher.java b/src/main/java/azkaban/execapp/event/FlowWatcher.java
index 9ae7ed0..74e33ed 100644
--- a/src/main/java/azkaban/execapp/event/FlowWatcher.java
+++ b/src/main/java/azkaban/execapp/event/FlowWatcher.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -26,88 +26,90 @@ import azkaban.executor.ExecutableNode;
import azkaban.executor.Status;
public abstract class FlowWatcher {
- private Logger logger;
-
- private int execId;
- private ExecutableFlow flow;
- private Map<String, BlockingStatus> map = new ConcurrentHashMap<String, BlockingStatus>();
- private boolean cancelWatch = false;
-
- public FlowWatcher(int execId) {
- this.execId = execId;
- }
-
- public void setFlow(ExecutableFlow flow) {
- this.flow = flow;
- }
-
- public void setLogger(Logger logger) {
- this.logger = logger;
- }
-
- protected Logger getLogger() {
- return this.logger;
- }
-
- /**
- * Called to fire events to the JobRunner listeners
- * @param jobId
- */
- protected synchronized void handleJobStatusChange(String jobId, Status status) {
- BlockingStatus block = map.get(jobId);
- if (block != null) {
- block.changeStatus(status);
- }
- }
-
- public int getExecId() {
- return execId;
- }
-
- public synchronized BlockingStatus getBlockingStatus(String jobId) {
- if (cancelWatch) {
- return null;
- }
-
- ExecutableNode node = flow.getExecutableNodePath(jobId);
- if (node == null) {
- return null;
- }
-
- BlockingStatus blockingStatus = map.get(jobId);
- if (blockingStatus == null) {
- blockingStatus = new BlockingStatus(execId, jobId, node.getStatus());
- map.put(jobId, blockingStatus);
- }
-
- return blockingStatus;
- }
-
- public Status peekStatus(String jobId) {
- ExecutableNode node = flow.getExecutableNodePath(jobId);
- if (node != null) {
- return node.getStatus();
- }
-
- return null;
- }
-
- public synchronized void unblockAllWatches() {
- logger.info("Unblock all watches on " + execId);
- cancelWatch = true;
-
- for(BlockingStatus status : map.values()) {
- logger.info("Unblocking " + status.getJobId());
- status.changeStatus(Status.SKIPPED);
- status.unblock();
- }
-
- logger.info("Successfully unblocked all watches on " + execId);
- }
-
- public boolean isWatchCancelled() {
- return cancelWatch;
- }
-
- public abstract void stopWatcher();
+ private Logger logger;
+
+ private int execId;
+ private ExecutableFlow flow;
+ private Map<String, BlockingStatus> map =
+ new ConcurrentHashMap<String, BlockingStatus>();
+ private boolean cancelWatch = false;
+
+ public FlowWatcher(int execId) {
+ this.execId = execId;
+ }
+
+ public void setFlow(ExecutableFlow flow) {
+ this.flow = flow;
+ }
+
+ public void setLogger(Logger logger) {
+ this.logger = logger;
+ }
+
+ protected Logger getLogger() {
+ return this.logger;
+ }
+
+ /**
+ * Called to fire events to the JobRunner listeners
+ *
+ * @param jobId
+ */
+ protected synchronized void handleJobStatusChange(String jobId, Status status) {
+ BlockingStatus block = map.get(jobId);
+ if (block != null) {
+ block.changeStatus(status);
+ }
+ }
+
+ public int getExecId() {
+ return execId;
+ }
+
+ public synchronized BlockingStatus getBlockingStatus(String jobId) {
+ if (cancelWatch) {
+ return null;
+ }
+
+ ExecutableNode node = flow.getExecutableNodePath(jobId);
+ if (node == null) {
+ return null;
+ }
+
+ BlockingStatus blockingStatus = map.get(jobId);
+ if (blockingStatus == null) {
+ blockingStatus = new BlockingStatus(execId, jobId, node.getStatus());
+ map.put(jobId, blockingStatus);
+ }
+
+ return blockingStatus;
+ }
+
+ public Status peekStatus(String jobId) {
+ ExecutableNode node = flow.getExecutableNodePath(jobId);
+ if (node != null) {
+ return node.getStatus();
+ }
+
+ return null;
+ }
+
+ public synchronized void unblockAllWatches() {
+ logger.info("Unblock all watches on " + execId);
+ cancelWatch = true;
+
+ for (BlockingStatus status : map.values()) {
+ logger.info("Unblocking " + status.getJobId());
+ status.changeStatus(Status.SKIPPED);
+ status.unblock();
+ }
+
+ logger.info("Successfully unblocked all watches on " + execId);
+ }
+
+ public boolean isWatchCancelled() {
+ return cancelWatch;
+ }
+
+ public abstract void stopWatcher();
}
diff --git a/src/main/java/azkaban/execapp/event/LocalFlowWatcher.java b/src/main/java/azkaban/execapp/event/LocalFlowWatcher.java
index fc85dc8..ce60ee6 100644
--- a/src/main/java/azkaban/execapp/event/LocalFlowWatcher.java
+++ b/src/main/java/azkaban/execapp/event/LocalFlowWatcher.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -22,57 +22,55 @@ import azkaban.execapp.event.Event.Type;
import azkaban.executor.ExecutableNode;
public class LocalFlowWatcher extends FlowWatcher {
- private LocalFlowWatcherListener watcherListener;
- private FlowRunner runner;
- private boolean isShutdown = false;
-
- public LocalFlowWatcher(FlowRunner runner) {
- super(runner.getExecutableFlow().getExecutionId());
- super.setFlow(runner.getExecutableFlow());
-
- watcherListener = new LocalFlowWatcherListener();
- this.runner = runner;
- runner.addListener(watcherListener);
- }
+ private LocalFlowWatcherListener watcherListener;
+ private FlowRunner runner;
+ private boolean isShutdown = false;
- @Override
- public void stopWatcher() {
- // Just freeing stuff
- if(isShutdown) {
- return;
- }
-
- isShutdown = true;
- runner.removeListener(watcherListener);
- runner = null;
-
- getLogger().info("Stopping watcher, and unblocking pipeline");
- super.unblockAllWatches();
- }
+ public LocalFlowWatcher(FlowRunner runner) {
+ super(runner.getExecutableFlow().getExecutionId());
+ super.setFlow(runner.getExecutableFlow());
- public class LocalFlowWatcherListener implements EventListener {
- @Override
- public void handleEvent(Event event) {
- if (event.getType() == Type.JOB_FINISHED) {
- if (event.getRunner() instanceof FlowRunner) {
- // The flow runner will finish a job without it running
- Object data = event.getData();
- if (data instanceof ExecutableNode) {
- ExecutableNode node = (ExecutableNode)data;
- handleJobStatusChange(node.getNestedId(), node.getStatus());
- }
- }
- else if (event.getRunner() instanceof JobRunner) {
- // A job runner is finished
- JobRunner runner = (JobRunner)event.getRunner();
- ExecutableNode node = runner.getNode();
- System.out.println(node + " looks like " + node.getStatus());
- handleJobStatusChange(node.getNestedId(), node.getStatus());
- }
- }
- else if (event.getType() == Type.FLOW_FINISHED) {
- stopWatcher();
- }
- }
- }
+ watcherListener = new LocalFlowWatcherListener();
+ this.runner = runner;
+ runner.addListener(watcherListener);
+ }
+
+ @Override
+ public void stopWatcher() {
+ // Just freeing stuff
+ if (isShutdown) {
+ return;
+ }
+
+ isShutdown = true;
+ runner.removeListener(watcherListener);
+ runner = null;
+
+ getLogger().info("Stopping watcher, and unblocking pipeline");
+ super.unblockAllWatches();
+ }
+
+ public class LocalFlowWatcherListener implements EventListener {
+ @Override
+ public void handleEvent(Event event) {
+ if (event.getType() == Type.JOB_FINISHED) {
+ if (event.getRunner() instanceof FlowRunner) {
+ // The flow runner will finish a job without it running
+ Object data = event.getData();
+ if (data instanceof ExecutableNode) {
+ ExecutableNode node = (ExecutableNode) data;
+ handleJobStatusChange(node.getNestedId(), node.getStatus());
+ }
+ } else if (event.getRunner() instanceof JobRunner) {
+ // A job runner is finished
+ JobRunner runner = (JobRunner) event.getRunner();
+ ExecutableNode node = runner.getNode();
+ System.out.println(node + " looks like " + node.getStatus());
+ handleJobStatusChange(node.getNestedId(), node.getStatus());
+ }
+ } else if (event.getType() == Type.FLOW_FINISHED) {
+ stopWatcher();
+ }
+ }
+ }
}
diff --git a/src/main/java/azkaban/execapp/event/RemoteFlowWatcher.java b/src/main/java/azkaban/execapp/event/RemoteFlowWatcher.java
index b266555..9dd50c3 100644
--- a/src/main/java/azkaban/execapp/event/RemoteFlowWatcher.java
+++ b/src/main/java/azkaban/execapp/event/RemoteFlowWatcher.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -26,100 +26,100 @@ import azkaban.executor.ExecutorManagerException;
import azkaban.executor.Status;
public class RemoteFlowWatcher extends FlowWatcher {
- private final static long CHECK_INTERVAL_MS = 60*1000;
-
- private int execId;
- private ExecutorLoader loader;
- private ExecutableFlow flow;
- private RemoteUpdaterThread thread;
- private boolean isShutdown = false;
-
- // Every minute
- private long checkIntervalMs = CHECK_INTERVAL_MS;
-
- public RemoteFlowWatcher(int execId, ExecutorLoader loader) {
- this(execId, loader, CHECK_INTERVAL_MS);
- }
-
- public RemoteFlowWatcher(int execId, ExecutorLoader loader, long interval) {
- super(execId);
- checkIntervalMs = interval;
-
- try {
- flow = loader.fetchExecutableFlow(execId);
- } catch (ExecutorManagerException e) {
- return;
- }
-
- super.setFlow(flow);
- this.loader = loader;
- this.execId = execId;
- if (flow != null) {
- this.thread = new RemoteUpdaterThread();
- this.thread.setName("Remote-watcher-flow-" + execId);
- this.thread.start();
- }
- }
-
- private class RemoteUpdaterThread extends Thread {
- @Override
- public void run() {
- do {
- ExecutableFlow updateFlow = null;
- try {
- updateFlow = loader.fetchExecutableFlow(execId);
- } catch (ExecutorManagerException e) {
- e.printStackTrace();
- isShutdown = true;
- }
-
- long updateTime = 0;
- if (flow == null) {
- flow = updateFlow;
- }
- else {
- Map<String, Object> updateData = updateFlow.toUpdateObject(updateTime);
- ArrayList<ExecutableNode> updatedNodes = new ArrayList<ExecutableNode>();
- flow.applyUpdateObject(updateData, updatedNodes);
-
- flow.setStatus(updateFlow.getStatus());
- flow.setEndTime(updateFlow.getEndTime());
- flow.setUpdateTime(updateFlow.getUpdateTime());
-
- for (ExecutableNode node : updatedNodes) {
- handleJobStatusChange(node.getNestedId(), node.getStatus());
- }
-
- updateTime = flow.getUpdateTime();
- }
-
- if (Status.isStatusFinished(flow.getStatus())) {
- isShutdown = true;
- }
- else {
- synchronized(this) {
- try {
- wait(checkIntervalMs);
- } catch (InterruptedException e) {
- }
- }
- }
- } while (!isShutdown);
- }
-
- }
-
- @Override
- public synchronized void stopWatcher() {
- if(isShutdown) {
- return;
- }
- isShutdown = true;
- if (thread != null) {
- thread.interrupt();
- }
- super.unblockAllWatches();
- loader = null;
- flow = null;
- }
+ private final static long CHECK_INTERVAL_MS = 60 * 1000;
+
+ private int execId;
+ private ExecutorLoader loader;
+ private ExecutableFlow flow;
+ private RemoteUpdaterThread thread;
+ private boolean isShutdown = false;
+
+ // Every minute
+ private long checkIntervalMs = CHECK_INTERVAL_MS;
+
+ public RemoteFlowWatcher(int execId, ExecutorLoader loader) {
+ this(execId, loader, CHECK_INTERVAL_MS);
+ }
+
+ public RemoteFlowWatcher(int execId, ExecutorLoader loader, long interval) {
+ super(execId);
+ checkIntervalMs = interval;
+
+ try {
+ flow = loader.fetchExecutableFlow(execId);
+ } catch (ExecutorManagerException e) {
+ return;
+ }
+
+ super.setFlow(flow);
+ this.loader = loader;
+ this.execId = execId;
+ if (flow != null) {
+ this.thread = new RemoteUpdaterThread();
+ this.thread.setName("Remote-watcher-flow-" + execId);
+ this.thread.start();
+ }
+ }
+
+ private class RemoteUpdaterThread extends Thread {
+ @Override
+ public void run() {
+ do {
+ ExecutableFlow updateFlow = null;
+ try {
+ updateFlow = loader.fetchExecutableFlow(execId);
+ } catch (ExecutorManagerException e) {
+ e.printStackTrace();
+ isShutdown = true;
+ }
+
+ long updateTime = 0;
+ if (flow == null) {
+ flow = updateFlow;
+ } else {
+ Map<String, Object> updateData =
+ updateFlow.toUpdateObject(updateTime);
+ ArrayList<ExecutableNode> updatedNodes =
+ new ArrayList<ExecutableNode>();
+ flow.applyUpdateObject(updateData, updatedNodes);
+
+ flow.setStatus(updateFlow.getStatus());
+ flow.setEndTime(updateFlow.getEndTime());
+ flow.setUpdateTime(updateFlow.getUpdateTime());
+
+ for (ExecutableNode node : updatedNodes) {
+ handleJobStatusChange(node.getNestedId(), node.getStatus());
+ }
+
+ updateTime = flow.getUpdateTime();
+ }
+
+ if (Status.isStatusFinished(flow.getStatus())) {
+ isShutdown = true;
+ } else {
+ synchronized (this) {
+ try {
+ wait(checkIntervalMs);
+ } catch (InterruptedException e) {
+ }
+ }
+ }
+ } while (!isShutdown);
+ }
+
+ }
+
+ @Override
+ public synchronized void stopWatcher() {
+ if (isShutdown) {
+ return;
+ }
+ isShutdown = true;
+ if (thread != null) {
+ thread.interrupt();
+ }
+ super.unblockAllWatches();
+ loader = null;
+ flow = null;
+ }
}
src/main/java/azkaban/execapp/ExecutorServlet.java 724(+364 -360)
diff --git a/src/main/java/azkaban/execapp/ExecutorServlet.java b/src/main/java/azkaban/execapp/ExecutorServlet.java
index cb8a211..d146d0e 100644
--- a/src/main/java/azkaban/execapp/ExecutorServlet.java
+++ b/src/main/java/azkaban/execapp/ExecutorServlet.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -41,361 +41,365 @@ import azkaban.utils.JSONUtils;
import azkaban.webapp.servlet.AzkabanServletContextListener;
public class ExecutorServlet extends HttpServlet implements ConnectorParams {
- private static final long serialVersionUID = 1L;
- private static final Logger logger = Logger.getLogger(ExecutorServlet.class.getName());
- public static final String JSON_MIME_TYPE = "application/json";
-
- private AzkabanExecutorServer application;
- private FlowRunnerManager flowRunnerManager;
-
- public ExecutorServlet() {
- super();
- }
-
- @Override
- public void init(ServletConfig config) throws ServletException {
- application = (AzkabanExecutorServer) config.getServletContext().getAttribute(AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY);
-
- if (application == null) {
- throw new IllegalStateException("No batch application is defined in the servlet context!");
- }
-
- flowRunnerManager = application.getFlowRunnerManager();
- }
-
- protected void writeJSON(HttpServletResponse resp, Object obj) throws IOException {
- resp.setContentType(JSON_MIME_TYPE);
- ObjectMapper mapper = new ObjectMapper();
- OutputStream stream = resp.getOutputStream();
- mapper.writeValue(stream, obj);
- }
-
- @Override
- public void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
- HashMap<String,Object> respMap= new HashMap<String,Object>();
- //logger.info("ExecutorServer called by " + req.getRemoteAddr());
- try {
- if (!hasParam(req, ACTION_PARAM)) {
- logger.error("Parameter action not set");
- respMap.put("error", "Parameter action not set");
- }
- else {
- String action = getParam(req, ACTION_PARAM);
- if (action.equals(UPDATE_ACTION)) {
- //logger.info("Updated called");
- handleAjaxUpdateRequest(req, respMap);
- }
- else if (action.equals(PING_ACTION)) {
- respMap.put("status", "alive");
- }
- else if (action.equals(RELOAD_JOBTYPE_PLUGINS_ACTION)) {
- logger.info("Reloading Jobtype plugins");
- handleReloadJobTypePlugins(respMap);
- }
- else {
- int execid = Integer.parseInt(getParam(req, EXECID_PARAM));
- String user = getParam(req, USER_PARAM, null);
-
- logger.info("User " + user + " has called action " + action + " on " + execid);
- if (action.equals(METADATA_ACTION)) {
- handleFetchMetaDataEvent(execid, req, resp, respMap);
- }
- else if (action.equals(LOG_ACTION)) {
- handleFetchLogEvent(execid, req, resp, respMap);
- }
- else if (action.equals(ATTACHMENTS_ACTION)) {
- handleFetchAttachmentsEvent(execid, req, resp, respMap);
- }
- else if (action.equals(EXECUTE_ACTION)) {
- handleAjaxExecute(req, respMap, execid);
- }
- else if (action.equals(STATUS_ACTION)) {
- handleAjaxFlowStatus(respMap, execid);
- }
- else if (action.equals(CANCEL_ACTION)) {
- logger.info("Cancel called.");
- handleAjaxCancel(respMap, execid, user);
- }
- else if (action.equals(PAUSE_ACTION)) {
- logger.info("Paused called.");
- handleAjaxPause(respMap, execid, user);
- }
- else if (action.equals(RESUME_ACTION)) {
- logger.info("Resume called.");
- handleAjaxResume(respMap, execid, user);
- }
- else if (action.equals(MODIFY_EXECUTION_ACTION)) {
- logger.info("Modify Execution Action");
- handleModifyExecutionRequest(respMap, execid, user, req);
- }
- else {
- logger.error("action: '" + action + "' not supported.");
- respMap.put("error", "action: '" + action + "' not supported.");
- }
- }
- }
- } catch (Exception e) {
- logger.error(e);
- respMap.put(RESPONSE_ERROR, e.getMessage());
- }
- writeJSON(resp, respMap);
- resp.flushBuffer();
- }
-
- private void handleModifyExecutionRequest(Map<String, Object> respMap, int execId, String user, HttpServletRequest req) throws ServletException {
- if (!hasParam(req, MODIFY_EXECUTION_ACTION_TYPE)) {
- respMap.put(RESPONSE_ERROR, "Modification type not set.");
- }
- String modificationType = getParam(req, MODIFY_EXECUTION_ACTION_TYPE);
-
-
- try {
- if (MODIFY_RETRY_FAILURES.equals(modificationType)) {
- flowRunnerManager.retryFailures(execId, user);
- }
- else {
-// String modifiedJobList = getParam(req, MODIFY_JOBS_LIST);
-// String[] jobIds = modifiedJobList.split("\\s*,\\s*");
-//
-// if (MODIFY_RETRY_JOBS.equals(modificationType)) {
-// }
-// else if (MODIFY_CANCEL_JOBS.equals(modificationType)) {
-// }
-// else if (MODIFY_DISABLE_JOBS.equals(modificationType)) {
-// }
-// else if (MODIFY_ENABLE_JOBS.equals(modificationType)) {
-// }
-// else if (MODIFY_PAUSE_JOBS.equals(modificationType)) {
-// }
-// else if (MODIFY_RESUME_JOBS.equals(modificationType)) {
-// }
- }
- } catch (ExecutorManagerException e) {
- logger.error(e);
- respMap.put("error", e.getMessage());
- }
- }
-
- private void handleFetchLogEvent(
- int execId,
- HttpServletRequest req,
- HttpServletResponse resp,
- Map<String, Object> respMap) throws ServletException {
- String type = getParam(req, "type");
- int startByte = getIntParam(req, "offset");
- int length = getIntParam(req, "length");
-
- resp.setContentType("text/plain");
- resp.setCharacterEncoding("utf-8");
-
- if (type.equals("flow")) {
- LogData result;
- try {
- result = flowRunnerManager.readFlowLogs(execId, startByte, length);
- respMap.putAll(result.toObject());
- } catch (Exception e) {
- logger.error(e);
- respMap.put(RESPONSE_ERROR, e.getMessage());
- }
- }
- else {
- int attempt = getIntParam(req, "attempt", 0);
- String jobId = getParam(req, "jobId");
- try {
- LogData result = flowRunnerManager.readJobLogs(execId, jobId, attempt, startByte, length);
- respMap.putAll(result.toObject());
- } catch (Exception e) {
- logger.error(e);
- respMap.put("error", e.getMessage());
- }
- }
- }
-
- private void handleFetchAttachmentsEvent(
- int execId,
- HttpServletRequest req,
- HttpServletResponse resp,
- Map<String, Object> respMap) throws ServletException {
-
- String jobId = getParam(req, "jobId");
- int attempt = getIntParam(req, "attempt", 0);
- try {
- List<Object> result = flowRunnerManager.readJobAttachments(
- execId, jobId, attempt);
- respMap.put("attachments", result);
- }
- catch (Exception e) {
- logger.error(e);
- respMap.put("error", e.getMessage());
- }
- }
-
- private void handleFetchMetaDataEvent(int execId, HttpServletRequest req, HttpServletResponse resp, Map<String, Object> respMap) throws ServletException {
- int startByte = getIntParam(req, "offset");
- int length = getIntParam(req, "length");
-
- resp.setContentType("text/plain");
- resp.setCharacterEncoding("utf-8");
-
- int attempt = getIntParam(req, "attempt", 0);
- String jobId = getParam(req, "jobId");
- try {
- JobMetaData result = flowRunnerManager.readJobMetaData(execId, jobId, attempt, startByte, length);
- respMap.putAll(result.toObject());
- } catch (Exception e) {
- logger.error(e);
- respMap.put("error", e.getMessage());
- }
- }
-
- @SuppressWarnings("unchecked")
- private void handleAjaxUpdateRequest(HttpServletRequest req, Map<String, Object> respMap) throws ServletException, IOException {
- ArrayList<Object> updateTimesList = (ArrayList<Object>)JSONUtils.parseJSONFromString(getParam(req, UPDATE_TIME_LIST_PARAM));
- ArrayList<Object> execIDList = (ArrayList<Object>)JSONUtils.parseJSONFromString(getParam(req, EXEC_ID_LIST_PARAM));
-
- ArrayList<Object> updateList = new ArrayList<Object>();
- for (int i = 0; i < execIDList.size(); ++i) {
- long updateTime = JSONUtils.getLongFromObject(updateTimesList.get(i));
- int execId = (Integer)execIDList.get(i);
-
- ExecutableFlowBase flow = flowRunnerManager.getExecutableFlow(execId);
- if (flow == null) {
- Map<String, Object> errorResponse = new HashMap<String,Object>();
- errorResponse.put(RESPONSE_ERROR, "Flow does not exist");
- errorResponse.put(UPDATE_MAP_EXEC_ID, execId);
- updateList.add(errorResponse);
- continue;
- }
-
- if (flow.getUpdateTime() > updateTime) {
- updateList.add(flow.toUpdateObject(updateTime));
- }
- }
-
- respMap.put(RESPONSE_UPDATED_FLOWS, updateList);
- }
-
- private void handleAjaxExecute(HttpServletRequest req, Map<String, Object> respMap, int execId) throws ServletException {
- try {
- flowRunnerManager.submitFlow(execId);
- } catch (ExecutorManagerException e) {
- e.printStackTrace();
- logger.error(e);
- respMap.put(RESPONSE_ERROR, e.getMessage());
- }
- }
-
- private void handleAjaxFlowStatus(Map<String, Object> respMap, int execid) {
- ExecutableFlowBase flow = flowRunnerManager.getExecutableFlow(execid);
- if (flow == null) {
- respMap.put(STATUS_PARAM, RESPONSE_NOTFOUND);
- }
- else {
- respMap.put(STATUS_PARAM, flow.getStatus().toString());
- respMap.put(RESPONSE_UPDATETIME, flow.getUpdateTime());
- }
- }
-
- private void handleAjaxPause(Map<String, Object> respMap, int execid, String user) throws ServletException {
- if (user == null) {
- respMap.put(RESPONSE_ERROR, "user has not been set");
- return;
- }
-
- try {
- flowRunnerManager.pauseFlow(execid, user);
- respMap.put(STATUS_PARAM, RESPONSE_SUCCESS);
- } catch (ExecutorManagerException e) {
- logger.error(e);
- respMap.put(RESPONSE_ERROR, e.getMessage());
- }
- }
-
- private void handleAjaxResume(Map<String, Object> respMap, int execid, String user) throws ServletException {
- if (user == null) {
- respMap.put(RESPONSE_ERROR, "user has not been set");
- return;
- }
-
- try {
- flowRunnerManager.resumeFlow(execid, user);
- respMap.put(STATUS_PARAM, RESPONSE_SUCCESS);
- } catch (ExecutorManagerException e) {
- e.printStackTrace();
- respMap.put(RESPONSE_ERROR, e.getMessage());
- }
- }
-
- private void handleAjaxCancel(Map<String, Object> respMap, int execid, String user) throws ServletException {
- if (user == null) {
- respMap.put(RESPONSE_ERROR, "user has not been set");
- return;
- }
-
- try {
- flowRunnerManager.cancelFlow(execid, user);
- respMap.put(STATUS_PARAM, RESPONSE_SUCCESS);
- } catch (ExecutorManagerException e) {
- logger.error(e);
- respMap.put(RESPONSE_ERROR, e.getMessage());
- }
- }
-
- private void handleReloadJobTypePlugins(Map<String, Object> respMap) throws ServletException {
- try {
- flowRunnerManager.reloadJobTypePlugins();
- respMap.put(STATUS_PARAM, RESPONSE_SUCCESS);
- }
- catch (Exception e) {
- logger.error(e);
- respMap.put(RESPONSE_ERROR, e.getMessage());
- }
- }
-
- @Override
- public void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
-
- }
-
- /**
- * Duplicated code with AbstractAzkabanServlet, but ne
- */
- public boolean hasParam(HttpServletRequest request, String param) {
- return request.getParameter(param) != null;
- }
-
- public String getParam(HttpServletRequest request, String name)
- throws ServletException {
- String p = request.getParameter(name);
- if (p == null)
- throw new ServletException("Missing required parameter '" + name + "'.");
- else
- return p;
- }
-
- public String getParam(HttpServletRequest request, String name, String defaultVal ) {
- String p = request.getParameter(name);
- if (p == null) {
- return defaultVal;
- }
-
- return p;
- }
-
- public int getIntParam(HttpServletRequest request, String name) throws ServletException {
- String p = getParam(request, name);
- return Integer.parseInt(p);
- }
-
- public int getIntParam(HttpServletRequest request, String name, int defaultVal) {
- if (hasParam(request, name)) {
- try {
- return getIntParam(request, name);
- } catch (Exception e) {
- return defaultVal;
- }
- }
-
- return defaultVal;
- }
+ private static final long serialVersionUID = 1L;
+ private static final Logger logger = Logger.getLogger(ExecutorServlet.class
+ .getName());
+ public static final String JSON_MIME_TYPE = "application/json";
+
+ private AzkabanExecutorServer application;
+ private FlowRunnerManager flowRunnerManager;
+
+ public ExecutorServlet() {
+ super();
+ }
+
+ @Override
+ public void init(ServletConfig config) throws ServletException {
+ application =
+ (AzkabanExecutorServer) config.getServletContext().getAttribute(
+ AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY);
+
+ if (application == null) {
+ throw new IllegalStateException(
+ "No batch application is defined in the servlet context!");
+ }
+
+ flowRunnerManager = application.getFlowRunnerManager();
+ }
+
+ protected void writeJSON(HttpServletResponse resp, Object obj)
+ throws IOException {
+ resp.setContentType(JSON_MIME_TYPE);
+ ObjectMapper mapper = new ObjectMapper();
+ OutputStream stream = resp.getOutputStream();
+ mapper.writeValue(stream, obj);
+ }
+
+ @Override
+ public void doGet(HttpServletRequest req, HttpServletResponse resp)
+ throws ServletException, IOException {
+ HashMap<String, Object> respMap = new HashMap<String, Object>();
+ // logger.info("ExecutorServer called by " + req.getRemoteAddr());
+ try {
+ if (!hasParam(req, ACTION_PARAM)) {
+ logger.error("Parameter action not set");
+ respMap.put("error", "Parameter action not set");
+ } else {
+ String action = getParam(req, ACTION_PARAM);
+ if (action.equals(UPDATE_ACTION)) {
+ // logger.info("Updated called");
+ handleAjaxUpdateRequest(req, respMap);
+ } else if (action.equals(PING_ACTION)) {
+ respMap.put("status", "alive");
+ } else if (action.equals(RELOAD_JOBTYPE_PLUGINS_ACTION)) {
+ logger.info("Reloading Jobtype plugins");
+ handleReloadJobTypePlugins(respMap);
+ } else {
+ int execid = Integer.parseInt(getParam(req, EXECID_PARAM));
+ String user = getParam(req, USER_PARAM, null);
+
+ logger.info("User " + user + " has called action " + action + " on "
+ + execid);
+ if (action.equals(METADATA_ACTION)) {
+ handleFetchMetaDataEvent(execid, req, resp, respMap);
+ } else if (action.equals(LOG_ACTION)) {
+ handleFetchLogEvent(execid, req, resp, respMap);
+ } else if (action.equals(ATTACHMENTS_ACTION)) {
+ handleFetchAttachmentsEvent(execid, req, resp, respMap);
+ } else if (action.equals(EXECUTE_ACTION)) {
+ handleAjaxExecute(req, respMap, execid);
+ } else if (action.equals(STATUS_ACTION)) {
+ handleAjaxFlowStatus(respMap, execid);
+ } else if (action.equals(CANCEL_ACTION)) {
+ logger.info("Cancel called.");
+ handleAjaxCancel(respMap, execid, user);
+ } else if (action.equals(PAUSE_ACTION)) {
+ logger.info("Paused called.");
+ handleAjaxPause(respMap, execid, user);
+ } else if (action.equals(RESUME_ACTION)) {
+ logger.info("Resume called.");
+ handleAjaxResume(respMap, execid, user);
+ } else if (action.equals(MODIFY_EXECUTION_ACTION)) {
+ logger.info("Modify Execution Action");
+ handleModifyExecutionRequest(respMap, execid, user, req);
+ } else {
+ logger.error("action: '" + action + "' not supported.");
+ respMap.put("error", "action: '" + action + "' not supported.");
+ }
+ }
+ }
+ } catch (Exception e) {
+ logger.error(e);
+ respMap.put(RESPONSE_ERROR, e.getMessage());
+ }
+ writeJSON(resp, respMap);
+ resp.flushBuffer();
+ }
+
+ private void handleModifyExecutionRequest(Map<String, Object> respMap,
+ int execId, String user, HttpServletRequest req) throws ServletException {
+ if (!hasParam(req, MODIFY_EXECUTION_ACTION_TYPE)) {
+ respMap.put(RESPONSE_ERROR, "Modification type not set.");
+ }
+ String modificationType = getParam(req, MODIFY_EXECUTION_ACTION_TYPE);
+
+ try {
+ if (MODIFY_RETRY_FAILURES.equals(modificationType)) {
+ flowRunnerManager.retryFailures(execId, user);
+ } else {
+ // String modifiedJobList = getParam(req, MODIFY_JOBS_LIST);
+ // String[] jobIds = modifiedJobList.split("\\s*,\\s*");
+ //
+ // if (MODIFY_RETRY_JOBS.equals(modificationType)) {
+ // }
+ // else if (MODIFY_CANCEL_JOBS.equals(modificationType)) {
+ // }
+ // else if (MODIFY_DISABLE_JOBS.equals(modificationType)) {
+ // }
+ // else if (MODIFY_ENABLE_JOBS.equals(modificationType)) {
+ // }
+ // else if (MODIFY_PAUSE_JOBS.equals(modificationType)) {
+ // }
+ // else if (MODIFY_RESUME_JOBS.equals(modificationType)) {
+ // }
+ }
+ } catch (ExecutorManagerException e) {
+ logger.error(e);
+ respMap.put("error", e.getMessage());
+ }
+ }
+
+ private void handleFetchLogEvent(int execId, HttpServletRequest req,
+ HttpServletResponse resp, Map<String, Object> respMap)
+ throws ServletException {
+ String type = getParam(req, "type");
+ int startByte = getIntParam(req, "offset");
+ int length = getIntParam(req, "length");
+
+ resp.setContentType("text/plain");
+ resp.setCharacterEncoding("utf-8");
+
+ if (type.equals("flow")) {
+ LogData result;
+ try {
+ result = flowRunnerManager.readFlowLogs(execId, startByte, length);
+ respMap.putAll(result.toObject());
+ } catch (Exception e) {
+ logger.error(e);
+ respMap.put(RESPONSE_ERROR, e.getMessage());
+ }
+ } else {
+ int attempt = getIntParam(req, "attempt", 0);
+ String jobId = getParam(req, "jobId");
+ try {
+ LogData result =
+ flowRunnerManager.readJobLogs(execId, jobId, attempt, startByte,
+ length);
+ respMap.putAll(result.toObject());
+ } catch (Exception e) {
+ logger.error(e);
+ respMap.put("error", e.getMessage());
+ }
+ }
+ }
+
+ private void handleFetchAttachmentsEvent(int execId, HttpServletRequest req,
+ HttpServletResponse resp, Map<String, Object> respMap)
+ throws ServletException {
+
+ String jobId = getParam(req, "jobId");
+ int attempt = getIntParam(req, "attempt", 0);
+ try {
+ List<Object> result =
+ flowRunnerManager.readJobAttachments(execId, jobId, attempt);
+ respMap.put("attachments", result);
+ } catch (Exception e) {
+ logger.error(e);
+ respMap.put("error", e.getMessage());
+ }
+ }
+
+ private void handleFetchMetaDataEvent(int execId, HttpServletRequest req,
+ HttpServletResponse resp, Map<String, Object> respMap)
+ throws ServletException {
+ int startByte = getIntParam(req, "offset");
+ int length = getIntParam(req, "length");
+
+ resp.setContentType("text/plain");
+ resp.setCharacterEncoding("utf-8");
+
+ int attempt = getIntParam(req, "attempt", 0);
+ String jobId = getParam(req, "jobId");
+ try {
+ JobMetaData result =
+ flowRunnerManager.readJobMetaData(execId, jobId, attempt, startByte,
+ length);
+ respMap.putAll(result.toObject());
+ } catch (Exception e) {
+ logger.error(e);
+ respMap.put("error", e.getMessage());
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private void handleAjaxUpdateRequest(HttpServletRequest req,
+ Map<String, Object> respMap) throws ServletException, IOException {
+ ArrayList<Object> updateTimesList =
+ (ArrayList<Object>) JSONUtils.parseJSONFromString(getParam(req,
+ UPDATE_TIME_LIST_PARAM));
+ ArrayList<Object> execIDList =
+ (ArrayList<Object>) JSONUtils.parseJSONFromString(getParam(req,
+ EXEC_ID_LIST_PARAM));
+
+ ArrayList<Object> updateList = new ArrayList<Object>();
+ for (int i = 0; i < execIDList.size(); ++i) {
+ long updateTime = JSONUtils.getLongFromObject(updateTimesList.get(i));
+ int execId = (Integer) execIDList.get(i);
+
+ ExecutableFlowBase flow = flowRunnerManager.getExecutableFlow(execId);
+ if (flow == null) {
+ Map<String, Object> errorResponse = new HashMap<String, Object>();
+ errorResponse.put(RESPONSE_ERROR, "Flow does not exist");
+ errorResponse.put(UPDATE_MAP_EXEC_ID, execId);
+ updateList.add(errorResponse);
+ continue;
+ }
+
+ if (flow.getUpdateTime() > updateTime) {
+ updateList.add(flow.toUpdateObject(updateTime));
+ }
+ }
+
+ respMap.put(RESPONSE_UPDATED_FLOWS, updateList);
+ }
+
+ private void handleAjaxExecute(HttpServletRequest req,
+ Map<String, Object> respMap, int execId) throws ServletException {
+ try {
+ flowRunnerManager.submitFlow(execId);
+ } catch (ExecutorManagerException e) {
+ e.printStackTrace();
+ logger.error(e);
+ respMap.put(RESPONSE_ERROR, e.getMessage());
+ }
+ }
+
+ private void handleAjaxFlowStatus(Map<String, Object> respMap, int execid) {
+ ExecutableFlowBase flow = flowRunnerManager.getExecutableFlow(execid);
+ if (flow == null) {
+ respMap.put(STATUS_PARAM, RESPONSE_NOTFOUND);
+ } else {
+ respMap.put(STATUS_PARAM, flow.getStatus().toString());
+ respMap.put(RESPONSE_UPDATETIME, flow.getUpdateTime());
+ }
+ }
+
+ private void handleAjaxPause(Map<String, Object> respMap, int execid,
+ String user) throws ServletException {
+ if (user == null) {
+ respMap.put(RESPONSE_ERROR, "user has not been set");
+ return;
+ }
+
+ try {
+ flowRunnerManager.pauseFlow(execid, user);
+ respMap.put(STATUS_PARAM, RESPONSE_SUCCESS);
+ } catch (ExecutorManagerException e) {
+ logger.error(e);
+ respMap.put(RESPONSE_ERROR, e.getMessage());
+ }
+ }
+
+ private void handleAjaxResume(Map<String, Object> respMap, int execid,
+ String user) throws ServletException {
+ if (user == null) {
+ respMap.put(RESPONSE_ERROR, "user has not been set");
+ return;
+ }
+
+ try {
+ flowRunnerManager.resumeFlow(execid, user);
+ respMap.put(STATUS_PARAM, RESPONSE_SUCCESS);
+ } catch (ExecutorManagerException e) {
+ e.printStackTrace();
+ respMap.put(RESPONSE_ERROR, e.getMessage());
+ }
+ }
+
+ private void handleAjaxCancel(Map<String, Object> respMap, int execid,
+ String user) throws ServletException {
+ if (user == null) {
+ respMap.put(RESPONSE_ERROR, "user has not been set");
+ return;
+ }
+
+ try {
+ flowRunnerManager.cancelFlow(execid, user);
+ respMap.put(STATUS_PARAM, RESPONSE_SUCCESS);
+ } catch (ExecutorManagerException e) {
+ logger.error(e);
+ respMap.put(RESPONSE_ERROR, e.getMessage());
+ }
+ }
+
+ private void handleReloadJobTypePlugins(Map<String, Object> respMap)
+ throws ServletException {
+ try {
+ flowRunnerManager.reloadJobTypePlugins();
+ respMap.put(STATUS_PARAM, RESPONSE_SUCCESS);
+ } catch (Exception e) {
+ logger.error(e);
+ respMap.put(RESPONSE_ERROR, e.getMessage());
+ }
+ }
+
+ @Override
+ public void doPost(HttpServletRequest req, HttpServletResponse resp)
+ throws ServletException, IOException {
+
+ }
+
+ /**
+   * Duplicated code with AbstractAzkabanServlet, but necessary here to keep
+ */
+ public boolean hasParam(HttpServletRequest request, String param) {
+ return request.getParameter(param) != null;
+ }
+
+ public String getParam(HttpServletRequest request, String name)
+ throws ServletException {
+ String p = request.getParameter(name);
+ if (p == null)
+ throw new ServletException("Missing required parameter '" + name + "'.");
+ else
+ return p;
+ }
+
+ public String getParam(HttpServletRequest request, String name,
+ String defaultVal) {
+ String p = request.getParameter(name);
+ if (p == null) {
+ return defaultVal;
+ }
+
+ return p;
+ }
+
+ public int getIntParam(HttpServletRequest request, String name)
+ throws ServletException {
+ String p = getParam(request, name);
+ return Integer.parseInt(p);
+ }
+
+ public int getIntParam(HttpServletRequest request, String name, int defaultVal) {
+ if (hasParam(request, name)) {
+ try {
+ return getIntParam(request, name);
+ } catch (Exception e) {
+ return defaultVal;
+ }
+ }
+
+ return defaultVal;
+ }
}
src/main/java/azkaban/execapp/FlowRunner.java 2038(+1034 -1004)
diff --git a/src/main/java/azkaban/execapp/FlowRunner.java b/src/main/java/azkaban/execapp/FlowRunner.java
index c80ea63..db96388 100644
--- a/src/main/java/azkaban/execapp/FlowRunner.java
+++ b/src/main/java/azkaban/execapp/FlowRunner.java
@@ -1,12 +1,12 @@
/*
* Copyright 2013 LinkedIn Corp
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -63,1005 +63,1035 @@ import azkaban.utils.SwapQueue;
*
*/
public class FlowRunner extends EventHandler implements Runnable {
- private static final Layout DEFAULT_LAYOUT = new PatternLayout("%d{dd-MM-yyyy HH:mm:ss z} %c{1} %p - %m\n");
- // We check update every 5 minutes, just in case things get stuck. But for the most part, we'll be idling.
- private static final long CHECK_WAIT_MS = 5*60*1000;
-
- private Logger logger;
- private Layout loggerLayout = DEFAULT_LAYOUT;
- private Appender flowAppender;
- private File logFile;
-
- private ExecutorService executorService;
- private ExecutorLoader executorLoader;
- private ProjectLoader projectLoader;
-
- private int execId;
- private File execDir;
- private final ExecutableFlow flow;
- private Thread flowRunnerThread;
- private int numJobThreads = 10;
- private ExecutionOptions.FailureAction failureAction;
-
- // Sync object for queuing
- private Object mainSyncObj = new Object();
-
- // Properties map
- private Map<String, Props> sharedProps = new HashMap<String, Props>();
- private final JobTypeManager jobtypeManager;
-
- private JobRunnerEventListener listener = new JobRunnerEventListener();
- private Set<JobRunner> activeJobRunners = Collections.newSetFromMap(new ConcurrentHashMap<JobRunner, Boolean>());
-
- // Thread safe swap queue for finishedExecutions.
- private SwapQueue<ExecutableNode> finishedNodes;
-
- // Used for pipelining
- private Integer pipelineLevel = null;
- private Integer pipelineExecId = null;
-
- // Watches external flows for execution.
- private FlowWatcher watcher = null;
-
- private Set<String> proxyUsers = null;
- private boolean validateUserProxy;
-
- private String jobLogFileSize = "5MB";
- private int jobLogNumFiles = 4;
-
- private boolean flowPaused = false;
- private boolean flowFailed = false;
- private boolean flowFinished = false;
- private boolean flowKilled = false;
-
- // The following is state that will trigger a retry of all failed jobs
- private boolean retryFailedJobs = false;
-
-
- /**
- * Constructor.
- * This will create its own ExecutorService for thread pools
- *
- * @param flow
- * @param executorLoader
- * @param projectLoader
- * @param jobtypeManager
- * @throws ExecutorManagerException
- */
- public FlowRunner(ExecutableFlow flow, ExecutorLoader executorLoader, ProjectLoader projectLoader, JobTypeManager jobtypeManager) throws ExecutorManagerException {
- this(flow, executorLoader, projectLoader, jobtypeManager, null);
- }
-
- /**
- * Constructor.
- * If executorService is null, then it will create it's own for thread pools.
- *
- * @param flow
- * @param executorLoader
- * @param projectLoader
- * @param jobtypeManager
- * @param executorService
- * @throws ExecutorManagerException
- */
- public FlowRunner(ExecutableFlow flow, ExecutorLoader executorLoader, ProjectLoader projectLoader, JobTypeManager jobtypeManager, ExecutorService executorService) throws ExecutorManagerException {
- this.execId = flow.getExecutionId();
- this.flow = flow;
- this.executorLoader = executorLoader;
- this.projectLoader = projectLoader;
- this.execDir = new File(flow.getExecutionPath());
- this.jobtypeManager = jobtypeManager;
-
- ExecutionOptions options = flow.getExecutionOptions();
- this.pipelineLevel = options.getPipelineLevel();
- this.pipelineExecId = options.getPipelineExecutionId();
- this.failureAction = options.getFailureAction();
- this.proxyUsers = flow.getProxyUsers();
- this.executorService = executorService;
- this.finishedNodes = new SwapQueue<ExecutableNode>();
- }
-
- public FlowRunner setFlowWatcher(FlowWatcher watcher) {
- this.watcher = watcher;
- return this;
- }
-
- public FlowRunner setNumJobThreads(int jobs) {
- numJobThreads = jobs;
- return this;
- }
-
- public FlowRunner setJobLogSettings(String jobLogFileSize, int jobLogNumFiles) {
- this.jobLogFileSize = jobLogFileSize;
- this.jobLogNumFiles = jobLogNumFiles;
-
- return this;
- }
-
- public FlowRunner setValidateProxyUser(boolean validateUserProxy) {
- this.validateUserProxy = validateUserProxy;
- return this;
- }
-
- public File getExecutionDir() {
- return execDir;
- }
-
- public void run() {
- try {
- if (this.executorService == null) {
- this.executorService = Executors.newFixedThreadPool(numJobThreads);
- }
- setupFlowExecution();
- flow.setStartTime(System.currentTimeMillis());
-
- updateFlowReference();
-
- logger.info("Updating initial flow directory.");
- updateFlow();
- logger.info("Fetching job and shared properties.");
- loadAllProperties();
-
- this.fireEventListeners(Event.create(this, Type.FLOW_STARTED));
- runFlow();
- } catch (Throwable t) {
- if (logger != null) {
- logger.error("An error has occurred during the running of the flow. Quiting.", t);
- }
- flow.setStatus(Status.FAILED);
- }
- finally {
- if (watcher != null) {
- logger.info("Watcher is attached. Stopping watcher.");
- watcher.stopWatcher();
- logger.info("Watcher cancelled status is " + watcher.isWatchCancelled());
- }
-
- flow.setEndTime(System.currentTimeMillis());
- logger.info("Setting end time for flow " + execId + " to " + System.currentTimeMillis());
- closeLogger();
-
- updateFlow();
- this.fireEventListeners(Event.create(this, Type.FLOW_FINISHED));
- }
- }
-
- @SuppressWarnings("unchecked")
- private void setupFlowExecution() {
- int projectId = flow.getProjectId();
- int version = flow.getVersion();
- String flowId = flow.getFlowId();
-
- // Add a bunch of common azkaban properties
- Props commonFlowProps = PropsUtils.addCommonFlowProperties(null, flow);
-
- if (flow.getJobSource() != null) {
- String source = flow.getJobSource();
- Props flowProps = sharedProps.get(source);
- flowProps.setParent(commonFlowProps);
- commonFlowProps = flowProps;
- }
-
- // If there are flow overrides, we apply them now.
- Map<String,String> flowParam = flow.getExecutionOptions().getFlowParameters();
- if (flowParam != null && !flowParam.isEmpty()) {
- commonFlowProps = new Props(commonFlowProps, flowParam);
- }
- flow.setInputProps(commonFlowProps);
-
- // Create execution dir
- createLogger(flowId);
-
- if (this.watcher != null) {
- this.watcher.setLogger(logger);
- }
-
- logger.info("Running execid:" + execId + " flow:" + flowId + " project:" + projectId + " version:" + version);
- if (pipelineExecId != null) {
- logger.info("Running simulateously with " + pipelineExecId + ". Pipelining level " + pipelineLevel);
- }
-
- // The current thread is used for interrupting blocks
- flowRunnerThread = Thread.currentThread();
- flowRunnerThread.setName("FlowRunner-exec-" + flow.getExecutionId());
- }
-
- private void updateFlowReference() throws ExecutorManagerException {
- logger.info("Update active reference");
- if (!executorLoader.updateExecutableReference(execId, System.currentTimeMillis())) {
- throw new ExecutorManagerException("The executor reference doesn't exist. May have been killed prematurely.");
- }
- }
-
- private void updateFlow() {
- updateFlow(System.currentTimeMillis());
- }
-
- private synchronized void updateFlow(long time) {
- try {
- flow.setUpdateTime(time);
- executorLoader.updateExecutableFlow(flow);
- } catch (ExecutorManagerException e) {
- logger.error("Error updating flow.", e);
- }
- }
-
- private void createLogger(String flowId) {
- // Create logger
- String loggerName = execId + "." + flowId;
- logger = Logger.getLogger(loggerName);
-
- // Create file appender
- String logName = "_flow." + loggerName + ".log";
- logFile = new File(execDir, logName);
- String absolutePath = logFile.getAbsolutePath();
-
- flowAppender = null;
- try {
- flowAppender = new FileAppender(loggerLayout, absolutePath, false);
- logger.addAppender(flowAppender);
- } catch (IOException e) {
- logger.error("Could not open log file in " + execDir, e);
- }
- }
-
- private void closeLogger() {
- if (logger != null) {
- logger.removeAppender(flowAppender);
- flowAppender.close();
-
- try {
- executorLoader.uploadLogFile(execId, "", 0, logFile);
- } catch (ExecutorManagerException e) {
- e.printStackTrace();
- }
- }
- }
-
- private void loadAllProperties() throws IOException {
- // First load all the properties
- for (FlowProps fprops : flow.getFlowProps()) {
- String source = fprops.getSource();
- File propsPath = new File(execDir, source);
- Props props = new Props(null, propsPath);
- sharedProps.put(source, props);
- }
-
- // Resolve parents
- for (FlowProps fprops : flow.getFlowProps()) {
- if (fprops.getInheritedSource() != null) {
- String source = fprops.getSource();
- String inherit = fprops.getInheritedSource();
-
- Props props = sharedProps.get(source);
- Props inherits = sharedProps.get(inherit);
-
- props.setParent(inherits);
- }
- }
- }
-
- /**
- * Main method that executes the jobs.
- *
- * @throws Exception
- */
- private void runFlow() throws Exception {
- logger.info("Starting flows");
- runReadyJob(this.flow);
- updateFlow();
-
- while (!flowFinished) {
- synchronized(mainSyncObj) {
- if (flowPaused) {
- try {
- mainSyncObj.wait(CHECK_WAIT_MS);
- } catch (InterruptedException e) {
- }
-
- continue;
- }
- else {
- if (retryFailedJobs) {
- retryAllFailures();
- }
- else if (!progressGraph()) {
- try {
- mainSyncObj.wait(CHECK_WAIT_MS);
- } catch (InterruptedException e) {
- }
- }
- }
- }
- }
-
- logger.info("Finishing up flow. Awaiting Termination");
- executorService.shutdown();
-
- updateFlow();
- logger.info("Finished Flow");
- }
-
- private void retryAllFailures() throws IOException {
- logger.info("Restarting all failed jobs");
-
- this.retryFailedJobs = false;
- this.flowKilled = false;
- this.flowFailed = false;
- this.flow.setStatus(Status.RUNNING);
-
- ArrayList<ExecutableNode> retryJobs = new ArrayList<ExecutableNode>();
- resetFailedState(this.flow, retryJobs);
-
- for (ExecutableNode node: retryJobs) {
- if (node.getStatus() == Status.READY || node.getStatus() == Status.DISABLED) {
- runReadyJob(node);
- }
- else if (node.getStatus() == Status.SUCCEEDED){
- for (String outNodeId: node.getOutNodes()) {
- ExecutableFlowBase base = node.getParentFlow();
- runReadyJob(base.getExecutableNode(outNodeId));
- }
- }
-
- runReadyJob(node);
- }
-
- updateFlow();
- }
-
- private boolean progressGraph() throws IOException {
- finishedNodes.swap();
-
- // The following nodes are finished, so we'll collect a list of outnodes
- // that are candidates for running next.
- HashSet<ExecutableNode> nodesToCheck = new HashSet<ExecutableNode>();
- for (ExecutableNode node: finishedNodes) {
- Set<String> outNodeIds = node.getOutNodes();
- ExecutableFlowBase parentFlow = node.getParentFlow();
-
- // If a job is seen as failed, then we set the parent flow to FAILED_FINISHING
- if (node.getStatus() == Status.FAILED) {
- // The job cannot be retried or has run out of retry attempts. We will
- // fail the job and its flow now.
- if (!retryJobIfPossible(node)) {
- propagateStatus(node.getParentFlow(), Status.FAILED_FINISHING);
- if (failureAction == FailureAction.CANCEL_ALL) {
- this.kill();
- }
- this.flowFailed = true;
- }
- else {
- nodesToCheck.add(node);
- continue;
- }
- }
-
- if (outNodeIds.isEmpty()) {
- // There's no outnodes means it's the end of a flow, so we finalize
- // and fire an event.
- finalizeFlow(parentFlow);
- finishExecutableNode(parentFlow);
-
- // If the parent has a parent, then we process
- if (!(parentFlow instanceof ExecutableFlow)) {
- outNodeIds = parentFlow.getOutNodes();
- parentFlow = parentFlow.getParentFlow();
- }
- }
-
- // Add all out nodes from the finished job. We'll check against this set to
- // see if any are candidates for running.
- for (String nodeId: outNodeIds) {
- ExecutableNode outNode = parentFlow.getExecutableNode(nodeId);
- nodesToCheck.add(outNode);
- }
- }
-
- // Runs candidate jobs. The code will check to see if they are ready to run before
- // Instant kill or skip if necessary.
- boolean jobsRun = false;
- for (ExecutableNode node: nodesToCheck) {
- if (Status.isStatusFinished(node.getStatus()) ||
- Status.isStatusRunning(node.getStatus())) {
- // Really shouldn't get in here.
- continue;
- }
-
- jobsRun |= runReadyJob(node);
- }
-
- if (jobsRun || finishedNodes.getSize() > 0 ) {
- updateFlow();
- return true;
- }
-
- return false;
- }
- private boolean runReadyJob(ExecutableNode node) throws IOException {
- if (Status.isStatusFinished(node.getStatus()) ||
- Status.isStatusRunning(node.getStatus())) {
- return false;
- }
-
- Status nextNodeStatus = getImpliedStatus(node);
- if (nextNodeStatus == null) {
- return false;
- }
-
- if (nextNodeStatus == Status.CANCELLED) {
- logger.info("Cancelling '" + node.getNestedId() + "' due to prior errors.");
- node.cancelNode(System.currentTimeMillis());
- finishExecutableNode(node);
- }
- else if (nextNodeStatus == Status.SKIPPED) {
- logger.info("Skipping disabled job '" + node.getId() + "'.");
- node.skipNode(System.currentTimeMillis());
- finishExecutableNode(node);
- }
- else if (nextNodeStatus == Status.READY) {
- if (node instanceof ExecutableFlowBase) {
- ExecutableFlowBase flow = ((ExecutableFlowBase) node);
- logger.info("Running flow '" + flow.getNestedId() + "'.");
- flow.setStatus(Status.RUNNING);
- flow.setStartTime(System.currentTimeMillis());
- prepareJobProperties(flow);
-
- for (String startNodeId: ((ExecutableFlowBase) node).getStartNodes()) {
- ExecutableNode startNode = flow.getExecutableNode(startNodeId);
- runReadyJob(startNode);
- }
- }
- else {
- runExecutableNode(node);
- }
- }
- return true;
- }
-
- private boolean retryJobIfPossible(ExecutableNode node) {
- if (node instanceof ExecutableFlowBase) {
- return false;
- }
-
- if (node.getRetries() > node.getAttempt()) {
- logger.info("Job '" + node.getId() + "' will be retried. Attempt " + node.getAttempt() + " of " + node.getRetries());
- node.setDelayedExecution(node.getRetryBackoff());
- node.resetForRetry();
- return true;
- }
- else {
- if (node.getRetries() > 0) {
- logger.info("Job '" + node.getId() + "' has run out of retry attempts");
- // Setting delayed execution to 0 in case this is manually re-tried.
- node.setDelayedExecution(0);
- }
-
- return false;
- }
- }
-
- private void propagateStatus(ExecutableFlowBase base, Status status) {
- if (!Status.isStatusFinished(base.getStatus())) {
- logger.info("Setting " + base.getNestedId() + " to " + status);
- base.setStatus(status);
- if (base.getParentFlow() != null) {
- propagateStatus(base.getParentFlow(), status);
- }
- }
- }
-
- private void finishExecutableNode(ExecutableNode node) {
- finishedNodes.add(node);
- fireEventListeners(Event.create(this, Type.JOB_FINISHED, node));
- }
-
- private void finalizeFlow(ExecutableFlowBase flow) {
- String id = flow == this.flow ? "" : flow.getNestedId();
-
- // If it's not the starting flow, we'll create set of output props
- // for the finished flow.
- boolean succeeded = true;
- Props previousOutput = null;
-
- for (String end: flow.getEndNodes()) {
- ExecutableNode node = flow.getExecutableNode(end);
-
- if (node.getStatus() == Status.KILLED ||
- node.getStatus() == Status.FAILED ||
- node.getStatus() == Status.CANCELLED) {
- succeeded = false;
- }
-
- Props output = node.getOutputProps();
- if (output != null) {
- output = Props.clone(output);
- output.setParent(previousOutput);
- previousOutput = output;
- }
- }
-
- flow.setOutputProps(previousOutput);
- if (!succeeded && (flow.getStatus() == Status.RUNNING)) {
- flow.setStatus(Status.KILLED);
- }
-
- flow.setEndTime(System.currentTimeMillis());
- flow.setUpdateTime(System.currentTimeMillis());
- long durationSec = (flow.getEndTime() - flow.getStartTime()) / 1000;
- switch(flow.getStatus()) {
- case FAILED_FINISHING:
- logger.info("Setting flow '" + id + "' status to FAILED in " + durationSec + " seconds");
- flow.setStatus(Status.FAILED);
- break;
- case FAILED:
- case KILLED:
- case CANCELLED:
- case FAILED_SUCCEEDED:
- logger.info("Flow '" + id + "' is set to " + flow.getStatus().toString() + " in " + durationSec + " seconds");
- break;
- default:
- flow.setStatus(Status.SUCCEEDED);
- logger.info("Flow '" + id + "' is set to " + flow.getStatus().toString() + " in " + durationSec + " seconds");
- }
-
- // If the finalized flow is actually the top level flow, than we finish
- // the main loop.
- if (flow instanceof ExecutableFlow) {
- flowFinished = true;
- }
- }
-
- private void prepareJobProperties(ExecutableNode node) throws IOException {
- if (node instanceof ExecutableFlow) {
- return;
- }
-
- Props props = null;
- // 1. Shared properties (i.e. *.properties) for the jobs only. This takes the
- // least precedence
- if (!(node instanceof ExecutableFlowBase)) {
- String sharedProps = node.getPropsSource();
- if (sharedProps != null) {
- props = this.sharedProps.get(sharedProps);
- }
- }
-
- // The following is the hiearchical ordering of dependency resolution
- // 2. Parent Flow Properties
- ExecutableFlowBase parentFlow = node.getParentFlow();
- if (parentFlow != null) {
- Props flowProps = Props.clone(parentFlow.getInputProps());
- flowProps.setEarliestAncestor(props);
- props = flowProps;
- }
-
- // 3. Output Properties. The call creates a clone, so we can overwrite it.
- Props outputProps = collectOutputProps(node);
- if (outputProps != null) {
- outputProps.setEarliestAncestor(props);
- props = outputProps;
- }
-
- // 4. The job source.
- Props jobSource = loadJobProps(node);
- if (jobSource != null) {
- jobSource.setParent(props);
- props = jobSource;
- }
-
- node.setInputProps(props);
- }
-
- private Props loadJobProps(ExecutableNode node) throws IOException {
- Props props = null;
- String source = node.getJobSource();
- if (source == null) {
- return null;
- }
-
- // load the override props if any
- try {
- props = projectLoader.fetchProjectProperty(flow.getProjectId(), flow.getVersion(), node.getId()+".jor");
- }
- catch(ProjectManagerException e) {
- e.printStackTrace();
- logger.error("Error loading job override property for job " + node.getId());
- }
-
- File path = new File(execDir, source);
- if (props == null) {
- // if no override prop, load the original one on disk
- try {
- props = new Props(null, path);
- } catch (IOException e) {
- e.printStackTrace();
- logger.error("Error loading job file " + source + " for job " + node.getId());
- }
- }
- // setting this fake source as this will be used to determine the location of log files.
- if (path.getPath() != null) {
- props.setSource(path.getPath());
- }
- return props;
- }
-
- private void runExecutableNode(ExecutableNode node) throws IOException {
- // Collect output props from the job's dependencies.
- prepareJobProperties(node);
-
- node.setStatus(Status.QUEUED);
- JobRunner runner = createJobRunner(node);
- logger.info("Submitting job '" + node.getNestedId() + "' to run.");
- try {
- executorService.submit(runner);
- activeJobRunners.add(runner);
- } catch (RejectedExecutionException e) {
- logger.error(e);
- };
- }
-
- /**
- * Determines what the state of the next node should be. Returns null if
- * the node should not be run.
- *
- * @param node
- * @return
- */
- public Status getImpliedStatus(ExecutableNode node) {
- // If it's running or finished with 'SUCCEEDED', than don't even
- // bother starting this job.
- if (Status.isStatusRunning(node.getStatus()) ||
- node.getStatus() == Status.SUCCEEDED) {
- return null;
- }
-
- // Go through the node's dependencies. If all of the previous job's
- // statuses is finished and not FAILED or KILLED, than we can safely
- // run this job.
- ExecutableFlowBase flow = node.getParentFlow();
- boolean shouldKill = false;
- for (String dependency: node.getInNodes()) {
- ExecutableNode dependencyNode = flow.getExecutableNode(dependency);
- Status depStatus = dependencyNode.getStatus();
-
- if (!Status.isStatusFinished(depStatus)) {
- return null;
- }
- else if (depStatus == Status.FAILED ||
- depStatus == Status.CANCELLED ||
- depStatus == Status.KILLED) {
- // We propagate failures as KILLED states.
- shouldKill = true;
- }
- }
-
- // If it's disabled but ready to run, we want to make sure it continues being disabled.
- if (node.getStatus() == Status.DISABLED || node.getStatus() == Status.SKIPPED) {
- return Status.SKIPPED;
- }
-
- // If the flow has failed, and we want to finish only the currently running jobs, we just
- // kill everything else. We also kill, if the flow has been cancelled.
- if (flowFailed && failureAction == ExecutionOptions.FailureAction.FINISH_CURRENTLY_RUNNING) {
- return Status.CANCELLED;
- }
- else if (shouldKill || isKilled()) {
- return Status.CANCELLED;
- }
-
- // All good to go, ready to run.
- return Status.READY;
- }
-
- private Props collectOutputProps(ExecutableNode node) {
- Props previousOutput = null;
- // Iterate the in nodes again and create the dependencies
- for (String dependency : node.getInNodes()) {
- Props output = node.getParentFlow().getExecutableNode(dependency).getOutputProps();
- if (output != null) {
- output = Props.clone(output);
- output.setParent(previousOutput);
- previousOutput = output;
- }
- }
-
- return previousOutput;
- }
-
- private JobRunner createJobRunner(ExecutableNode node) {
- // Load job file.
- File path = new File(execDir, node.getJobSource());
-
- JobRunner jobRunner = new JobRunner(node, path.getParentFile(), executorLoader, jobtypeManager);
- if (watcher != null) {
- jobRunner.setPipeline(watcher, pipelineLevel);
- }
- if (validateUserProxy) {
- jobRunner.setValidatedProxyUsers(proxyUsers);
- }
-
- jobRunner.setDelayStart(node.getDelayedExecution());
- jobRunner.setLogSettings(logger, jobLogFileSize, jobLogNumFiles);
- jobRunner.addListener(listener);
-
- return jobRunner;
- }
-
- public void pause(String user) {
- synchronized(mainSyncObj) {
- if (!flowFinished) {
- logger.info("Flow paused by " + user);
- flowPaused = true;
- flow.setStatus(Status.PAUSED);
-
- updateFlow();
- }
- else {
- logger.info("Cannot pause finished flow. Called by user " + user);
- }
- }
-
- interrupt();
- }
-
- public void resume(String user) {
- synchronized(mainSyncObj) {
- if (!flowPaused) {
- logger.info("Cannot resume flow that isn't paused");
- }
- else {
- logger.info("Flow resumed by " + user);
- flowPaused = false;
- if (flowFailed) {
- flow.setStatus(Status.FAILED_FINISHING);
- }
- else if (flowKilled) {
- flow.setStatus(Status.KILLED);
- }
- else {
- flow.setStatus(Status.RUNNING);
- }
-
- updateFlow();
- }
- }
-
- interrupt();
- }
-
- public void kill(String user) {
- synchronized(mainSyncObj) {
- logger.info("Flow killed by " + user);
- flow.setStatus(Status.KILLED);
- kill();
- updateFlow();
- }
- interrupt();
- }
-
- private void kill() {
- synchronized(mainSyncObj) {
- logger.info("Kill has been called on flow " + execId);
-
- // If the flow is paused, then we'll also unpause
- flowPaused = false;
- flowKilled = true;
-
- if (watcher != null) {
- logger.info("Watcher is attached. Stopping watcher.");
- watcher.stopWatcher();
- logger.info("Watcher cancelled status is " + watcher.isWatchCancelled());
- }
-
- logger.info("Killing " + activeJobRunners.size() + " jobs.");
- for (JobRunner runner : activeJobRunners) {
- runner.kill();
- }
- }
- }
-
- public void retryFailures(String user) {
- synchronized(mainSyncObj) {
- logger.info("Retrying failures invoked by " + user);
- retryFailedJobs = true;
- interrupt();
- }
- }
-
- private void resetFailedState(ExecutableFlowBase flow, List<ExecutableNode> nodesToRetry) {
- //bottom up
- LinkedList<ExecutableNode> queue = new LinkedList<ExecutableNode>();
- for (String id : flow.getEndNodes()) {
- ExecutableNode node = flow.getExecutableNode(id);
- queue.add(node);
- }
-
- long maxStartTime = -1;
- while (!queue.isEmpty()) {
- ExecutableNode node = queue.poll();
- Status oldStatus = node.getStatus();
- maxStartTime = Math.max(node.getStartTime(), maxStartTime);
-
- long currentTime = System.currentTimeMillis();
- if (node.getStatus() == Status.SUCCEEDED) {
- // This is a candidate parent for restart
- nodesToRetry.add(node);
- continue;
- }
- else if (node.getStatus() == Status.RUNNING) {
- continue;
- }
- else if (node.getStatus() == Status.SKIPPED) {
- node.setStatus(Status.DISABLED);
- node.setEndTime(-1);
- node.setStartTime(-1);
- node.setUpdateTime(currentTime);
- }
- else if (node instanceof ExecutableFlowBase) {
- ExecutableFlowBase base = (ExecutableFlowBase)node;
- switch (base.getStatus()) {
- case CANCELLED:
- node.setStatus(Status.READY);
- node.setEndTime(-1);
- node.setStartTime(-1);
- node.setUpdateTime(currentTime);
- // Break out of the switch. We'll reset the flow just like a normal node
- break;
- case KILLED:
- case FAILED:
- case FAILED_FINISHING:
- resetFailedState(base, nodesToRetry);
- continue;
- default:
- // Continue the while loop. If the job is in a finished state that's not
- // a failure, we don't want to reset the job.
- continue;
- }
- }
- else if (node.getStatus() == Status.CANCELLED) {
- // Not a flow, but killed
- node.setStatus(Status.READY);
- node.setStartTime(-1);
- node.setEndTime(-1);
- node.setUpdateTime(currentTime);
- }
- else if (node.getStatus() == Status.FAILED || node.getStatus() == Status.KILLED) {
- node.resetForRetry();
- nodesToRetry.add(node);
- }
-
- if (!(node instanceof ExecutableFlowBase) && node.getStatus() != oldStatus) {
- logger.info("Resetting job '" + node.getNestedId() + "' from " + oldStatus + " to " + node.getStatus());
- }
-
- for (String inId: node.getInNodes()) {
- ExecutableNode nodeUp = flow.getExecutableNode(inId);
- queue.add(nodeUp);
- }
- }
-
- // At this point, the following code will reset the flow
- Status oldFlowState = flow.getStatus();
- if (maxStartTime == -1) {
- // Nothing has run inside the flow, so we assume the flow hasn't even started running yet.
- flow.setStatus(Status.READY);
- }
- else {
- flow.setStatus(Status.RUNNING);
-
- // Add any READY start nodes. Usually it means the flow started, but the start node has not.
- for (String id: flow.getStartNodes()) {
- ExecutableNode node = flow.getExecutableNode(id);
- if (node.getStatus() == Status.READY || node.getStatus() == Status.DISABLED) {
- nodesToRetry.add(node);
- }
- }
- }
- flow.setUpdateTime(System.currentTimeMillis());
- flow.setEndTime(-1);
- logger.info("Resetting flow '" + flow.getNestedId() + "' from " + oldFlowState + " to " + flow.getStatus());
- }
-
- private void interrupt() {
- flowRunnerThread.interrupt();
- }
-
- private class JobRunnerEventListener implements EventListener {
- public JobRunnerEventListener() {
- }
-
- @Override
- public synchronized void handleEvent(Event event) {
- JobRunner runner = (JobRunner)event.getRunner();
-
- if (event.getType() == Type.JOB_STATUS_CHANGED) {
- updateFlow();
- }
- else if (event.getType() == Type.JOB_FINISHED) {
- ExecutableNode node = runner.getNode();
- long seconds = (node.getEndTime() - node.getStartTime())/1000;
- synchronized(mainSyncObj) {
- logger.info("Job " + node.getNestedId() + " finished with status " + node.getStatus() + " in " + seconds + " seconds");
-
- // Cancellation is handled in the main thread, but if the flow is paused, the main thread is paused too.
- // This unpauses the flow for cancellation.
- if (flowPaused && node.getStatus() == Status.FAILED && failureAction == FailureAction.CANCEL_ALL) {
- flowPaused = false;
- }
-
- finishedNodes.add(node);
- node.getParentFlow().setUpdateTime(System.currentTimeMillis());
- interrupt();
- fireEventListeners(event);
- }
- }
- }
- }
-
- public boolean isKilled() {
- return flowKilled;
- }
-
- public ExecutableFlow getExecutableFlow() {
- return flow;
- }
-
- public File getFlowLogFile() {
- return logFile;
- }
-
- public File getJobLogFile(String jobId, int attempt) {
- ExecutableNode node = flow.getExecutableNodePath(jobId);
- File path = new File(execDir, node.getJobSource());
-
- String logFileName = JobRunner.createLogFileName(node, attempt);
- File logFile = new File(path.getParentFile(), logFileName);
-
- if (!logFile.exists()) {
- return null;
- }
-
- return logFile;
- }
-
- public File getJobAttachmentFile(String jobId, int attempt) {
- ExecutableNode node = flow.getExecutableNodePath(jobId);
- File path = new File(execDir, node.getJobSource());
-
- String attachmentFileName = JobRunner.createAttachmentFileName(node, attempt);
- File attachmentFile = new File(path.getParentFile(), attachmentFileName);
- if (!attachmentFile.exists()) {
- return null;
- }
- return attachmentFile;
- }
-
- public File getJobMetaDataFile(String jobId, int attempt) {
- ExecutableNode node = flow.getExecutableNodePath(jobId);
- File path = new File(execDir, node.getJobSource());
-
- String metaDataFileName = JobRunner.createMetaDataFileName(node, attempt);
- File metaDataFile = new File(path.getParentFile(), metaDataFileName);
-
- if (!metaDataFile.exists()) {
- return null;
- }
-
- return metaDataFile;
- }
-
- public boolean isRunnerThreadAlive() {
- if (flowRunnerThread != null) {
- return flowRunnerThread.isAlive();
- }
- return false;
- }
-
- public boolean isThreadPoolShutdown() {
- return executorService.isShutdown();
- }
-
- public int getNumRunningJobs() {
- return activeJobRunners.size();
- }
+ private static final Layout DEFAULT_LAYOUT = new PatternLayout(
+ "%d{dd-MM-yyyy HH:mm:ss z} %c{1} %p - %m\n");
+ // We check update every 5 minutes, just in case things get stuck. But for the
+ // most part, we'll be idling.
+ private static final long CHECK_WAIT_MS = 5 * 60 * 1000;
+
+ private Logger logger;
+ private Layout loggerLayout = DEFAULT_LAYOUT;
+ private Appender flowAppender;
+ private File logFile;
+
+ private ExecutorService executorService;
+ private ExecutorLoader executorLoader;
+ private ProjectLoader projectLoader;
+
+ private int execId;
+ private File execDir;
+ private final ExecutableFlow flow;
+ private Thread flowRunnerThread;
+ private int numJobThreads = 10;
+ private ExecutionOptions.FailureAction failureAction;
+
+ // Sync object for queuing
+ private Object mainSyncObj = new Object();
+
+ // Properties map
+ private Map<String, Props> sharedProps = new HashMap<String, Props>();
+ private final JobTypeManager jobtypeManager;
+
+ private JobRunnerEventListener listener = new JobRunnerEventListener();
+ private Set<JobRunner> activeJobRunners = Collections
+ .newSetFromMap(new ConcurrentHashMap<JobRunner, Boolean>());
+
+ // Thread safe swap queue for finishedExecutions.
+ private SwapQueue<ExecutableNode> finishedNodes;
+
+ // Used for pipelining
+ private Integer pipelineLevel = null;
+ private Integer pipelineExecId = null;
+
+ // Watches external flows for execution.
+ private FlowWatcher watcher = null;
+
+ private Set<String> proxyUsers = null;
+ private boolean validateUserProxy;
+
+ private String jobLogFileSize = "5MB";
+ private int jobLogNumFiles = 4;
+
+ private boolean flowPaused = false;
+ private boolean flowFailed = false;
+ private boolean flowFinished = false;
+ private boolean flowKilled = false;
+
+ // The following is state that will trigger a retry of all failed jobs
+ private boolean retryFailedJobs = false;
+
+ /**
+ * Constructor. This will create its own ExecutorService for thread pools
+ *
+ * @param flow
+ * @param executorLoader
+ * @param projectLoader
+ * @param jobtypeManager
+ * @throws ExecutorManagerException
+ */
+ public FlowRunner(ExecutableFlow flow, ExecutorLoader executorLoader,
+ ProjectLoader projectLoader, JobTypeManager jobtypeManager)
+ throws ExecutorManagerException {
+ this(flow, executorLoader, projectLoader, jobtypeManager, null);
+ }
+
+ /**
+ * Constructor. If executorService is null, then it will create its own for
+ * thread pools.
+ *
+ * @param flow
+ * @param executorLoader
+ * @param projectLoader
+ * @param jobtypeManager
+ * @param executorService
+ * @throws ExecutorManagerException
+ */
+ public FlowRunner(ExecutableFlow flow, ExecutorLoader executorLoader,
+ ProjectLoader projectLoader, JobTypeManager jobtypeManager,
+ ExecutorService executorService) throws ExecutorManagerException {
+ this.execId = flow.getExecutionId();
+ this.flow = flow;
+ this.executorLoader = executorLoader;
+ this.projectLoader = projectLoader;
+ this.execDir = new File(flow.getExecutionPath());
+ this.jobtypeManager = jobtypeManager;
+
+ ExecutionOptions options = flow.getExecutionOptions();
+ this.pipelineLevel = options.getPipelineLevel();
+ this.pipelineExecId = options.getPipelineExecutionId();
+ this.failureAction = options.getFailureAction();
+ this.proxyUsers = flow.getProxyUsers();
+ this.executorService = executorService;
+ this.finishedNodes = new SwapQueue<ExecutableNode>();
+ }
+
+ public FlowRunner setFlowWatcher(FlowWatcher watcher) {
+ this.watcher = watcher;
+ return this;
+ }
+
+ public FlowRunner setNumJobThreads(int jobs) {
+ numJobThreads = jobs;
+ return this;
+ }
+
+ public FlowRunner setJobLogSettings(String jobLogFileSize, int jobLogNumFiles) {
+ this.jobLogFileSize = jobLogFileSize;
+ this.jobLogNumFiles = jobLogNumFiles;
+
+ return this;
+ }
+
+ public FlowRunner setValidateProxyUser(boolean validateUserProxy) {
+ this.validateUserProxy = validateUserProxy;
+ return this;
+ }
+
+ public File getExecutionDir() {
+ return execDir;
+ }
+
+ public void run() {
+ try {
+ if (this.executorService == null) {
+ this.executorService = Executors.newFixedThreadPool(numJobThreads);
+ }
+ setupFlowExecution();
+ flow.setStartTime(System.currentTimeMillis());
+
+ updateFlowReference();
+
+ logger.info("Updating initial flow directory.");
+ updateFlow();
+ logger.info("Fetching job and shared properties.");
+ loadAllProperties();
+
+ this.fireEventListeners(Event.create(this, Type.FLOW_STARTED));
+ runFlow();
+ } catch (Throwable t) {
+ if (logger != null) {
+ logger
+ .error(
+ "An error has occurred during the running of the flow. Quiting.",
+ t);
+ }
+ flow.setStatus(Status.FAILED);
+ } finally {
+ if (watcher != null) {
+ logger.info("Watcher is attached. Stopping watcher.");
+ watcher.stopWatcher();
+ logger
+ .info("Watcher cancelled status is " + watcher.isWatchCancelled());
+ }
+
+ flow.setEndTime(System.currentTimeMillis());
+ logger.info("Setting end time for flow " + execId + " to "
+ + System.currentTimeMillis());
+ closeLogger();
+
+ updateFlow();
+ this.fireEventListeners(Event.create(this, Type.FLOW_FINISHED));
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private void setupFlowExecution() {
+ int projectId = flow.getProjectId();
+ int version = flow.getVersion();
+ String flowId = flow.getFlowId();
+
+ // Add a bunch of common azkaban properties
+ Props commonFlowProps = PropsUtils.addCommonFlowProperties(null, flow);
+
+ if (flow.getJobSource() != null) {
+ String source = flow.getJobSource();
+ Props flowProps = sharedProps.get(source);
+ flowProps.setParent(commonFlowProps);
+ commonFlowProps = flowProps;
+ }
+
+ // If there are flow overrides, we apply them now.
+ Map<String, String> flowParam =
+ flow.getExecutionOptions().getFlowParameters();
+ if (flowParam != null && !flowParam.isEmpty()) {
+ commonFlowProps = new Props(commonFlowProps, flowParam);
+ }
+ flow.setInputProps(commonFlowProps);
+
+ // Create execution dir
+ createLogger(flowId);
+
+ if (this.watcher != null) {
+ this.watcher.setLogger(logger);
+ }
+
+ logger.info("Running execid:" + execId + " flow:" + flowId + " project:"
+ + projectId + " version:" + version);
+ if (pipelineExecId != null) {
+ logger.info("Running simulateously with " + pipelineExecId
+ + ". Pipelining level " + pipelineLevel);
+ }
+
+ // The current thread is used for interrupting blocks
+ flowRunnerThread = Thread.currentThread();
+ flowRunnerThread.setName("FlowRunner-exec-" + flow.getExecutionId());
+ }
+
+ private void updateFlowReference() throws ExecutorManagerException {
+ logger.info("Update active reference");
+ if (!executorLoader.updateExecutableReference(execId,
+ System.currentTimeMillis())) {
+ throw new ExecutorManagerException(
+ "The executor reference doesn't exist. May have been killed prematurely.");
+ }
+ }
+
+ private void updateFlow() {
+ updateFlow(System.currentTimeMillis());
+ }
+
+ private synchronized void updateFlow(long time) {
+ try {
+ flow.setUpdateTime(time);
+ executorLoader.updateExecutableFlow(flow);
+ } catch (ExecutorManagerException e) {
+ logger.error("Error updating flow.", e);
+ }
+ }
+
+ private void createLogger(String flowId) {
+ // Create logger
+ String loggerName = execId + "." + flowId;
+ logger = Logger.getLogger(loggerName);
+
+ // Create file appender
+ String logName = "_flow." + loggerName + ".log";
+ logFile = new File(execDir, logName);
+ String absolutePath = logFile.getAbsolutePath();
+
+ flowAppender = null;
+ try {
+ flowAppender = new FileAppender(loggerLayout, absolutePath, false);
+ logger.addAppender(flowAppender);
+ } catch (IOException e) {
+ logger.error("Could not open log file in " + execDir, e);
+ }
+ }
+
+ private void closeLogger() {
+ if (logger != null) {
+ logger.removeAppender(flowAppender);
+ flowAppender.close();
+
+ try {
+ executorLoader.uploadLogFile(execId, "", 0, logFile);
+ } catch (ExecutorManagerException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ private void loadAllProperties() throws IOException {
+ // First load all the properties
+ for (FlowProps fprops : flow.getFlowProps()) {
+ String source = fprops.getSource();
+ File propsPath = new File(execDir, source);
+ Props props = new Props(null, propsPath);
+ sharedProps.put(source, props);
+ }
+
+ // Resolve parents
+ for (FlowProps fprops : flow.getFlowProps()) {
+ if (fprops.getInheritedSource() != null) {
+ String source = fprops.getSource();
+ String inherit = fprops.getInheritedSource();
+
+ Props props = sharedProps.get(source);
+ Props inherits = sharedProps.get(inherit);
+
+ props.setParent(inherits);
+ }
+ }
+ }
+
+ /**
+ * Main method that executes the jobs.
+ *
+ * @throws Exception
+ */
+ private void runFlow() throws Exception {
+ logger.info("Starting flows");
+ runReadyJob(this.flow);
+ updateFlow();
+
+ while (!flowFinished) {
+ synchronized (mainSyncObj) {
+ if (flowPaused) {
+ try {
+ mainSyncObj.wait(CHECK_WAIT_MS);
+ } catch (InterruptedException e) {
+ }
+
+ continue;
+ } else {
+ if (retryFailedJobs) {
+ retryAllFailures();
+ } else if (!progressGraph()) {
+ try {
+ mainSyncObj.wait(CHECK_WAIT_MS);
+ } catch (InterruptedException e) {
+ }
+ }
+ }
+ }
+ }
+
+ logger.info("Finishing up flow. Awaiting Termination");
+ executorService.shutdown();
+
+ updateFlow();
+ logger.info("Finished Flow");
+ }
+
+ private void retryAllFailures() throws IOException {
+ logger.info("Restarting all failed jobs");
+
+ this.retryFailedJobs = false;
+ this.flowKilled = false;
+ this.flowFailed = false;
+ this.flow.setStatus(Status.RUNNING);
+
+ ArrayList<ExecutableNode> retryJobs = new ArrayList<ExecutableNode>();
+ resetFailedState(this.flow, retryJobs);
+
+ for (ExecutableNode node : retryJobs) {
+ if (node.getStatus() == Status.READY
+ || node.getStatus() == Status.DISABLED) {
+ runReadyJob(node);
+ } else if (node.getStatus() == Status.SUCCEEDED) {
+ for (String outNodeId : node.getOutNodes()) {
+ ExecutableFlowBase base = node.getParentFlow();
+ runReadyJob(base.getExecutableNode(outNodeId));
+ }
+ }
+
+ runReadyJob(node);
+ }
+
+ updateFlow();
+ }
+
+ private boolean progressGraph() throws IOException {
+ finishedNodes.swap();
+
+ // The following nodes are finished, so we'll collect a list of outnodes
+ // that are candidates for running next.
+ HashSet<ExecutableNode> nodesToCheck = new HashSet<ExecutableNode>();
+ for (ExecutableNode node : finishedNodes) {
+ Set<String> outNodeIds = node.getOutNodes();
+ ExecutableFlowBase parentFlow = node.getParentFlow();
+
+ // If a job is seen as failed, then we set the parent flow to
+ // FAILED_FINISHING
+ if (node.getStatus() == Status.FAILED) {
+ // The job cannot be retried or has run out of retry attempts. We will
+ // fail the job and its flow now.
+ if (!retryJobIfPossible(node)) {
+ propagateStatus(node.getParentFlow(), Status.FAILED_FINISHING);
+ if (failureAction == FailureAction.CANCEL_ALL) {
+ this.kill();
+ }
+ this.flowFailed = true;
+ } else {
+ nodesToCheck.add(node);
+ continue;
+ }
+ }
+
+ if (outNodeIds.isEmpty()) {
+ // There's no outnodes means it's the end of a flow, so we finalize
+ // and fire an event.
+ finalizeFlow(parentFlow);
+ finishExecutableNode(parentFlow);
+
+ // If the parent has a parent, then we process
+ if (!(parentFlow instanceof ExecutableFlow)) {
+ outNodeIds = parentFlow.getOutNodes();
+ parentFlow = parentFlow.getParentFlow();
+ }
+ }
+
+ // Add all out nodes from the finished job. We'll check against this set
+ // to
+ // see if any are candidates for running.
+ for (String nodeId : outNodeIds) {
+ ExecutableNode outNode = parentFlow.getExecutableNode(nodeId);
+ nodesToCheck.add(outNode);
+ }
+ }
+
+ // Runs candidate jobs. The code will check to see if they are ready to run
+ // before
+ // Instant kill or skip if necessary.
+ boolean jobsRun = false;
+ for (ExecutableNode node : nodesToCheck) {
+ if (Status.isStatusFinished(node.getStatus())
+ || Status.isStatusRunning(node.getStatus())) {
+ // Really shouldn't get in here.
+ continue;
+ }
+
+ jobsRun |= runReadyJob(node);
+ }
+
+ if (jobsRun || finishedNodes.getSize() > 0) {
+ updateFlow();
+ return true;
+ }
+
+ return false;
+ }
+
+ private boolean runReadyJob(ExecutableNode node) throws IOException {
+ if (Status.isStatusFinished(node.getStatus())
+ || Status.isStatusRunning(node.getStatus())) {
+ return false;
+ }
+
+ Status nextNodeStatus = getImpliedStatus(node);
+ if (nextNodeStatus == null) {
+ return false;
+ }
+
+ if (nextNodeStatus == Status.CANCELLED) {
+ logger.info("Cancelling '" + node.getNestedId()
+ + "' due to prior errors.");
+ node.cancelNode(System.currentTimeMillis());
+ finishExecutableNode(node);
+ } else if (nextNodeStatus == Status.SKIPPED) {
+ logger.info("Skipping disabled job '" + node.getId() + "'.");
+ node.skipNode(System.currentTimeMillis());
+ finishExecutableNode(node);
+ } else if (nextNodeStatus == Status.READY) {
+ if (node instanceof ExecutableFlowBase) {
+ ExecutableFlowBase flow = ((ExecutableFlowBase) node);
+ logger.info("Running flow '" + flow.getNestedId() + "'.");
+ flow.setStatus(Status.RUNNING);
+ flow.setStartTime(System.currentTimeMillis());
+ prepareJobProperties(flow);
+
+ for (String startNodeId : ((ExecutableFlowBase) node).getStartNodes()) {
+ ExecutableNode startNode = flow.getExecutableNode(startNodeId);
+ runReadyJob(startNode);
+ }
+ } else {
+ runExecutableNode(node);
+ }
+ }
+ return true;
+ }
+
+ private boolean retryJobIfPossible(ExecutableNode node) {
+ if (node instanceof ExecutableFlowBase) {
+ return false;
+ }
+
+ if (node.getRetries() > node.getAttempt()) {
+ logger.info("Job '" + node.getId() + "' will be retried. Attempt "
+ + node.getAttempt() + " of " + node.getRetries());
+ node.setDelayedExecution(node.getRetryBackoff());
+ node.resetForRetry();
+ return true;
+ } else {
+ if (node.getRetries() > 0) {
+ logger.info("Job '" + node.getId() + "' has run out of retry attempts");
+ // Setting delayed execution to 0 in case this is manually re-tried.
+ node.setDelayedExecution(0);
+ }
+
+ return false;
+ }
+ }
+
+ private void propagateStatus(ExecutableFlowBase base, Status status) {
+ if (!Status.isStatusFinished(base.getStatus())) {
+ logger.info("Setting " + base.getNestedId() + " to " + status);
+ base.setStatus(status);
+ if (base.getParentFlow() != null) {
+ propagateStatus(base.getParentFlow(), status);
+ }
+ }
+ }
+
+ private void finishExecutableNode(ExecutableNode node) {
+ finishedNodes.add(node);
+ fireEventListeners(Event.create(this, Type.JOB_FINISHED, node));
+ }
+
+ private void finalizeFlow(ExecutableFlowBase flow) {
+ String id = flow == this.flow ? "" : flow.getNestedId();
+
+ // If it's not the starting flow, we'll create set of output props
+ // for the finished flow.
+ boolean succeeded = true;
+ Props previousOutput = null;
+
+ for (String end : flow.getEndNodes()) {
+ ExecutableNode node = flow.getExecutableNode(end);
+
+ if (node.getStatus() == Status.KILLED
+ || node.getStatus() == Status.FAILED
+ || node.getStatus() == Status.CANCELLED) {
+ succeeded = false;
+ }
+
+ Props output = node.getOutputProps();
+ if (output != null) {
+ output = Props.clone(output);
+ output.setParent(previousOutput);
+ previousOutput = output;
+ }
+ }
+
+ flow.setOutputProps(previousOutput);
+ if (!succeeded && (flow.getStatus() == Status.RUNNING)) {
+ flow.setStatus(Status.KILLED);
+ }
+
+ flow.setEndTime(System.currentTimeMillis());
+ flow.setUpdateTime(System.currentTimeMillis());
+ long durationSec = (flow.getEndTime() - flow.getStartTime()) / 1000;
+ switch (flow.getStatus()) {
+ case FAILED_FINISHING:
+ logger.info("Setting flow '" + id + "' status to FAILED in "
+ + durationSec + " seconds");
+ flow.setStatus(Status.FAILED);
+ break;
+ case FAILED:
+ case KILLED:
+ case CANCELLED:
+ case FAILED_SUCCEEDED:
+ logger.info("Flow '" + id + "' is set to " + flow.getStatus().toString()
+ + " in " + durationSec + " seconds");
+ break;
+ default:
+ flow.setStatus(Status.SUCCEEDED);
+ logger.info("Flow '" + id + "' is set to " + flow.getStatus().toString()
+ + " in " + durationSec + " seconds");
+ }
+
+ // If the finalized flow is actually the top level flow, then we finish
+ // the main loop.
+ if (flow instanceof ExecutableFlow) {
+ flowFinished = true;
+ }
+ }
+
+ private void prepareJobProperties(ExecutableNode node) throws IOException {
+ if (node instanceof ExecutableFlow) {
+ return;
+ }
+
+ Props props = null;
+ // 1. Shared properties (i.e. *.properties) for the jobs only. This takes
+ // the
+ // least precedence
+ if (!(node instanceof ExecutableFlowBase)) {
+ String sharedProps = node.getPropsSource();
+ if (sharedProps != null) {
+ props = this.sharedProps.get(sharedProps);
+ }
+ }
+
+ // The following is the hierarchical ordering of dependency resolution
+ // 2. Parent Flow Properties
+ ExecutableFlowBase parentFlow = node.getParentFlow();
+ if (parentFlow != null) {
+ Props flowProps = Props.clone(parentFlow.getInputProps());
+ flowProps.setEarliestAncestor(props);
+ props = flowProps;
+ }
+
+ // 3. Output Properties. The call creates a clone, so we can overwrite it.
+ Props outputProps = collectOutputProps(node);
+ if (outputProps != null) {
+ outputProps.setEarliestAncestor(props);
+ props = outputProps;
+ }
+
+ // 4. The job source.
+ Props jobSource = loadJobProps(node);
+ if (jobSource != null) {
+ jobSource.setParent(props);
+ props = jobSource;
+ }
+
+ node.setInputProps(props);
+ }
+
+ private Props loadJobProps(ExecutableNode node) throws IOException {
+ Props props = null;
+ String source = node.getJobSource();
+ if (source == null) {
+ return null;
+ }
+
+ // load the override props if any
+ try {
+ props =
+ projectLoader.fetchProjectProperty(flow.getProjectId(),
+ flow.getVersion(), node.getId() + ".jor");
+ } catch (ProjectManagerException e) {
+ e.printStackTrace();
+ logger.error("Error loading job override property for job "
+ + node.getId());
+ }
+
+ File path = new File(execDir, source);
+ if (props == null) {
+ // if no override prop, load the original one on disk
+ try {
+ props = new Props(null, path);
+ } catch (IOException e) {
+ e.printStackTrace();
+ logger.error("Error loading job file " + source + " for job "
+ + node.getId());
+ }
+ }
+ // setting this fake source as this will be used to determine the location
+ // of log files.
+ if (path.getPath() != null) {
+ props.setSource(path.getPath());
+ }
+ return props;
+ }
+
+ private void runExecutableNode(ExecutableNode node) throws IOException {
+ // Collect output props from the job's dependencies.
+ prepareJobProperties(node);
+
+ node.setStatus(Status.QUEUED);
+ JobRunner runner = createJobRunner(node);
+ logger.info("Submitting job '" + node.getNestedId() + "' to run.");
+ try {
+ executorService.submit(runner);
+ activeJobRunners.add(runner);
+ } catch (RejectedExecutionException e) {
+ logger.error(e);
+ }
+ ;
+ }
+
+ /**
+ * Determines what the state of the next node should be. Returns null if the
+ * node should not be run.
+ *
+ * @param node
+ * @return
+ */
+ public Status getImpliedStatus(ExecutableNode node) {
+ // If it's running or finished with 'SUCCEEDED', then don't even
+ // bother starting this job.
+ if (Status.isStatusRunning(node.getStatus())
+ || node.getStatus() == Status.SUCCEEDED) {
+ return null;
+ }
+
+ // Go through the node's dependencies. If all of the previous job's
+ // statuses are finished and not FAILED or KILLED, then we can safely
+ // run this job.
+ ExecutableFlowBase flow = node.getParentFlow();
+ boolean shouldKill = false;
+ for (String dependency : node.getInNodes()) {
+ ExecutableNode dependencyNode = flow.getExecutableNode(dependency);
+ Status depStatus = dependencyNode.getStatus();
+
+ if (!Status.isStatusFinished(depStatus)) {
+ return null;
+ } else if (depStatus == Status.FAILED || depStatus == Status.CANCELLED
+ || depStatus == Status.KILLED) {
+ // We propagate failures as KILLED states.
+ shouldKill = true;
+ }
+ }
+
+ // If it's disabled but ready to run, we want to make sure it continues
+ // being disabled.
+ if (node.getStatus() == Status.DISABLED
+ || node.getStatus() == Status.SKIPPED) {
+ return Status.SKIPPED;
+ }
+
+ // If the flow has failed, and we want to finish only the currently running
+ // jobs, we just
+ // kill everything else. We also kill, if the flow has been cancelled.
+ if (flowFailed
+ && failureAction == ExecutionOptions.FailureAction.FINISH_CURRENTLY_RUNNING) {
+ return Status.CANCELLED;
+ } else if (shouldKill || isKilled()) {
+ return Status.CANCELLED;
+ }
+
+ // All good to go, ready to run.
+ return Status.READY;
+ }
+
+ private Props collectOutputProps(ExecutableNode node) {
+ Props previousOutput = null;
+ // Iterate the in nodes again and create the dependencies
+ for (String dependency : node.getInNodes()) {
+ Props output =
+ node.getParentFlow().getExecutableNode(dependency).getOutputProps();
+ if (output != null) {
+ output = Props.clone(output);
+ output.setParent(previousOutput);
+ previousOutput = output;
+ }
+ }
+
+ return previousOutput;
+ }
+
+ private JobRunner createJobRunner(ExecutableNode node) {
+ // Load job file.
+ File path = new File(execDir, node.getJobSource());
+
+ JobRunner jobRunner =
+ new JobRunner(node, path.getParentFile(), executorLoader,
+ jobtypeManager);
+ if (watcher != null) {
+ jobRunner.setPipeline(watcher, pipelineLevel);
+ }
+ if (validateUserProxy) {
+ jobRunner.setValidatedProxyUsers(proxyUsers);
+ }
+
+ jobRunner.setDelayStart(node.getDelayedExecution());
+ jobRunner.setLogSettings(logger, jobLogFileSize, jobLogNumFiles);
+ jobRunner.addListener(listener);
+
+ return jobRunner;
+ }
+
+ public void pause(String user) {
+ synchronized (mainSyncObj) {
+ if (!flowFinished) {
+ logger.info("Flow paused by " + user);
+ flowPaused = true;
+ flow.setStatus(Status.PAUSED);
+
+ updateFlow();
+ } else {
+ logger.info("Cannot pause finished flow. Called by user " + user);
+ }
+ }
+
+ interrupt();
+ }
+
+ public void resume(String user) {
+ synchronized (mainSyncObj) {
+ if (!flowPaused) {
+ logger.info("Cannot resume flow that isn't paused");
+ } else {
+ logger.info("Flow resumed by " + user);
+ flowPaused = false;
+ if (flowFailed) {
+ flow.setStatus(Status.FAILED_FINISHING);
+ } else if (flowKilled) {
+ flow.setStatus(Status.KILLED);
+ } else {
+ flow.setStatus(Status.RUNNING);
+ }
+
+ updateFlow();
+ }
+ }
+
+ interrupt();
+ }
+
+ public void kill(String user) {
+ synchronized (mainSyncObj) {
+ logger.info("Flow killed by " + user);
+ flow.setStatus(Status.KILLED);
+ kill();
+ updateFlow();
+ }
+ interrupt();
+ }
+
+ private void kill() {
+ synchronized (mainSyncObj) {
+ logger.info("Kill has been called on flow " + execId);
+
+ // If the flow is paused, then we'll also unpause
+ flowPaused = false;
+ flowKilled = true;
+
+ if (watcher != null) {
+ logger.info("Watcher is attached. Stopping watcher.");
+ watcher.stopWatcher();
+ logger
+ .info("Watcher cancelled status is " + watcher.isWatchCancelled());
+ }
+
+ logger.info("Killing " + activeJobRunners.size() + " jobs.");
+ for (JobRunner runner : activeJobRunners) {
+ runner.kill();
+ }
+ }
+ }
+
+ public void retryFailures(String user) {
+ synchronized (mainSyncObj) {
+ logger.info("Retrying failures invoked by " + user);
+ retryFailedJobs = true;
+ interrupt();
+ }
+ }
+
+ // Walks the given (sub)flow bottom-up from its end nodes and resets
+ // failed/killed/cancelled nodes so they can be re-run. Nodes that should be
+ // (re)dispatched are collected into nodesToRetry. Embedded flows in a failed
+ // state are reset recursively. Finally the flow itself is reset to READY (if
+ // nothing inside ever started) or RUNNING.
+ private void resetFailedState(ExecutableFlowBase flow,
+ List<ExecutableNode> nodesToRetry) {
+ // bottom up
+ LinkedList<ExecutableNode> queue = new LinkedList<ExecutableNode>();
+ for (String id : flow.getEndNodes()) {
+ ExecutableNode node = flow.getExecutableNode(id);
+ queue.add(node);
+ }
+
+ // Tracks whether anything in this flow ever started; -1 means "nothing ran".
+ long maxStartTime = -1;
+ while (!queue.isEmpty()) {
+ ExecutableNode node = queue.poll();
+ Status oldStatus = node.getStatus();
+ maxStartTime = Math.max(node.getStartTime(), maxStartTime);
+
+ long currentTime = System.currentTimeMillis();
+ if (node.getStatus() == Status.SUCCEEDED) {
+ // This is a candidate parent for restart
+ nodesToRetry.add(node);
+ continue;
+ } else if (node.getStatus() == Status.RUNNING) {
+ continue;
+ } else if (node.getStatus() == Status.SKIPPED) {
+ // SKIPPED becomes DISABLED again with its timestamps cleared.
+ node.setStatus(Status.DISABLED);
+ node.setEndTime(-1);
+ node.setStartTime(-1);
+ node.setUpdateTime(currentTime);
+ } else if (node instanceof ExecutableFlowBase) {
+ ExecutableFlowBase base = (ExecutableFlowBase) node;
+ switch (base.getStatus()) {
+ case CANCELLED:
+ node.setStatus(Status.READY);
+ node.setEndTime(-1);
+ node.setStartTime(-1);
+ node.setUpdateTime(currentTime);
+ // Break out of the switch. We'll reset the flow just like a normal
+ // node
+ break;
+ case KILLED:
+ case FAILED:
+ case FAILED_FINISHING:
+ // Recurse into a failed embedded flow; its own end nodes drive it.
+ resetFailedState(base, nodesToRetry);
+ continue;
+ default:
+ // Continue the while loop. If the job is in a finished state that's
+ // not
+ // a failure, we don't want to reset the job.
+ continue;
+ }
+ } else if (node.getStatus() == Status.CANCELLED) {
+ // Not a flow, but killed
+ node.setStatus(Status.READY);
+ node.setStartTime(-1);
+ node.setEndTime(-1);
+ node.setUpdateTime(currentTime);
+ } else if (node.getStatus() == Status.FAILED
+ || node.getStatus() == Status.KILLED) {
+ node.resetForRetry();
+ nodesToRetry.add(node);
+ }
+
+ // Log state transitions for plain jobs (sub-flows log via recursion).
+ if (!(node instanceof ExecutableFlowBase)
+ && node.getStatus() != oldStatus) {
+ logger.info("Resetting job '" + node.getNestedId() + "' from "
+ + oldStatus + " to " + node.getStatus());
+ }
+
+ // Continue the bottom-up traversal through this node's parents.
+ for (String inId : node.getInNodes()) {
+ ExecutableNode nodeUp = flow.getExecutableNode(inId);
+ queue.add(nodeUp);
+ }
+ }
+
+ // At this point, the following code will reset the flow
+ Status oldFlowState = flow.getStatus();
+ if (maxStartTime == -1) {
+ // Nothing has run inside the flow, so we assume the flow hasn't even
+ // started running yet.
+ flow.setStatus(Status.READY);
+ } else {
+ flow.setStatus(Status.RUNNING);
+
+ // Add any READY start nodes. Usually it means the flow started, but the
+ // start node has not.
+ for (String id : flow.getStartNodes()) {
+ ExecutableNode node = flow.getExecutableNode(id);
+ if (node.getStatus() == Status.READY
+ || node.getStatus() == Status.DISABLED) {
+ nodesToRetry.add(node);
+ }
+ }
+ }
+ flow.setUpdateTime(System.currentTimeMillis());
+ flow.setEndTime(-1);
+ logger.info("Resetting flow '" + flow.getNestedId() + "' from "
+ + oldFlowState + " to " + flow.getStatus());
+ }
+
+ // Interrupts the main flow runner thread so it wakes from waits and
+ // re-checks pause/kill/retry state.
+ private void interrupt() {
+ flowRunnerThread.interrupt();
+ }
+
+ // Listens to events fired by individual JobRunners. Status changes trigger a
+ // flow persistence; job completion records the finished node, wakes the main
+ // thread, and re-fires the event to this FlowRunner's own listeners.
+ private class JobRunnerEventListener implements EventListener {
+ public JobRunnerEventListener() {
+ }
+
+ @Override
+ public synchronized void handleEvent(Event event) {
+ JobRunner runner = (JobRunner) event.getRunner();
+
+ if (event.getType() == Type.JOB_STATUS_CHANGED) {
+ updateFlow();
+ } else if (event.getType() == Type.JOB_FINISHED) {
+ ExecutableNode node = runner.getNode();
+ // Duration in whole seconds, for the log line below.
+ long seconds = (node.getEndTime() - node.getStartTime()) / 1000;
+ synchronized (mainSyncObj) {
+ logger.info("Job " + node.getNestedId() + " finished with status "
+ + node.getStatus() + " in " + seconds + " seconds");
+
+ // Cancellation is handled in the main thread, but if the flow is
+ // paused, the main thread is paused too.
+ // This unpauses the flow for cancellation.
+ if (flowPaused && node.getStatus() == Status.FAILED
+ && failureAction == FailureAction.CANCEL_ALL) {
+ flowPaused = false;
+ }
+
+ finishedNodes.add(node);
+ node.getParentFlow().setUpdateTime(System.currentTimeMillis());
+ interrupt();
+ fireEventListeners(event);
+ }
+ }
+ }
+ }
+
+ // True once kill() has been invoked on this flow.
+ public boolean isKilled() {
+ return flowKilled;
+ }
+
+ // The flow being executed by this runner.
+ public ExecutableFlow getExecutableFlow() {
+ return flow;
+ }
+
+ // Log file for the flow itself (may be null if logging was never set up).
+ public File getFlowLogFile() {
+ return logFile;
+ }
+
+ // Resolves the log file for a given job attempt, located next to the job's
+ // source file in the execution directory. Returns null if the file does not
+ // exist (e.g. the attempt never ran).
+ public File getJobLogFile(String jobId, int attempt) {
+ ExecutableNode node = flow.getExecutableNodePath(jobId);
+ File path = new File(execDir, node.getJobSource());
+
+ String logFileName = JobRunner.createLogFileName(node, attempt);
+ File logFile = new File(path.getParentFile(), logFileName);
+
+ if (!logFile.exists()) {
+ return null;
+ }
+
+ return logFile;
+ }
+
+ // Resolves the attachments file for a given job attempt (same directory as
+ // the job source). Returns null if no attachment file was written.
+ public File getJobAttachmentFile(String jobId, int attempt) {
+ ExecutableNode node = flow.getExecutableNodePath(jobId);
+ File path = new File(execDir, node.getJobSource());
+
+ String attachmentFileName =
+ JobRunner.createAttachmentFileName(node, attempt);
+ File attachmentFile = new File(path.getParentFile(), attachmentFileName);
+ if (!attachmentFile.exists()) {
+ return null;
+ }
+ return attachmentFile;
+ }
+
+ // Resolves the metadata file for a given job attempt (same directory as the
+ // job source). Returns null if the file does not exist.
+ public File getJobMetaDataFile(String jobId, int attempt) {
+ ExecutableNode node = flow.getExecutableNodePath(jobId);
+ File path = new File(execDir, node.getJobSource());
+
+ String metaDataFileName = JobRunner.createMetaDataFileName(node, attempt);
+ File metaDataFile = new File(path.getParentFile(), metaDataFileName);
+
+ if (!metaDataFile.exists()) {
+ return null;
+ }
+
+ return metaDataFile;
+ }
+
+ // True while the main runner thread exists and is alive.
+ public boolean isRunnerThreadAlive() {
+ if (flowRunnerThread != null) {
+ return flowRunnerThread.isAlive();
+ }
+ return false;
+ }
+
+ // True once this runner's job thread pool has been shut down.
+ public boolean isThreadPoolShutdown() {
+ return executorService.isShutdown();
+ }
+
+ // Number of jobs currently executing under this flow.
+ public int getNumRunningJobs() {
+ return activeJobRunners.size();
+ }
}
src/main/java/azkaban/execapp/FlowRunnerManager.java 1344(+702 -642)
diff --git a/src/main/java/azkaban/execapp/FlowRunnerManager.java b/src/main/java/azkaban/execapp/FlowRunnerManager.java
index 7b75756..67c02eb 100644
--- a/src/main/java/azkaban/execapp/FlowRunnerManager.java
+++ b/src/main/java/azkaban/execapp/FlowRunnerManager.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -61,643 +61,703 @@ import azkaban.utils.Props;
*
*/
public class FlowRunnerManager implements EventListener {
- private static Logger logger = Logger.getLogger(FlowRunnerManager.class);
- private File executionDirectory;
- private File projectDirectory;
-
- private static final long RECENTLY_FINISHED_TIME_TO_LIVE = 60*1000; // recently finished secs to clean up. 1 minute
-
- private static final int DEFAULT_NUM_EXECUTING_FLOWS = 30;
- private Map<Pair<Integer,Integer>, ProjectVersion> installedProjects = new ConcurrentHashMap<Pair<Integer,Integer>, ProjectVersion>();
- private Map<Integer, FlowRunner> runningFlows = new ConcurrentHashMap<Integer, FlowRunner>();
- private Map<Integer, ExecutableFlow> recentlyFinishedFlows = new ConcurrentHashMap<Integer, ExecutableFlow>();
- private LinkedBlockingQueue<FlowRunner> flowQueue = new LinkedBlockingQueue<FlowRunner>();
- private int numThreads = DEFAULT_NUM_EXECUTING_FLOWS;
-
- private ExecutorService executorService;
- private SubmitterThread submitterThread;
- private CleanerThread cleanerThread;
- private int numJobThreadPerFlow = 10;
-
- private ExecutorLoader executorLoader;
- private ProjectLoader projectLoader;
-
- private JobTypeManager jobtypeManager;
-
- private Props globalProps = null;
-
- private final Props azkabanProps;
-
- private long lastSubmitterThreadCheckTime = -1;
- private long lastCleanerThreadCheckTime = -1;
- private long executionDirRetention = 1*24*60*60*1000;
-
- // We want to limit the log sizes to about 20 megs
- private String jobLogChunkSize = "5MB";
- private int jobLogNumFiles = 4;
-
- // If true, jobs will validate proxy user against a list of valid proxy users.
- private boolean validateProxyUser = false;
-
- private Object executionDirDeletionSync = new Object();
-
- public FlowRunnerManager(Props props, ExecutorLoader executorLoader, ProjectLoader projectLoader, ClassLoader parentClassLoader) throws IOException {
- executionDirectory = new File(props.getString("azkaban.execution.dir", "executions"));
- projectDirectory = new File(props.getString("azkaban.project.dir", "projects"));
-
- azkabanProps = props;
-
- //JobWrappingFactory.init(props, getClass().getClassLoader());
- executionDirRetention = props.getLong("execution.dir.retention", executionDirRetention);
- logger.info("Execution dir retention set to " + executionDirRetention + " ms");
-
- if (!executionDirectory.exists()) {
- executionDirectory.mkdirs();
- }
- if (!projectDirectory.exists()) {
- projectDirectory.mkdirs();
- }
-
- installedProjects = loadExistingProjects();
-
- //azkaban.temp.dir
- numThreads = props.getInt("executor.flow.threads", DEFAULT_NUM_EXECUTING_FLOWS);
- numJobThreadPerFlow = props.getInt("flow.num.job.threads", numJobThreadPerFlow);
- executorService = Executors.newFixedThreadPool(numThreads);
-
- this.executorLoader = executorLoader;
- this.projectLoader = projectLoader;
-
- this.jobLogChunkSize = azkabanProps.getString("job.log.chunk.size", "5MB");
- this.jobLogNumFiles = azkabanProps.getInt("job.log.backup.index", 4);
-
- this.validateProxyUser = azkabanProps.getBoolean("proxy.user.lock.down", false);
-
- submitterThread = new SubmitterThread(flowQueue);
- submitterThread.start();
-
- cleanerThread = new CleanerThread();
- cleanerThread.start();
-
- String globalPropsPath = props.getString("executor.global.properties", null);
- if (globalPropsPath != null) {
- globalProps = new Props(null, globalPropsPath);
- }
-
- jobtypeManager = new JobTypeManager(
- props.getString(AzkabanExecutorServer.JOBTYPE_PLUGIN_DIR, JobTypeManager.DEFAULT_JOBTYPEPLUGINDIR),
- globalProps,
- parentClassLoader);
- }
-
- private Map<Pair<Integer, Integer>, ProjectVersion> loadExistingProjects() {
- Map<Pair<Integer, Integer>, ProjectVersion> allProjects = new HashMap<Pair<Integer,Integer>, ProjectVersion>();
- for(File project : projectDirectory.listFiles(new FilenameFilter() {
-
- String pattern = "[0-9]+\\.[0-9]+";
- @Override
- public boolean accept(File dir, String name) {
- return name.matches(pattern);
- }
- })) {
- if(project.isDirectory()) {
- try {
- String fileName = new File(project.getAbsolutePath()).getName();
- int projectId = Integer.parseInt(fileName.split("\\.")[0]);
- int versionNum = Integer.parseInt(fileName.split("\\.")[1]);
- ProjectVersion version = new ProjectVersion(projectId, versionNum, project);
- allProjects.put(new Pair<Integer, Integer>(projectId, versionNum), version);
- }
- catch (Exception e) {
- e.printStackTrace();
- }
- }
- }
- return allProjects;
- }
-
- public Props getGlobalProps() {
- return globalProps;
- }
-
- public void setGlobalProps(Props globalProps) {
- this.globalProps = globalProps;
- }
-
- private class SubmitterThread extends Thread {
- private BlockingQueue<FlowRunner> queue;
- private boolean shutdown = false;
-
- public SubmitterThread(BlockingQueue<FlowRunner> queue) {
- this.setName("FlowRunnerManager-Submitter-Thread");
- this.queue = queue;
- }
-
- @SuppressWarnings("unused")
- public void shutdown() {
- shutdown = true;
- this.interrupt();
- }
-
- public void run() {
- while (!shutdown) {
- try {
- lastSubmitterThreadCheckTime = System.currentTimeMillis();
- FlowRunner flowRunner = queue.take();
- executorService.submit(flowRunner);
- } catch (InterruptedException e) {
- logger.info("Interrupted. Probably to shut down.");
- }
- }
- }
- }
-
- private class CleanerThread extends Thread {
- // Every hour, clean execution dir.
- private static final long EXECUTION_DIR_CLEAN_INTERVAL_MS = 60*60*1000;
- // Every 5 mins clean the old project dir
- private static final long OLD_PROJECT_DIR_INTERVAL_MS = 5*60*1000;
- // Every 2 mins clean the recently finished list
- private static final long RECENTLY_FINISHED_INTERVAL_MS = 2*60*1000;
-
- private boolean shutdown = false;
- private long lastExecutionDirCleanTime = -1;
- private long lastOldProjectCleanTime = -1;
- private long lastRecentlyFinishedCleanTime = -1;
-
- public CleanerThread() {
- this.setName("FlowRunnerManager-Cleaner-Thread");
- }
-
- @SuppressWarnings("unused")
- public void shutdown() {
- shutdown = true;
- this.interrupt();
- }
-
- public void run() {
- while (!shutdown) {
- synchronized (this) {
- try {
- lastCleanerThreadCheckTime = System.currentTimeMillis();
-
- // Cleanup old stuff.
- long currentTime = System.currentTimeMillis();
- if (currentTime - RECENTLY_FINISHED_INTERVAL_MS > lastRecentlyFinishedCleanTime) {
- logger.info("Cleaning recently finished");
- cleanRecentlyFinished();
- lastRecentlyFinishedCleanTime = currentTime;
- }
-
- if (currentTime - OLD_PROJECT_DIR_INTERVAL_MS > lastOldProjectCleanTime) {
- logger.info("Cleaning old projects");
- cleanOlderProjects();
-
- lastOldProjectCleanTime = currentTime;
- }
-
- if (currentTime - EXECUTION_DIR_CLEAN_INTERVAL_MS > lastExecutionDirCleanTime) {
- logger.info("Cleaning old execution dirs");
- cleanOlderExecutionDirs();
- lastExecutionDirCleanTime = currentTime;
- }
-
- wait(RECENTLY_FINISHED_TIME_TO_LIVE);
- } catch (InterruptedException e) {
- logger.info("Interrupted. Probably to shut down.");
- }
- }
- }
- }
-
- private void cleanOlderExecutionDirs() {
- File dir = executionDirectory;
-
- final long pastTimeThreshold = System.currentTimeMillis() - executionDirRetention;
- File[] executionDirs = dir.listFiles(new FileFilter() {
- @Override
- public boolean accept(File path) {
- if (path.isDirectory() && path.lastModified() < pastTimeThreshold) {
- return true;
- }
- return false;
- }
- });
-
- for (File exDir : executionDirs) {
- try {
- int execId = Integer.valueOf(exDir.getName());
- if (runningFlows.containsKey(execId) || recentlyFinishedFlows.containsKey(execId)) {
- continue;
- }
- }
- catch (NumberFormatException e) {
- logger.error("Can't delete exec dir " + exDir.getName() + " it is not a number");
- continue;
- }
-
- synchronized(executionDirDeletionSync) {
- try {
- FileUtils.deleteDirectory(exDir);
- } catch (IOException e) {
- logger.error("Error cleaning execution dir " + exDir.getPath(), e);
- }
- }
- }
- }
-
- private void cleanRecentlyFinished() {
- long cleanupThreshold = System.currentTimeMillis() - RECENTLY_FINISHED_TIME_TO_LIVE;
- ArrayList<Integer> executionToKill = new ArrayList<Integer>();
- for (ExecutableFlow flow : recentlyFinishedFlows.values()) {
- if (flow.getEndTime() < cleanupThreshold) {
- executionToKill.add(flow.getExecutionId());
- }
- }
-
- for (Integer id: executionToKill) {
- logger.info("Cleaning execution " + id + " from recently finished flows list.");
- recentlyFinishedFlows.remove(id);
- }
- }
-
- private void cleanOlderProjects() {
- Map<Integer, ArrayList<ProjectVersion>> projectVersions = new HashMap<Integer, ArrayList<ProjectVersion>>();
- for (ProjectVersion version : installedProjects.values() ) {
- ArrayList<ProjectVersion> versionList = projectVersions.get(version.getProjectId());
- if (versionList == null) {
- versionList = new ArrayList<ProjectVersion>();
- projectVersions.put(version.getProjectId(), versionList);
- }
- versionList.add(version);
- }
-
- HashSet<Pair<Integer,Integer>> activeProjectVersions = new HashSet<Pair<Integer,Integer>>();
- for(FlowRunner runner: runningFlows.values()) {
- ExecutableFlow flow = runner.getExecutableFlow();
- activeProjectVersions.add(new Pair<Integer,Integer>(flow.getProjectId(), flow.getVersion()));
- }
-
- for (Map.Entry<Integer, ArrayList<ProjectVersion>> entry: projectVersions.entrySet()) {
- //Integer projectId = entry.getKey();
- ArrayList<ProjectVersion> installedVersions = entry.getValue();
-
- // Keep one version of the project around.
- if (installedVersions.size() == 1) {
- continue;
- }
-
- Collections.sort(installedVersions);
- for (int i = 0; i < installedVersions.size() - 1; ++i) {
- ProjectVersion version = installedVersions.get(i);
- Pair<Integer,Integer> versionKey = new Pair<Integer,Integer>(version.getProjectId(), version.getVersion());
- if (!activeProjectVersions.contains(versionKey)) {
- try {
- logger.info("Removing old unused installed project " + version.getProjectId() + ":" + version.getVersion());
- version.deleteDirectory();
- installedProjects.remove(new Pair<Integer, Integer>(version.getProjectId(), version.getVersion()));
- } catch (IOException e) {
- e.printStackTrace();
- }
-
- installedVersions.remove(versionKey);
- }
- }
- }
- }
- }
-
-
- public void submitFlow(int execId) throws ExecutorManagerException {
- // Load file and submit
- if (runningFlows.containsKey(execId)) {
- throw new ExecutorManagerException("Execution " + execId + " is already running.");
- }
-
- ExecutableFlow flow = null;
- flow = executorLoader.fetchExecutableFlow(execId);
- if (flow == null) {
- throw new ExecutorManagerException("Error loading flow with exec " + execId);
- }
-
- // Sets up the project files and execution directory.
- setupFlow(flow);
-
- // Setup flow runner
- FlowWatcher watcher = null;
- ExecutionOptions options = flow.getExecutionOptions();
- if (options.getPipelineExecutionId() != null) {
- Integer pipelineExecId = options.getPipelineExecutionId();
- FlowRunner runner = runningFlows.get(pipelineExecId);
-
- if (runner != null) {
- watcher = new LocalFlowWatcher(runner);
- }
- else {
- watcher = new RemoteFlowWatcher(pipelineExecId, executorLoader);
- }
- }
-
- int numJobThreads = numJobThreadPerFlow;
- if(options.getFlowParameters().containsKey("flow.num.job.threads")) {
- try{
- int numJobs = Integer.valueOf(options.getFlowParameters().get("flow.num.job.threads"));
- if(numJobs > 0 && numJobs <= numJobThreads) {
- numJobThreads = numJobs;
- }
- } catch (Exception e) {
- throw new ExecutorManagerException("Failed to set the number of job threads " + options.getFlowParameters().get("flow.num.job.threads") + " for flow " + execId, e);
- }
- }
-
- FlowRunner runner = new FlowRunner(flow, executorLoader, projectLoader, jobtypeManager);
- runner.setFlowWatcher(watcher)
- .setJobLogSettings(jobLogChunkSize, jobLogNumFiles)
- .setValidateProxyUser(validateProxyUser)
- .setNumJobThreads(numJobThreads)
- .addListener(this);
-
- // Check again.
- if (runningFlows.containsKey(execId)) {
- throw new ExecutorManagerException("Execution " + execId + " is already running.");
- }
-
- // Finally, queue the sucker.
- runningFlows.put(execId, runner);
- flowQueue.add(runner);
- }
-
- private void setupFlow(ExecutableFlow flow) throws ExecutorManagerException {
- int execId = flow.getExecutionId();
- File execPath = new File(executionDirectory, String.valueOf(execId));
- flow.setExecutionPath(execPath.getPath());
- logger.info("Flow " + execId + " submitted with path " + execPath.getPath());
- execPath.mkdirs();
-
- // We're setting up the installed projects. First time, it may take a while to set up.
- Pair<Integer, Integer> projectVersionKey = new Pair<Integer,Integer>(flow.getProjectId(), flow.getVersion());
-
- // We set up project versions this way
- ProjectVersion projectVersion = null;
- synchronized(installedProjects) {
- projectVersion = installedProjects.get(projectVersionKey);
- if (projectVersion == null) {
- projectVersion = new ProjectVersion(flow.getProjectId(), flow.getVersion());
- installedProjects.put(projectVersionKey, projectVersion);
- }
- }
-
- try {
- projectVersion.setupProjectFiles(projectLoader, projectDirectory, logger);
- projectVersion.copyCreateSymlinkDirectory(execPath);
- } catch (Exception e) {
- e.printStackTrace();
- if (execPath.exists()) {
- try {
- FileUtils.deleteDirectory(execPath);
- }
- catch (IOException e1) {
- e1.printStackTrace();
- }
- }
- throw new ExecutorManagerException(e);
- }
- }
-
- public void cancelFlow(int execId, String user) throws ExecutorManagerException {
- FlowRunner runner = runningFlows.get(execId);
-
- if (runner == null) {
- throw new ExecutorManagerException("Execution " + execId + " is not running.");
- }
-
- runner.kill(user);
- }
-
- public void pauseFlow(int execId, String user) throws ExecutorManagerException {
- FlowRunner runner = runningFlows.get(execId);
-
- if (runner == null) {
- throw new ExecutorManagerException("Execution " + execId + " is not running.");
- }
-
- runner.pause(user);
- }
-
- public void resumeFlow(int execId, String user) throws ExecutorManagerException {
- FlowRunner runner = runningFlows.get(execId);
-
- if (runner == null) {
- throw new ExecutorManagerException("Execution " + execId + " is not running.");
- }
-
- runner.resume(user);
- }
-
- public void retryFailures(int execId, String user) throws ExecutorManagerException {
- FlowRunner runner = runningFlows.get(execId);
-
- if (runner == null) {
- throw new ExecutorManagerException("Execution " + execId + " is not running.");
- }
-
- runner.retryFailures(user);
- }
-
- public ExecutableFlow getExecutableFlow(int execId) {
- FlowRunner runner = runningFlows.get(execId);
- if (runner == null) {
- return recentlyFinishedFlows.get(execId);
- }
- return runner.getExecutableFlow();
- }
-
- @Override
- public void handleEvent(Event event) {
- if (event.getType() == Event.Type.FLOW_FINISHED) {
-
- FlowRunner flowRunner = (FlowRunner)event.getRunner();
- ExecutableFlow flow = flowRunner.getExecutableFlow();
-
- recentlyFinishedFlows.put(flow.getExecutionId(), flow);
- logger.info("Flow " + flow.getExecutionId() + " is finished. Adding it to recently finished flows list.");
- runningFlows.remove(flow.getExecutionId());
- }
- }
-
- public LogData readFlowLogs(int execId, int startByte, int length) throws ExecutorManagerException {
- FlowRunner runner = runningFlows.get(execId);
- if (runner == null) {
- throw new ExecutorManagerException("Running flow " + execId + " not found.");
- }
-
- File dir = runner.getExecutionDir();
- if (dir != null && dir.exists()) {
- try {
- synchronized(executionDirDeletionSync) {
- if (!dir.exists()) {
- throw new ExecutorManagerException("Execution dir file doesn't exist. Probably has beend deleted");
- }
-
- File logFile = runner.getFlowLogFile();
- if (logFile != null && logFile.exists()) {
- return FileIOUtils.readUtf8File(logFile, startByte, length);
- }
- else {
- throw new ExecutorManagerException("Flow log file doesn't exist.");
- }
- }
- } catch (IOException e) {
- throw new ExecutorManagerException(e);
- }
- }
-
- throw new ExecutorManagerException("Error reading file. Log directory doesn't exist.");
- }
-
- public LogData readJobLogs(int execId, String jobId, int attempt, int startByte, int length) throws ExecutorManagerException {
- FlowRunner runner = runningFlows.get(execId);
- if (runner == null) {
- throw new ExecutorManagerException("Running flow " + execId + " not found.");
- }
-
- File dir = runner.getExecutionDir();
- if (dir != null && dir.exists()) {
- try {
- synchronized (executionDirDeletionSync) {
- if (!dir.exists()) {
- throw new ExecutorManagerException("Execution dir file doesn't exist. Probably has beend deleted");
- }
- File logFile = runner.getJobLogFile(jobId, attempt);
- if (logFile != null && logFile.exists()) {
- return FileIOUtils.readUtf8File(logFile, startByte, length);
- }
- else {
- throw new ExecutorManagerException("Job log file doesn't exist.");
- }
- }
- } catch (IOException e) {
- throw new ExecutorManagerException(e);
- }
- }
-
- throw new ExecutorManagerException("Error reading file. Log directory doesn't exist.");
- }
-
- public List<Object> readJobAttachments(int execId, String jobId, int attempt)
- throws ExecutorManagerException {
- FlowRunner runner = runningFlows.get(execId);
- if (runner == null) {
- throw new ExecutorManagerException(
- "Running flow " + execId + " not found.");
- }
-
- File dir = runner.getExecutionDir();
- if (dir == null || !dir.exists()) {
- throw new ExecutorManagerException(
- "Error reading file. Log directory doesn't exist.");
- }
-
- try {
- synchronized (executionDirDeletionSync) {
- if (!dir.exists()) {
- throw new ExecutorManagerException(
- "Execution dir file doesn't exist. Probably has beend deleted");
- }
-
- File attachmentFile = runner.getJobAttachmentFile(jobId, attempt);
- if (attachmentFile == null || !attachmentFile.exists()) {
- return null;
- }
-
- @SuppressWarnings("unchecked")
- List<Object> jobAttachments = (ArrayList<Object>) JSONUtils.parseJSONFromFile(attachmentFile);
-
- return jobAttachments;
- }
- }
- catch (IOException e) {
- throw new ExecutorManagerException(e);
- }
- }
-
- public JobMetaData readJobMetaData(int execId, String jobId, int attempt, int startByte, int length) throws ExecutorManagerException {
- FlowRunner runner = runningFlows.get(execId);
- if (runner == null) {
- throw new ExecutorManagerException("Running flow " + execId + " not found.");
- }
-
- File dir = runner.getExecutionDir();
- if (dir != null && dir.exists()) {
- try {
- synchronized(executionDirDeletionSync) {
- if (!dir.exists()) {
- throw new ExecutorManagerException("Execution dir file doesn't exist. Probably has beend deleted");
- }
- File metaDataFile = runner.getJobMetaDataFile(jobId, attempt);
- if (metaDataFile != null && metaDataFile.exists()) {
- return FileIOUtils.readUtf8MetaDataFile(metaDataFile, startByte, length);
- }
- else {
- throw new ExecutorManagerException("Job log file doesn't exist.");
- }
- }
- } catch (IOException e) {
- throw new ExecutorManagerException(e);
- }
- }
-
- throw new ExecutorManagerException("Error reading file. Log directory doesn't exist.");
- }
-
- public long getLastCleanerThreadCheckTime() {
- return lastCleanerThreadCheckTime;
- }
-
- public long getLastSubmitterThreadCheckTime() {
- return lastSubmitterThreadCheckTime;
- }
-
- public boolean isSubmitterThreadActive() {
- return this.submitterThread.isAlive();
- }
-
- public boolean isCleanerThreadActive() {
- return this.cleanerThread.isAlive();
- }
-
- public State getSubmitterThreadState() {
- return this.submitterThread.getState();
- }
-
- public State getCleanerThreadState() {
- return this.cleanerThread.getState();
- }
-
- public boolean isExecutorThreadPoolShutdown() {
- return executorService.isShutdown();
- }
-
- public int getNumExecutingFlows() {
- return runningFlows.size();
- }
-
- public String getRunningFlowIds() {
- ArrayList<Integer> ids = new ArrayList<Integer>(runningFlows.keySet());
- Collections.sort(ids);
- return ids.toString();
- }
-
- public int getNumExecutingJobs() {
- int jobCount = 0;
- for (FlowRunner runner: runningFlows.values()) {
- jobCount += runner.getNumRunningJobs();
- }
-
- return jobCount;
- }
-
- public void reloadJobTypePlugins() throws JobTypeManagerException {
- jobtypeManager.loadPlugins();
- }
+ private static Logger logger = Logger.getLogger(FlowRunnerManager.class);
+ // Root directories for per-execution working dirs and installed projects.
+ private File executionDirectory;
+ private File projectDirectory;
+
+ // recently finished secs to clean up. 1 minute
+ private static final long RECENTLY_FINISHED_TIME_TO_LIVE = 60 * 1000;
+
+ private static final int DEFAULT_NUM_EXECUTING_FLOWS = 30;
+ // Installed project files on disk, keyed by (projectId, version).
+ private Map<Pair<Integer, Integer>, ProjectVersion> installedProjects =
+ new ConcurrentHashMap<Pair<Integer, Integer>, ProjectVersion>();
+ // Currently executing flows, keyed by execution id.
+ private Map<Integer, FlowRunner> runningFlows =
+ new ConcurrentHashMap<Integer, FlowRunner>();
+ // Flows kept around briefly after completion (see cleanRecentlyFinished).
+ private Map<Integer, ExecutableFlow> recentlyFinishedFlows =
+ new ConcurrentHashMap<Integer, ExecutableFlow>();
+ // Queue drained by SubmitterThread into the executor service.
+ private LinkedBlockingQueue<FlowRunner> flowQueue =
+ new LinkedBlockingQueue<FlowRunner>();
+ private int numThreads = DEFAULT_NUM_EXECUTING_FLOWS;
+
+ private ExecutorService executorService;
+ private SubmitterThread submitterThread;
+ private CleanerThread cleanerThread;
+ private int numJobThreadPerFlow = 10;
+
+ private ExecutorLoader executorLoader;
+ private ProjectLoader projectLoader;
+
+ private JobTypeManager jobtypeManager;
+
+ private Props globalProps = null;
+
+ private final Props azkabanProps;
+
+ // Heartbeat timestamps used by the JMX/health getters below.
+ private long lastSubmitterThreadCheckTime = -1;
+ private long lastCleanerThreadCheckTime = -1;
+ // Default retention for execution dirs: 1 day in ms (overridable via props).
+ private long executionDirRetention = 1 * 24 * 60 * 60 * 1000;
+
+ // We want to limit the log sizes to about 20 megs
+ private String jobLogChunkSize = "5MB";
+ private int jobLogNumFiles = 4;
+
+ // If true, jobs will validate proxy user against a list of valid proxy users.
+ private boolean validateProxyUser = false;
+
+ // Guards execution-dir deletion against concurrent log/metadata reads.
+ private Object executionDirDeletionSync = new Object();
+
+ // Builds the manager: resolves/creates the execution and project dirs, loads
+ // already-installed project versions from disk, sizes the flow thread pool
+ // from config, starts the submitter and cleaner daemon threads, loads the
+ // optional global properties file, and initializes the jobtype plugin
+ // manager. Throws IOException if the global properties file cannot be read.
+ public FlowRunnerManager(Props props, ExecutorLoader executorLoader,
+ ProjectLoader projectLoader, ClassLoader parentClassLoader)
+ throws IOException {
+ executionDirectory =
+ new File(props.getString("azkaban.execution.dir", "executions"));
+ projectDirectory =
+ new File(props.getString("azkaban.project.dir", "projects"));
+
+ azkabanProps = props;
+
+ // JobWrappingFactory.init(props, getClass().getClassLoader());
+ executionDirRetention =
+ props.getLong("execution.dir.retention", executionDirRetention);
+ logger.info("Execution dir retention set to " + executionDirRetention
+ + " ms");
+
+ if (!executionDirectory.exists()) {
+ executionDirectory.mkdirs();
+ }
+ if (!projectDirectory.exists()) {
+ projectDirectory.mkdirs();
+ }
+
+ installedProjects = loadExistingProjects();
+
+ // azkaban.temp.dir
+ numThreads =
+ props.getInt("executor.flow.threads", DEFAULT_NUM_EXECUTING_FLOWS);
+ numJobThreadPerFlow =
+ props.getInt("flow.num.job.threads", numJobThreadPerFlow);
+ executorService = Executors.newFixedThreadPool(numThreads);
+
+ this.executorLoader = executorLoader;
+ this.projectLoader = projectLoader;
+
+ this.jobLogChunkSize = azkabanProps.getString("job.log.chunk.size", "5MB");
+ this.jobLogNumFiles = azkabanProps.getInt("job.log.backup.index", 4);
+
+ this.validateProxyUser =
+ azkabanProps.getBoolean("proxy.user.lock.down", false);
+
+ submitterThread = new SubmitterThread(flowQueue);
+ submitterThread.start();
+
+ cleanerThread = new CleanerThread();
+ cleanerThread.start();
+
+ String globalPropsPath =
+ props.getString("executor.global.properties", null);
+ if (globalPropsPath != null) {
+ globalProps = new Props(null, globalPropsPath);
+ }
+
+ jobtypeManager =
+ new JobTypeManager(props.getString(
+ AzkabanExecutorServer.JOBTYPE_PLUGIN_DIR,
+ JobTypeManager.DEFAULT_JOBTYPEPLUGINDIR), globalProps,
+ parentClassLoader);
+ }
+
+ // Scans the project directory for subdirectories named "<projectId>.<version>"
+ // and builds the installed-projects map from them. Directories whose names
+ // fail to parse are skipped (exception printed, not rethrown) so one bad
+ // entry doesn't abort startup.
+ private Map<Pair<Integer, Integer>, ProjectVersion> loadExistingProjects() {
+ Map<Pair<Integer, Integer>, ProjectVersion> allProjects =
+ new HashMap<Pair<Integer, Integer>, ProjectVersion>();
+ for (File project : projectDirectory.listFiles(new FilenameFilter() {
+
+ String pattern = "[0-9]+\\.[0-9]+";
+
+ @Override
+ public boolean accept(File dir, String name) {
+ return name.matches(pattern);
+ }
+ })) {
+ if (project.isDirectory()) {
+ try {
+ String fileName = new File(project.getAbsolutePath()).getName();
+ int projectId = Integer.parseInt(fileName.split("\\.")[0]);
+ int versionNum = Integer.parseInt(fileName.split("\\.")[1]);
+ ProjectVersion version =
+ new ProjectVersion(projectId, versionNum, project);
+ allProjects.put(new Pair<Integer, Integer>(projectId, versionNum),
+ version);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ return allProjects;
+ }
+
+ // Global properties shared with executing flows (may be null if no
+ // executor.global.properties file was configured).
+ public Props getGlobalProps() {
+ return globalProps;
+ }
+
+ public void setGlobalProps(Props globalProps) {
+ this.globalProps = globalProps;
+ }
+
+ // Daemon-style worker that drains queued FlowRunners into the executor
+ // service. Records a heartbeat timestamp on each loop iteration; interrupts
+ // during take() are treated as shutdown signals (logged, loop re-checks the
+ // shutdown flag).
+ private class SubmitterThread extends Thread {
+ private BlockingQueue<FlowRunner> queue;
+ private boolean shutdown = false;
+
+ public SubmitterThread(BlockingQueue<FlowRunner> queue) {
+ this.setName("FlowRunnerManager-Submitter-Thread");
+ this.queue = queue;
+ }
+
+ @SuppressWarnings("unused")
+ public void shutdown() {
+ shutdown = true;
+ this.interrupt();
+ }
+
+ public void run() {
+ while (!shutdown) {
+ try {
+ lastSubmitterThreadCheckTime = System.currentTimeMillis();
+ FlowRunner flowRunner = queue.take();
+ executorService.submit(flowRunner);
+ } catch (InterruptedException e) {
+ logger.info("Interrupted. Probably to shut down.");
+ }
+ }
+ }
+ }
+
+ private class CleanerThread extends Thread {
+ // Every hour, clean execution dir.
+ private static final long EXECUTION_DIR_CLEAN_INTERVAL_MS = 60 * 60 * 1000;
+ // Every 5 mins clean the old project dir
+ private static final long OLD_PROJECT_DIR_INTERVAL_MS = 5 * 60 * 1000;
+ // Every 2 mins clean the recently finished list
+ private static final long RECENTLY_FINISHED_INTERVAL_MS = 2 * 60 * 1000;
+
+ private boolean shutdown = false;
+ private long lastExecutionDirCleanTime = -1;
+ private long lastOldProjectCleanTime = -1;
+ private long lastRecentlyFinishedCleanTime = -1;
+
+ public CleanerThread() {
+ this.setName("FlowRunnerManager-Cleaner-Thread");
+ }
+
+ @SuppressWarnings("unused")
+ public void shutdown() {
+ shutdown = true;
+ this.interrupt();
+ }
+
+ public void run() {
+ while (!shutdown) {
+ synchronized (this) {
+ try {
+ lastCleanerThreadCheckTime = System.currentTimeMillis();
+
+ // Cleanup old stuff.
+ long currentTime = System.currentTimeMillis();
+ if (currentTime - RECENTLY_FINISHED_INTERVAL_MS > lastRecentlyFinishedCleanTime) {
+ logger.info("Cleaning recently finished");
+ cleanRecentlyFinished();
+ lastRecentlyFinishedCleanTime = currentTime;
+ }
+
+ if (currentTime - OLD_PROJECT_DIR_INTERVAL_MS > lastOldProjectCleanTime) {
+ logger.info("Cleaning old projects");
+ cleanOlderProjects();
+
+ lastOldProjectCleanTime = currentTime;
+ }
+
+ if (currentTime - EXECUTION_DIR_CLEAN_INTERVAL_MS > lastExecutionDirCleanTime) {
+ logger.info("Cleaning old execution dirs");
+ cleanOlderExecutionDirs();
+ lastExecutionDirCleanTime = currentTime;
+ }
+
+ wait(RECENTLY_FINISHED_TIME_TO_LIVE);
+ } catch (InterruptedException e) {
+ logger.info("Interrupted. Probably to shut down.");
+ }
+ }
+ }
+ }
+
+ private void cleanOlderExecutionDirs() {
+ File dir = executionDirectory;
+
+ final long pastTimeThreshold =
+ System.currentTimeMillis() - executionDirRetention;
+ File[] executionDirs = dir.listFiles(new FileFilter() {
+ @Override
+ public boolean accept(File path) {
+ if (path.isDirectory() && path.lastModified() < pastTimeThreshold) {
+ return true;
+ }
+ return false;
+ }
+ });
+
+ for (File exDir : executionDirs) {
+ try {
+ int execId = Integer.valueOf(exDir.getName());
+ if (runningFlows.containsKey(execId)
+ || recentlyFinishedFlows.containsKey(execId)) {
+ continue;
+ }
+ } catch (NumberFormatException e) {
+ logger.error("Can't delete exec dir " + exDir.getName()
+ + " it is not a number");
+ continue;
+ }
+
+ synchronized (executionDirDeletionSync) {
+ try {
+ FileUtils.deleteDirectory(exDir);
+ } catch (IOException e) {
+ logger.error("Error cleaning execution dir " + exDir.getPath(), e);
+ }
+ }
+ }
+ }
+
+ private void cleanRecentlyFinished() {
+ long cleanupThreshold =
+ System.currentTimeMillis() - RECENTLY_FINISHED_TIME_TO_LIVE;
+ ArrayList<Integer> executionToKill = new ArrayList<Integer>();
+ for (ExecutableFlow flow : recentlyFinishedFlows.values()) {
+ if (flow.getEndTime() < cleanupThreshold) {
+ executionToKill.add(flow.getExecutionId());
+ }
+ }
+
+ for (Integer id : executionToKill) {
+ logger.info("Cleaning execution " + id
+ + " from recently finished flows list.");
+ recentlyFinishedFlows.remove(id);
+ }
+ }
+
+ private void cleanOlderProjects() {
+ Map<Integer, ArrayList<ProjectVersion>> projectVersions =
+ new HashMap<Integer, ArrayList<ProjectVersion>>();
+ for (ProjectVersion version : installedProjects.values()) {
+ ArrayList<ProjectVersion> versionList =
+ projectVersions.get(version.getProjectId());
+ if (versionList == null) {
+ versionList = new ArrayList<ProjectVersion>();
+ projectVersions.put(version.getProjectId(), versionList);
+ }
+ versionList.add(version);
+ }
+
+ HashSet<Pair<Integer, Integer>> activeProjectVersions =
+ new HashSet<Pair<Integer, Integer>>();
+ for (FlowRunner runner : runningFlows.values()) {
+ ExecutableFlow flow = runner.getExecutableFlow();
+ activeProjectVersions.add(new Pair<Integer, Integer>(flow
+ .getProjectId(), flow.getVersion()));
+ }
+
+ for (Map.Entry<Integer, ArrayList<ProjectVersion>> entry : projectVersions
+ .entrySet()) {
+ // Integer projectId = entry.getKey();
+ ArrayList<ProjectVersion> installedVersions = entry.getValue();
+
+ // Keep one version of the project around.
+ if (installedVersions.size() == 1) {
+ continue;
+ }
+
+ Collections.sort(installedVersions);
+ for (int i = 0; i < installedVersions.size() - 1; ++i) {
+ ProjectVersion version = installedVersions.get(i);
+ Pair<Integer, Integer> versionKey =
+ new Pair<Integer, Integer>(version.getProjectId(),
+ version.getVersion());
+ if (!activeProjectVersions.contains(versionKey)) {
+ try {
+ logger.info("Removing old unused installed project "
+ + version.getProjectId() + ":" + version.getVersion());
+ version.deleteDirectory();
+ installedProjects.remove(new Pair<Integer, Integer>(version
+ .getProjectId(), version.getVersion()));
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+ installedVersions.remove(versionKey);
+ }
+ }
+ }
+ }
+ }
+
+ public void submitFlow(int execId) throws ExecutorManagerException {
+ // Load file and submit
+ if (runningFlows.containsKey(execId)) {
+ throw new ExecutorManagerException("Execution " + execId
+ + " is already running.");
+ }
+
+ ExecutableFlow flow = null;
+ flow = executorLoader.fetchExecutableFlow(execId);
+ if (flow == null) {
+ throw new ExecutorManagerException("Error loading flow with exec "
+ + execId);
+ }
+
+ // Sets up the project files and execution directory.
+ setupFlow(flow);
+
+ // Setup flow runner
+ FlowWatcher watcher = null;
+ ExecutionOptions options = flow.getExecutionOptions();
+ if (options.getPipelineExecutionId() != null) {
+ Integer pipelineExecId = options.getPipelineExecutionId();
+ FlowRunner runner = runningFlows.get(pipelineExecId);
+
+ if (runner != null) {
+ watcher = new LocalFlowWatcher(runner);
+ } else {
+ watcher = new RemoteFlowWatcher(pipelineExecId, executorLoader);
+ }
+ }
+
+ int numJobThreads = numJobThreadPerFlow;
+ if (options.getFlowParameters().containsKey("flow.num.job.threads")) {
+ try {
+ int numJobs =
+ Integer.valueOf(options.getFlowParameters().get(
+ "flow.num.job.threads"));
+ if (numJobs > 0 && numJobs <= numJobThreads) {
+ numJobThreads = numJobs;
+ }
+ } catch (Exception e) {
+ throw new ExecutorManagerException(
+ "Failed to set the number of job threads "
+ + options.getFlowParameters().get("flow.num.job.threads")
+ + " for flow " + execId, e);
+ }
+ }
+
+ FlowRunner runner =
+ new FlowRunner(flow, executorLoader, projectLoader, jobtypeManager);
+ runner.setFlowWatcher(watcher)
+ .setJobLogSettings(jobLogChunkSize, jobLogNumFiles)
+ .setValidateProxyUser(validateProxyUser)
+ .setNumJobThreads(numJobThreads).addListener(this);
+
+ // Check again.
+ if (runningFlows.containsKey(execId)) {
+ throw new ExecutorManagerException("Execution " + execId
+ + " is already running.");
+ }
+
+ // Finally, queue the sucker.
+ runningFlows.put(execId, runner);
+ flowQueue.add(runner);
+ }
+
+ private void setupFlow(ExecutableFlow flow) throws ExecutorManagerException {
+ int execId = flow.getExecutionId();
+ File execPath = new File(executionDirectory, String.valueOf(execId));
+ flow.setExecutionPath(execPath.getPath());
+ logger
+ .info("Flow " + execId + " submitted with path " + execPath.getPath());
+ execPath.mkdirs();
+
+ // We're setting up the installed projects. First time, it may take a while
+ // to set up.
+ Pair<Integer, Integer> projectVersionKey =
+ new Pair<Integer, Integer>(flow.getProjectId(), flow.getVersion());
+
+ // We set up project versions this way
+ ProjectVersion projectVersion = null;
+ synchronized (installedProjects) {
+ projectVersion = installedProjects.get(projectVersionKey);
+ if (projectVersion == null) {
+ projectVersion =
+ new ProjectVersion(flow.getProjectId(), flow.getVersion());
+ installedProjects.put(projectVersionKey, projectVersion);
+ }
+ }
+
+ try {
+ projectVersion.setupProjectFiles(projectLoader, projectDirectory, logger);
+ projectVersion.copyCreateSymlinkDirectory(execPath);
+ } catch (Exception e) {
+ e.printStackTrace();
+ if (execPath.exists()) {
+ try {
+ FileUtils.deleteDirectory(execPath);
+ } catch (IOException e1) {
+ e1.printStackTrace();
+ }
+ }
+ throw new ExecutorManagerException(e);
+ }
+ }
+
+ public void cancelFlow(int execId, String user)
+ throws ExecutorManagerException {
+ FlowRunner runner = runningFlows.get(execId);
+
+ if (runner == null) {
+ throw new ExecutorManagerException("Execution " + execId
+ + " is not running.");
+ }
+
+ runner.kill(user);
+ }
+
+ public void pauseFlow(int execId, String user)
+ throws ExecutorManagerException {
+ FlowRunner runner = runningFlows.get(execId);
+
+ if (runner == null) {
+ throw new ExecutorManagerException("Execution " + execId
+ + " is not running.");
+ }
+
+ runner.pause(user);
+ }
+
+ public void resumeFlow(int execId, String user)
+ throws ExecutorManagerException {
+ FlowRunner runner = runningFlows.get(execId);
+
+ if (runner == null) {
+ throw new ExecutorManagerException("Execution " + execId
+ + " is not running.");
+ }
+
+ runner.resume(user);
+ }
+
+ public void retryFailures(int execId, String user)
+ throws ExecutorManagerException {
+ FlowRunner runner = runningFlows.get(execId);
+
+ if (runner == null) {
+ throw new ExecutorManagerException("Execution " + execId
+ + " is not running.");
+ }
+
+ runner.retryFailures(user);
+ }
+
+ public ExecutableFlow getExecutableFlow(int execId) {
+ FlowRunner runner = runningFlows.get(execId);
+ if (runner == null) {
+ return recentlyFinishedFlows.get(execId);
+ }
+ return runner.getExecutableFlow();
+ }
+
+ @Override
+ public void handleEvent(Event event) {
+ if (event.getType() == Event.Type.FLOW_FINISHED) {
+
+ FlowRunner flowRunner = (FlowRunner) event.getRunner();
+ ExecutableFlow flow = flowRunner.getExecutableFlow();
+
+ recentlyFinishedFlows.put(flow.getExecutionId(), flow);
+ logger.info("Flow " + flow.getExecutionId()
+ + " is finished. Adding it to recently finished flows list.");
+ runningFlows.remove(flow.getExecutionId());
+ }
+ }
+
+ public LogData readFlowLogs(int execId, int startByte, int length)
+ throws ExecutorManagerException {
+ FlowRunner runner = runningFlows.get(execId);
+ if (runner == null) {
+ throw new ExecutorManagerException("Running flow " + execId
+ + " not found.");
+ }
+
+ File dir = runner.getExecutionDir();
+ if (dir != null && dir.exists()) {
+ try {
+ synchronized (executionDirDeletionSync) {
+ if (!dir.exists()) {
+ throw new ExecutorManagerException(
+ "Execution dir file doesn't exist. Probably has beend deleted");
+ }
+
+ File logFile = runner.getFlowLogFile();
+ if (logFile != null && logFile.exists()) {
+ return FileIOUtils.readUtf8File(logFile, startByte, length);
+ } else {
+ throw new ExecutorManagerException("Flow log file doesn't exist.");
+ }
+ }
+ } catch (IOException e) {
+ throw new ExecutorManagerException(e);
+ }
+ }
+
+ throw new ExecutorManagerException(
+ "Error reading file. Log directory doesn't exist.");
+ }
+
+ public LogData readJobLogs(int execId, String jobId, int attempt,
+ int startByte, int length) throws ExecutorManagerException {
+ FlowRunner runner = runningFlows.get(execId);
+ if (runner == null) {
+ throw new ExecutorManagerException("Running flow " + execId
+ + " not found.");
+ }
+
+ File dir = runner.getExecutionDir();
+ if (dir != null && dir.exists()) {
+ try {
+ synchronized (executionDirDeletionSync) {
+ if (!dir.exists()) {
+ throw new ExecutorManagerException(
+ "Execution dir file doesn't exist. Probably has beend deleted");
+ }
+ File logFile = runner.getJobLogFile(jobId, attempt);
+ if (logFile != null && logFile.exists()) {
+ return FileIOUtils.readUtf8File(logFile, startByte, length);
+ } else {
+ throw new ExecutorManagerException("Job log file doesn't exist.");
+ }
+ }
+ } catch (IOException e) {
+ throw new ExecutorManagerException(e);
+ }
+ }
+
+ throw new ExecutorManagerException(
+ "Error reading file. Log directory doesn't exist.");
+ }
+
+ public List<Object> readJobAttachments(int execId, String jobId, int attempt)
+ throws ExecutorManagerException {
+ FlowRunner runner = runningFlows.get(execId);
+ if (runner == null) {
+ throw new ExecutorManagerException("Running flow " + execId
+ + " not found.");
+ }
+
+ File dir = runner.getExecutionDir();
+ if (dir == null || !dir.exists()) {
+ throw new ExecutorManagerException(
+ "Error reading file. Log directory doesn't exist.");
+ }
+
+ try {
+ synchronized (executionDirDeletionSync) {
+ if (!dir.exists()) {
+ throw new ExecutorManagerException(
+ "Execution dir file doesn't exist. Probably has beend deleted");
+ }
+
+ File attachmentFile = runner.getJobAttachmentFile(jobId, attempt);
+ if (attachmentFile == null || !attachmentFile.exists()) {
+ return null;
+ }
+
+ @SuppressWarnings("unchecked")
+ List<Object> jobAttachments =
+ (ArrayList<Object>) JSONUtils.parseJSONFromFile(attachmentFile);
+
+ return jobAttachments;
+ }
+ } catch (IOException e) {
+ throw new ExecutorManagerException(e);
+ }
+ }
+
+ public JobMetaData readJobMetaData(int execId, String jobId, int attempt,
+ int startByte, int length) throws ExecutorManagerException {
+ FlowRunner runner = runningFlows.get(execId);
+ if (runner == null) {
+ throw new ExecutorManagerException("Running flow " + execId
+ + " not found.");
+ }
+
+ File dir = runner.getExecutionDir();
+ if (dir != null && dir.exists()) {
+ try {
+ synchronized (executionDirDeletionSync) {
+ if (!dir.exists()) {
+ throw new ExecutorManagerException(
+ "Execution dir file doesn't exist. Probably has beend deleted");
+ }
+ File metaDataFile = runner.getJobMetaDataFile(jobId, attempt);
+ if (metaDataFile != null && metaDataFile.exists()) {
+ return FileIOUtils.readUtf8MetaDataFile(metaDataFile, startByte,
+ length);
+ } else {
+ throw new ExecutorManagerException("Job log file doesn't exist.");
+ }
+ }
+ } catch (IOException e) {
+ throw new ExecutorManagerException(e);
+ }
+ }
+
+ throw new ExecutorManagerException(
+ "Error reading file. Log directory doesn't exist.");
+ }
+
  /** @return wall-clock time (ms) of the cleaner thread's last liveness check. */
  public long getLastCleanerThreadCheckTime() {
    return lastCleanerThreadCheckTime;
  }
+
  /** @return wall-clock time (ms) of the submitter thread's last liveness check. */
  public long getLastSubmitterThreadCheckTime() {
    return lastSubmitterThreadCheckTime;
  }
+
  /** @return true if the submitter thread is still alive. */
  public boolean isSubmitterThreadActive() {
    return this.submitterThread.isAlive();
  }
+
  /** @return true if the cleaner thread is still alive. */
  public boolean isCleanerThreadActive() {
    return this.cleanerThread.isAlive();
  }
+
  /** @return the submitter thread's current Thread.State (for JMX reporting). */
  public State getSubmitterThreadState() {
    return this.submitterThread.getState();
  }
+
  /** @return the cleaner thread's current Thread.State (for JMX reporting). */
  public State getCleanerThreadState() {
    return this.cleanerThread.getState();
  }
+
  /** @return true if the flow-execution thread pool has been shut down. */
  public boolean isExecutorThreadPoolShutdown() {
    return executorService.isShutdown();
  }
+
  /** @return the number of flows currently executing on this executor. */
  public int getNumExecutingFlows() {
    return runningFlows.size();
  }
+
+ public String getRunningFlowIds() {
+ ArrayList<Integer> ids = new ArrayList<Integer>(runningFlows.keySet());
+ Collections.sort(ids);
+ return ids.toString();
+ }
+
+ public int getNumExecutingJobs() {
+ int jobCount = 0;
+ for (FlowRunner runner : runningFlows.values()) {
+ jobCount += runner.getNumRunningJobs();
+ }
+
+ return jobCount;
+ }
+
  /**
   * Reloads all job type plugins from disk.
   *
   * @throws JobTypeManagerException if plugin loading fails
   */
  public void reloadJobTypePlugins() throws JobTypeManagerException {
    jobtypeManager.loadPlugins();
  }
}
src/main/java/azkaban/execapp/JMXHttpServlet.java 102(+52 -50)
diff --git a/src/main/java/azkaban/execapp/JMXHttpServlet.java b/src/main/java/azkaban/execapp/JMXHttpServlet.java
index 20f9aef..2279f2b 100644
--- a/src/main/java/azkaban/execapp/JMXHttpServlet.java
+++ b/src/main/java/azkaban/execapp/JMXHttpServlet.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -36,54 +36,56 @@ import azkaban.webapp.servlet.AzkabanServletContextListener;
import azkaban.webapp.servlet.HttpRequestUtils;
public class JMXHttpServlet extends HttpServlet implements ConnectorParams {
- private static final long serialVersionUID = -3085603824826446270L;
- private static final Logger logger = Logger.getLogger(JMXHttpServlet.class);
- private AzkabanExecutorServer server;
-
- public void init(ServletConfig config) throws ServletException {
- server = (AzkabanExecutorServer) config.getServletContext().getAttribute(AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY);
- }
-
- public boolean hasParam(HttpServletRequest request, String param) {
- return HttpRequestUtils.hasParam(request, param);
- }
-
- public String getParam(HttpServletRequest request, String name) throws ServletException {
- return HttpRequestUtils.getParam(request, name);
- }
-
- protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
- HashMap<String,Object> ret = new HashMap<String,Object>();
+ private static final long serialVersionUID = -3085603824826446270L;
+ private static final Logger logger = Logger.getLogger(JMXHttpServlet.class);
+ private AzkabanExecutorServer server;
- if (hasParam(req, JMX_GET_MBEANS)) {
- ret.put("mbeans", server.getMbeanNames());
- }
- else if (hasParam(req, JMX_GET_ALL_MBEAN_ATTRIBUTES)) {
- if (!hasParam(req, JMX_MBEAN)) {
- ret.put("error", "Parameters 'mbean' must be set");
- }
- else {
- String mbeanName = getParam(req, JMX_MBEAN);
- try {
- ObjectName name = new ObjectName(mbeanName);
- MBeanInfo info = server.getMBeanInfo(name);
-
- MBeanAttributeInfo[] mbeanAttrs = info.getAttributes();
- HashMap<String, Object> attributes = new HashMap<String,Object>();
+ public void init(ServletConfig config) throws ServletException {
+ server =
+ (AzkabanExecutorServer) config.getServletContext().getAttribute(
+ AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY);
+ }
- for (MBeanAttributeInfo attrInfo: mbeanAttrs) {
- Object obj = server.getMBeanAttribute(name, attrInfo.getName());
- attributes.put(attrInfo.getName(), obj);
- }
-
- ret.put("attributes", attributes);
- } catch (Exception e) {
- logger.error(e);
- ret.put("error", "'" + mbeanName + "' is not a valid mBean name");
- }
- }
- }
+ public boolean hasParam(HttpServletRequest request, String param) {
+ return HttpRequestUtils.hasParam(request, param);
+ }
- JSONUtils.toJSON(ret, resp.getOutputStream(), true);
- }
+ public String getParam(HttpServletRequest request, String name)
+ throws ServletException {
+ return HttpRequestUtils.getParam(request, name);
+ }
+
+ protected void doGet(HttpServletRequest req, HttpServletResponse resp)
+ throws ServletException, IOException {
+ HashMap<String, Object> ret = new HashMap<String, Object>();
+
+ if (hasParam(req, JMX_GET_MBEANS)) {
+ ret.put("mbeans", server.getMbeanNames());
+ } else if (hasParam(req, JMX_GET_ALL_MBEAN_ATTRIBUTES)) {
+ if (!hasParam(req, JMX_MBEAN)) {
+ ret.put("error", "Parameters 'mbean' must be set");
+ } else {
+ String mbeanName = getParam(req, JMX_MBEAN);
+ try {
+ ObjectName name = new ObjectName(mbeanName);
+ MBeanInfo info = server.getMBeanInfo(name);
+
+ MBeanAttributeInfo[] mbeanAttrs = info.getAttributes();
+ HashMap<String, Object> attributes = new HashMap<String, Object>();
+
+ for (MBeanAttributeInfo attrInfo : mbeanAttrs) {
+ Object obj = server.getMBeanAttribute(name, attrInfo.getName());
+ attributes.put(attrInfo.getName(), obj);
+ }
+
+ ret.put("attributes", attributes);
+ } catch (Exception e) {
+ logger.error(e);
+ ret.put("error", "'" + mbeanName + "' is not a valid mBean name");
+ }
+ }
+ }
+
+ JSONUtils.toJSON(ret, resp.getOutputStream(), true);
+ }
}
src/main/java/azkaban/execapp/JobRunner.java 1239(+625 -614)
diff --git a/src/main/java/azkaban/execapp/JobRunner.java b/src/main/java/azkaban/execapp/JobRunner.java
index 3a2b8a4..0408a39 100644
--- a/src/main/java/azkaban/execapp/JobRunner.java
+++ b/src/main/java/azkaban/execapp/JobRunner.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -52,615 +52,626 @@ import azkaban.jobtype.JobTypeManagerException;
import azkaban.utils.Props;
public class JobRunner extends EventHandler implements Runnable {
- private final Layout DEFAULT_LAYOUT = new EnhancedPatternLayout("%d{dd-MM-yyyy HH:mm:ss z} %c{1} %p - %m\n");
-
- private ExecutorLoader loader;
- private Props props;
- private ExecutableNode node;
- private File workingDir;
-
- private Logger logger = null;
- private Layout loggerLayout = DEFAULT_LAYOUT;
- private Logger flowLogger = null;
-
- private Appender jobAppender;
- private File logFile;
- private String attachmentFileName;
-
- private Job job;
- private int executionId = -1;
- private String jobId;
-
- private static final Object logCreatorLock = new Object();
- private Object syncObject = new Object();
-
- private final JobTypeManager jobtypeManager;
-
- // Used by the job to watch and block against another flow
- private Integer pipelineLevel = null;
- private FlowWatcher watcher = null;
- private Set<String> pipelineJobs = new HashSet<String>();
-
- private Set<String> proxyUsers = null;
-
- private String jobLogChunkSize;
- private int jobLogBackupIndex;
-
- private long delayStartMs = 0;
- private boolean killed = false;
- private BlockingStatus currentBlockStatus = null;
-
- public JobRunner(ExecutableNode node, File workingDir, ExecutorLoader loader, JobTypeManager jobtypeManager) {
- this.props = node.getInputProps();
- this.node = node;
- this.workingDir = workingDir;
-
- this.executionId = node.getParentFlow().getExecutionId();
- this.jobId = node.getId();
- this.loader = loader;
- this.jobtypeManager = jobtypeManager;
- }
-
- public void setValidatedProxyUsers(Set<String> proxyUsers) {
- this.proxyUsers = proxyUsers;
- }
-
- public void setLogSettings(Logger flowLogger, String logFileChuckSize, int numLogBackup ) {
- this.flowLogger = flowLogger;
- this.jobLogChunkSize = logFileChuckSize;
- this.jobLogBackupIndex = numLogBackup;
- }
-
- public Props getProps() {
- return props;
- }
-
- public void setPipeline(FlowWatcher watcher, int pipelineLevel) {
- this.watcher = watcher;
- this.pipelineLevel = pipelineLevel;
-
- if (this.pipelineLevel == 1) {
- pipelineJobs.add(node.getNestedId());
- }
- else if (this.pipelineLevel == 2) {
- pipelineJobs.add(node.getNestedId());
- ExecutableFlowBase parentFlow = node.getParentFlow();
-
- if (parentFlow.getEndNodes().contains(node.getId())) {
- if (!parentFlow.getOutNodes().isEmpty()) {
- ExecutableFlowBase grandParentFlow = parentFlow.getParentFlow();
- for (String outNode: parentFlow.getOutNodes()) {
- ExecutableNode nextNode = grandParentFlow.getExecutableNode(outNode);
-
- // If the next node is a nested flow, then we add the nested starting nodes
- if (nextNode instanceof ExecutableFlowBase) {
- ExecutableFlowBase nextFlow = (ExecutableFlowBase)nextNode;
- findAllStartingNodes(nextFlow, pipelineJobs);
- }
- else {
- pipelineJobs.add(nextNode.getNestedId());
- }
- }
- }
- }
- else {
- for (String outNode : node.getOutNodes()) {
- ExecutableNode nextNode = parentFlow.getExecutableNode(outNode);
-
- // If the next node is a nested flow, then we add the nested starting nodes
- if (nextNode instanceof ExecutableFlowBase) {
- ExecutableFlowBase nextFlow = (ExecutableFlowBase)nextNode;
- findAllStartingNodes(nextFlow, pipelineJobs);
- }
- else {
- pipelineJobs.add(nextNode.getNestedId());
- }
- }
- }
- }
- }
-
- private void findAllStartingNodes(ExecutableFlowBase flow, Set<String> pipelineJobs) {
- for (String startingNode: flow.getStartNodes()) {
- ExecutableNode node = flow.getExecutableNode(startingNode);
- if (node instanceof ExecutableFlowBase) {
- findAllStartingNodes((ExecutableFlowBase)node, pipelineJobs);
- }
- else {
- pipelineJobs.add(node.getNestedId());
- }
- }
- }
-
- /**
- * Returns a list of jobs that this JobRunner will wait upon to finish before starting.
- * It is only relevant if pipeline is turned on.
- *
- * @return
- */
- public Set<String> getPipelineWatchedJobs() {
- return pipelineJobs;
- }
-
- public void setDelayStart(long delayMS) {
- delayStartMs = delayMS;
- }
-
- public long getDelayStart() {
- return delayStartMs;
- }
-
- public ExecutableNode getNode() {
- return node;
- }
-
- public String getLogFilePath() {
- return logFile == null ? null : logFile.getPath();
- }
-
- private void createLogger() {
- // Create logger
- synchronized (logCreatorLock) {
- String loggerName = System.currentTimeMillis() + "." + this.executionId + "." + this.jobId;
- logger = Logger.getLogger(loggerName);
-
- // Create file appender
- String logName = createLogFileName(node);
- logFile = new File(workingDir, logName);
-
- String absolutePath = logFile.getAbsolutePath();
-
- jobAppender = null;
- try {
- RollingFileAppender fileAppender = new RollingFileAppender(loggerLayout, absolutePath, true);
- fileAppender.setMaxBackupIndex(jobLogBackupIndex);
- fileAppender.setMaxFileSize(jobLogChunkSize);
- jobAppender = fileAppender;
- logger.addAppender(jobAppender);
- logger.setAdditivity(false);
- }
- catch (IOException e) {
- flowLogger.error("Could not open log file in " + workingDir + " for job " + this.jobId, e);
- }
- }
- }
-
- private void createAttachmentFile() {
- String fileName = createAttachmentFileName(node);
- File file = new File(workingDir, fileName);
- attachmentFileName = file.getAbsolutePath();
- }
-
- private void closeLogger() {
- if (jobAppender != null) {
- logger.removeAppender(jobAppender);
- jobAppender.close();
- }
- }
-
- private void writeStatus() {
- try {
- node.setUpdateTime(System.currentTimeMillis());
- loader.updateExecutableNode(node);
- }
- catch (ExecutorManagerException e) {
- flowLogger.error("Could not update job properties in db for " + this.jobId, e);
- }
- }
-
- /**
- * Used to handle non-ready and special status's (i.e. KILLED). Returns true
- * if they handled anything.
- *
- * @return
- */
- private boolean handleNonReadyStatus() {
- Status nodeStatus = node.getStatus();
- boolean quickFinish = false;
- long time = System.currentTimeMillis();
-
- if (Status.isStatusFinished(nodeStatus)) {
- quickFinish = true;
- }
- else if (nodeStatus == Status.DISABLED) {
- changeStatus(Status.SKIPPED, time);
- quickFinish = true;
- }
- else if (this.isKilled()) {
- changeStatus(Status.KILLED, time);
- quickFinish = true;
- }
-
- if (quickFinish) {
- node.setStartTime(time);
- fireEvent(Event.create(this, Type.JOB_STARTED, null, false));
- node.setEndTime(time);
- fireEvent(Event.create(this, Type.JOB_FINISHED));
- return true;
- }
-
- return false;
- }
-
- /**
- * If pipelining is set, will block on another flow's jobs.
- */
- private boolean blockOnPipeLine() {
- if (this.isKilled()) {
- return true;
- }
-
- // For pipelining of jobs. Will watch other jobs.
- if (!pipelineJobs.isEmpty()) {
- String blockedList = "";
- ArrayList<BlockingStatus> blockingStatus = new ArrayList<BlockingStatus>();
- for (String waitingJobId : pipelineJobs) {
- Status status = watcher.peekStatus(waitingJobId);
- if (status != null && !Status.isStatusFinished(status)) {
- BlockingStatus block = watcher.getBlockingStatus(waitingJobId);
- blockingStatus.add(block);
- blockedList += waitingJobId + ",";
- }
- }
- if (!blockingStatus.isEmpty()) {
- logger.info("Pipeline job " + this.jobId + " waiting on " + blockedList + " in execution " + watcher.getExecId());
-
- for (BlockingStatus bStatus: blockingStatus) {
- logger.info("Waiting on pipelined job " + bStatus.getJobId());
- currentBlockStatus = bStatus;
- bStatus.blockOnFinishedStatus();
- if (this.isKilled()) {
- logger.info("Job was killed while waiting on pipeline. Quiting.");
- return true;
- }
- else {
- logger.info("Pipelined job " + bStatus.getJobId() + " finished.");
- }
- }
- }
- }
-
- currentBlockStatus = null;
- return false;
- }
-
- private boolean delayExecution() {
- if (this.isKilled()) {
- return true;
- }
-
- long currentTime = System.currentTimeMillis();
- if (delayStartMs > 0) {
- logger.info("Delaying start of execution for " + delayStartMs + " milliseconds.");
- synchronized (this) {
- try {
- this.wait(delayStartMs);
- logger.info("Execution has been delayed for " + delayStartMs + " ms. Continuing with execution.");
- }
- catch (InterruptedException e) {
- logger.error("Job " + this.jobId + " was to be delayed for " + delayStartMs + ". Interrupted after " + (System.currentTimeMillis() - currentTime));
- }
- }
-
- if (this.isKilled()) {
- logger.info("Job was killed while in delay. Quiting.");
- return true;
- }
- }
-
- return false;
- }
-
- private void finalizeLogFile() {
- closeLogger();
- if (logFile == null) {
- flowLogger.info("Log file for job " + this.jobId + " is null");
- return;
- }
-
- try {
- File[] files = logFile.getParentFile().listFiles(new FilenameFilter() {
- @Override
- public boolean accept(File dir, String name) {
- return name.startsWith(logFile.getName());
- }
- });
- Arrays.sort(files, Collections.reverseOrder());
-
- loader.uploadLogFile(executionId, this.node.getNestedId(), node.getAttempt(), files);
- }
- catch (ExecutorManagerException e) {
- flowLogger.error("Error writing out logs for job " + this.node.getNestedId(), e);
- }
- }
-
- private void finalizeAttachmentFile() {
- if (attachmentFileName == null) {
- flowLogger.info("Attachment file for job " + this.jobId + " is null");
- return;
- }
-
- try {
- File file = new File(attachmentFileName);
- if (!file.exists()) {
- flowLogger.info("No attachment file for job " + this.jobId +
- " written.");
- return;
- }
- loader.uploadAttachmentFile(node, file);
- }
- catch (ExecutorManagerException e) {
- flowLogger.error("Error writing out attachment for job " +
- this.node.getNestedId(), e);
- }
- }
-
- /**
- * The main run thread.
- *
- */
- @Override
- public void run() {
- Thread.currentThread().setName("JobRunner-" + this.jobId + "-" + executionId);
-
- // If the job is cancelled, disabled, killed. No log is created in this case
- if (handleNonReadyStatus()) {
- return;
- }
-
- createAttachmentFile();
- createLogger();
- boolean errorFound = false;
- // Delay execution if necessary. Will return a true if something went wrong.
- errorFound |= delayExecution();
-
- // For pipelining of jobs. Will watch other jobs. Will return true if
- // something went wrong.
- errorFound |= blockOnPipeLine();
-
- // Start the node.
- node.setStartTime(System.currentTimeMillis());
- if (!errorFound && !isKilled()) {
- fireEvent(Event.create(this, Type.JOB_STARTED, null, false));
- try {
- loader.uploadExecutableNode(node, props);
- }
- catch (ExecutorManagerException e1) {
- logger.error("Error writing initial node properties");
- }
-
- if (prepareJob()) {
- // Writes status to the db
- writeStatus();
- fireEvent(Event.create(this, Type.JOB_STATUS_CHANGED), false);
- runJob();
- }
- else {
- changeStatus(Status.FAILED);
- logError("Job run failed preparing the job.");
- }
- }
- node.setEndTime(System.currentTimeMillis());
-
- if (isKilled()) {
- // even if it's killed, there is a chance that the job failed is marked as failure,
- // So we set it to KILLED to make sure we know that we forced kill it rather than
- // it being a legitimate failure.
- changeStatus(Status.KILLED);
- }
- logInfo("Finishing job " + this.jobId + " at " + node.getEndTime() + " with status " + node.getStatus());
-
- fireEvent(Event.create(this, Type.JOB_FINISHED), false);
- finalizeLogFile();
- finalizeAttachmentFile();
- writeStatus();
- }
-
- private boolean prepareJob() throws RuntimeException {
- // Check pre conditions
- if (props == null || this.isKilled()) {
- logError("Failing job. The job properties don't exist");
- return false;
- }
-
- synchronized (syncObject) {
- if (node.getStatus() == Status.FAILED || this.isKilled()) {
- return false;
- }
-
- if (node.getAttempt() > 0) {
- logInfo("Starting job " + this.jobId + " attempt " + node.getAttempt() + " at " + node.getStartTime());
- }
- else {
- logInfo("Starting job " + this.jobId + " at " + node.getStartTime());
- }
-
- // If it's an embedded flow, we'll add the nested flow info to the job conf
- if (node.getExecutableFlow() != node.getParentFlow()) {
- String subFlow = node.getPrintableId(":");
- props.put(CommonJobProperties.NESTED_FLOW_PATH, subFlow);
- }
-
- props.put(CommonJobProperties.JOB_ATTEMPT, node.getAttempt());
- props.put(CommonJobProperties.JOB_METADATA_FILE, createMetaDataFileName(node));
- props.put(CommonJobProperties.JOB_ATTACHMENT_FILE, attachmentFileName);
- changeStatus(Status.RUNNING);
-
- // Ability to specify working directory
- if (!props.containsKey(AbstractProcessJob.WORKING_DIR)) {
- props.put(AbstractProcessJob.WORKING_DIR, workingDir.getAbsolutePath());
- }
-
- if (props.containsKey("user.to.proxy")) {
- String jobProxyUser = props.getString("user.to.proxy");
- if (proxyUsers != null && !proxyUsers.contains(jobProxyUser)) {
- logger.error("User " + jobProxyUser + " has no permission to execute this job " + this.jobId + "!");
- return false;
- }
- }
-
- try {
- job = jobtypeManager.buildJobExecutor(this.jobId, props, logger);
- }
- catch (JobTypeManagerException e) {
- logger.error("Failed to build job type", e);
- return false;
- }
- }
-
- return true;
- }
-
- private void runJob() {
- try {
- job.run();
- }
- catch (Exception e) {
- e.printStackTrace();
-
- if (props.getBoolean("job.succeed.on.failure", false)) {
- changeStatus(Status.FAILED_SUCCEEDED);
- logError("Job run failed, but will treat it like success.");
- logError(e.getMessage() + e.getCause());
- }
- else {
- changeStatus(Status.FAILED);
- logError("Job run failed!");
- logError(e.getMessage() + e.getCause());
- }
- }
-
- if (job != null) {
- node.setOutputProps(job.getJobGeneratedProperties());
- }
-
- // If the job is still running, set the status to Success.
- if (!Status.isStatusFinished(node.getStatus())) {
- changeStatus(Status.SUCCEEDED);
- }
- }
-
- private void changeStatus(Status status) {
- changeStatus(status, System.currentTimeMillis());
- }
-
- private void changeStatus(Status status, long time) {
- node.setStatus(status);
- node.setUpdateTime(time);
- }
-
- private void fireEvent(Event event) {
- fireEvent(event, true);
- }
-
- private void fireEvent(Event event, boolean updateTime) {
- if (updateTime) {
- node.setUpdateTime(System.currentTimeMillis());
- }
- this.fireEventListeners(event);
- }
-
- public void kill() {
- synchronized (syncObject) {
- if (Status.isStatusFinished(node.getStatus())) {
- return;
- }
- logError("Kill has been called.");
- this.killed = true;
-
- BlockingStatus status = currentBlockStatus;
- if (status != null) {
- status.unblock();
- }
-
- // Cancel code here
- if (job == null) {
- logError("Job hasn't started yet.");
- // Just in case we're waiting on the delay
- synchronized(this) {
- this.notify();
- }
- return;
- }
-
- try {
- job.cancel();
- }
- catch (Exception e) {
- logError(e.getMessage());
- logError("Failed trying to cancel job. Maybe it hasn't started running yet or just finished.");
- }
-
- this.changeStatus(Status.KILLED);
- }
- }
-
- public boolean isKilled() {
- return killed;
- }
-
- public Status getStatus() {
- return node.getStatus();
- }
-
- private void logError(String message) {
- if (logger != null) {
- logger.error(message);
- }
- }
-
- private void logInfo(String message) {
- if (logger != null) {
- logger.info(message);
- }
- }
-
- public File getLogFile() {
- return logFile;
- }
-
- public static String createLogFileName(ExecutableNode node, int attempt) {
- int executionId = node.getExecutableFlow().getExecutionId();
- String jobId = node.getId();
- if (node.getExecutableFlow() != node.getParentFlow()) {
- // Posix safe file delimiter
- jobId = node.getPrintableId("._.");
- }
- return attempt > 0 ? "_job." + executionId + "." + attempt + "." + jobId + ".log" : "_job." + executionId + "." + jobId + ".log";
- }
-
- public static String createLogFileName(ExecutableNode node) {
- return JobRunner.createLogFileName(node, node.getAttempt());
- }
-
- public static String createMetaDataFileName(ExecutableNode node, int attempt) {
- int executionId = node.getExecutableFlow().getExecutionId();
- String jobId = node.getId();
- if (node.getExecutableFlow() != node.getParentFlow()) {
- // Posix safe file delimiter
- jobId = node.getPrintableId("._.");
- }
-
- return attempt > 0 ? "_job." + executionId + "." + attempt + "." + jobId + ".meta" : "_job." + executionId + "." + jobId + ".meta";
- }
-
- public static String createMetaDataFileName(ExecutableNode node) {
- return JobRunner.createMetaDataFileName(node, node.getAttempt());
- }
-
- public static String createAttachmentFileName(ExecutableNode node) {
-
- return JobRunner.createAttachmentFileName(node, node.getAttempt());
- }
-
- public static String createAttachmentFileName(ExecutableNode node, int attempt) {
- int executionId = node.getExecutableFlow().getExecutionId();
- String jobId = node.getId();
- if (node.getExecutableFlow() != node.getParentFlow()) {
- // Posix safe file delimiter
- jobId = node.getPrintableId("._.");
- }
-
- return attempt > 0 ? "_job." + executionId + "." + attempt + "." + jobId + ".attach" : "_job." + executionId + "." + jobId + ".attach";
- }
+ private final Layout DEFAULT_LAYOUT = new EnhancedPatternLayout(
+ "%d{dd-MM-yyyy HH:mm:ss z} %c{1} %p - %m\n");
+
+ private ExecutorLoader loader;
+ private Props props;
+ private ExecutableNode node;
+ private File workingDir;
+
+ private Logger logger = null;
+ private Layout loggerLayout = DEFAULT_LAYOUT;
+ private Logger flowLogger = null;
+
+ private Appender jobAppender;
+ private File logFile;
+ private String attachmentFileName;
+
+ private Job job;
+ private int executionId = -1;
+ private String jobId;
+
+ private static final Object logCreatorLock = new Object();
+ private Object syncObject = new Object();
+
+ private final JobTypeManager jobtypeManager;
+
+ // Used by the job to watch and block against another flow
+ private Integer pipelineLevel = null;
+ private FlowWatcher watcher = null;
+ private Set<String> pipelineJobs = new HashSet<String>();
+
+ private Set<String> proxyUsers = null;
+
+ private String jobLogChunkSize;
+ private int jobLogBackupIndex;
+
+ private long delayStartMs = 0;
+ private boolean killed = false;
+ private BlockingStatus currentBlockStatus = null;
+
+ public JobRunner(ExecutableNode node, File workingDir, ExecutorLoader loader,
+ JobTypeManager jobtypeManager) {
+ this.props = node.getInputProps();
+ this.node = node;
+ this.workingDir = workingDir;
+
+ this.executionId = node.getParentFlow().getExecutionId();
+ this.jobId = node.getId();
+ this.loader = loader;
+ this.jobtypeManager = jobtypeManager;
+ }
+
+ public void setValidatedProxyUsers(Set<String> proxyUsers) {
+ this.proxyUsers = proxyUsers;
+ }
+
+ public void setLogSettings(Logger flowLogger, String logFileChuckSize,
+ int numLogBackup) {
+ this.flowLogger = flowLogger;
+ this.jobLogChunkSize = logFileChuckSize;
+ this.jobLogBackupIndex = numLogBackup;
+ }
+
+ public Props getProps() {
+ return props;
+ }
+
+ public void setPipeline(FlowWatcher watcher, int pipelineLevel) {
+ this.watcher = watcher;
+ this.pipelineLevel = pipelineLevel;
+
+ if (this.pipelineLevel == 1) {
+ pipelineJobs.add(node.getNestedId());
+ } else if (this.pipelineLevel == 2) {
+ pipelineJobs.add(node.getNestedId());
+ ExecutableFlowBase parentFlow = node.getParentFlow();
+
+ if (parentFlow.getEndNodes().contains(node.getId())) {
+ if (!parentFlow.getOutNodes().isEmpty()) {
+ ExecutableFlowBase grandParentFlow = parentFlow.getParentFlow();
+ for (String outNode : parentFlow.getOutNodes()) {
+ ExecutableNode nextNode =
+ grandParentFlow.getExecutableNode(outNode);
+
+ // If the next node is a nested flow, then we add the nested
+ // starting nodes
+ if (nextNode instanceof ExecutableFlowBase) {
+ ExecutableFlowBase nextFlow = (ExecutableFlowBase) nextNode;
+ findAllStartingNodes(nextFlow, pipelineJobs);
+ } else {
+ pipelineJobs.add(nextNode.getNestedId());
+ }
+ }
+ }
+ } else {
+ for (String outNode : node.getOutNodes()) {
+ ExecutableNode nextNode = parentFlow.getExecutableNode(outNode);
+
+ // If the next node is a nested flow, then we add the nested starting
+ // nodes
+ if (nextNode instanceof ExecutableFlowBase) {
+ ExecutableFlowBase nextFlow = (ExecutableFlowBase) nextNode;
+ findAllStartingNodes(nextFlow, pipelineJobs);
+ } else {
+ pipelineJobs.add(nextNode.getNestedId());
+ }
+ }
+ }
+ }
+ }
+
+ private void findAllStartingNodes(ExecutableFlowBase flow,
+ Set<String> pipelineJobs) {
+ for (String startingNode : flow.getStartNodes()) {
+ ExecutableNode node = flow.getExecutableNode(startingNode);
+ if (node instanceof ExecutableFlowBase) {
+ findAllStartingNodes((ExecutableFlowBase) node, pipelineJobs);
+ } else {
+ pipelineJobs.add(node.getNestedId());
+ }
+ }
+ }
+
+ /**
+ * Returns a list of jobs that this JobRunner will wait upon to finish before
+ * starting. It is only relevant if pipeline is turned on.
+ *
+ * @return the set of nested job ids this runner will block on
+ */
+ public Set<String> getPipelineWatchedJobs() {
+ return pipelineJobs;
+ }
+
+ public void setDelayStart(long delayMS) {
+ delayStartMs = delayMS;
+ }
+
+ public long getDelayStart() {
+ return delayStartMs;
+ }
+
+ public ExecutableNode getNode() {
+ return node;
+ }
+
+ public String getLogFilePath() {
+ return logFile == null ? null : logFile.getPath();
+ }
+
+ private void createLogger() {
+ // Create logger
+ synchronized (logCreatorLock) {
+ String loggerName =
+ System.currentTimeMillis() + "." + this.executionId + "."
+ + this.jobId;
+ logger = Logger.getLogger(loggerName);
+
+ // Create file appender
+ String logName = createLogFileName(node);
+ logFile = new File(workingDir, logName);
+
+ String absolutePath = logFile.getAbsolutePath();
+
+ jobAppender = null;
+ try {
+ RollingFileAppender fileAppender =
+ new RollingFileAppender(loggerLayout, absolutePath, true);
+ fileAppender.setMaxBackupIndex(jobLogBackupIndex);
+ fileAppender.setMaxFileSize(jobLogChunkSize);
+ jobAppender = fileAppender;
+ logger.addAppender(jobAppender);
+ logger.setAdditivity(false);
+ } catch (IOException e) {
+ flowLogger.error("Could not open log file in " + workingDir
+ + " for job " + this.jobId, e);
+ }
+ }
+ }
+
+ private void createAttachmentFile() {
+ String fileName = createAttachmentFileName(node);
+ File file = new File(workingDir, fileName);
+ attachmentFileName = file.getAbsolutePath();
+ }
+
+ private void closeLogger() {
+ if (jobAppender != null) {
+ logger.removeAppender(jobAppender);
+ jobAppender.close();
+ }
+ }
+
+ private void writeStatus() {
+ try {
+ node.setUpdateTime(System.currentTimeMillis());
+ loader.updateExecutableNode(node);
+ } catch (ExecutorManagerException e) {
+ flowLogger.error("Could not update job properties in db for "
+ + this.jobId, e);
+ }
+ }
+
+ /**
+ * Used to handle non-ready and special statuses (i.e. KILLED). Returns true
+ * if they handled anything.
+ *
+ * @return true if a finished, disabled, or killed status was handled
+ */
+ private boolean handleNonReadyStatus() {
+ Status nodeStatus = node.getStatus();
+ boolean quickFinish = false;
+ long time = System.currentTimeMillis();
+
+ if (Status.isStatusFinished(nodeStatus)) {
+ quickFinish = true;
+ } else if (nodeStatus == Status.DISABLED) {
+ changeStatus(Status.SKIPPED, time);
+ quickFinish = true;
+ } else if (this.isKilled()) {
+ changeStatus(Status.KILLED, time);
+ quickFinish = true;
+ }
+
+ if (quickFinish) {
+ node.setStartTime(time);
+ fireEvent(Event.create(this, Type.JOB_STARTED, null, false));
+ node.setEndTime(time);
+ fireEvent(Event.create(this, Type.JOB_FINISHED));
+ return true;
+ }
+
+ return false;
+ }
+
+ /**
+ * If pipelining is set, will block on another flow's jobs.
+ */
+ private boolean blockOnPipeLine() {
+ if (this.isKilled()) {
+ return true;
+ }
+
+ // For pipelining of jobs. Will watch other jobs.
+ if (!pipelineJobs.isEmpty()) {
+ String blockedList = "";
+ ArrayList<BlockingStatus> blockingStatus =
+ new ArrayList<BlockingStatus>();
+ for (String waitingJobId : pipelineJobs) {
+ Status status = watcher.peekStatus(waitingJobId);
+ if (status != null && !Status.isStatusFinished(status)) {
+ BlockingStatus block = watcher.getBlockingStatus(waitingJobId);
+ blockingStatus.add(block);
+ blockedList += waitingJobId + ",";
+ }
+ }
+ if (!blockingStatus.isEmpty()) {
+ logger.info("Pipeline job " + this.jobId + " waiting on " + blockedList
+ + " in execution " + watcher.getExecId());
+
+ for (BlockingStatus bStatus : blockingStatus) {
+ logger.info("Waiting on pipelined job " + bStatus.getJobId());
+ currentBlockStatus = bStatus;
+ bStatus.blockOnFinishedStatus();
+ if (this.isKilled()) {
+ logger.info("Job was killed while waiting on pipeline. Quiting.");
+ return true;
+ } else {
+ logger.info("Pipelined job " + bStatus.getJobId() + " finished.");
+ }
+ }
+ }
+ }
+
+ currentBlockStatus = null;
+ return false;
+ }
+
+ private boolean delayExecution() {
+ if (this.isKilled()) {
+ return true;
+ }
+
+ long currentTime = System.currentTimeMillis();
+ if (delayStartMs > 0) {
+ logger.info("Delaying start of execution for " + delayStartMs
+ + " milliseconds.");
+ synchronized (this) {
+ try {
+ this.wait(delayStartMs);
+ logger.info("Execution has been delayed for " + delayStartMs
+ + " ms. Continuing with execution.");
+ } catch (InterruptedException e) {
+ logger.error("Job " + this.jobId + " was to be delayed for "
+ + delayStartMs + ". Interrupted after "
+ + (System.currentTimeMillis() - currentTime));
+ }
+ }
+
+ if (this.isKilled()) {
+ logger.info("Job was killed while in delay. Quiting.");
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ private void finalizeLogFile() {
+ closeLogger();
+ if (logFile == null) {
+ flowLogger.info("Log file for job " + this.jobId + " is null");
+ return;
+ }
+
+ try {
+ File[] files = logFile.getParentFile().listFiles(new FilenameFilter() {
+ @Override
+ public boolean accept(File dir, String name) {
+ return name.startsWith(logFile.getName());
+ }
+ });
+ Arrays.sort(files, Collections.reverseOrder());
+
+ loader.uploadLogFile(executionId, this.node.getNestedId(),
+ node.getAttempt(), files);
+ } catch (ExecutorManagerException e) {
+ flowLogger.error(
+ "Error writing out logs for job " + this.node.getNestedId(), e);
+ }
+ }
+
+ private void finalizeAttachmentFile() {
+ if (attachmentFileName == null) {
+ flowLogger.info("Attachment file for job " + this.jobId + " is null");
+ return;
+ }
+
+ try {
+ File file = new File(attachmentFileName);
+ if (!file.exists()) {
+ flowLogger.info("No attachment file for job " + this.jobId
+ + " written.");
+ return;
+ }
+ loader.uploadAttachmentFile(node, file);
+ } catch (ExecutorManagerException e) {
+ flowLogger.error(
+ "Error writing out attachment for job " + this.node.getNestedId(), e);
+ }
+ }
+
+ /**
+ * The main run thread.
+ *
+ */
+ @Override
+ public void run() {
+ Thread.currentThread().setName(
+ "JobRunner-" + this.jobId + "-" + executionId);
+
+ // If the job is cancelled, disabled, killed. No log is created in this case
+ if (handleNonReadyStatus()) {
+ return;
+ }
+
+ createAttachmentFile();
+ createLogger();
+ boolean errorFound = false;
+ // Delay execution if necessary. Will return a true if something went wrong.
+ errorFound |= delayExecution();
+
+ // For pipelining of jobs. Will watch other jobs. Will return true if
+ // something went wrong.
+ errorFound |= blockOnPipeLine();
+
+ // Start the node.
+ node.setStartTime(System.currentTimeMillis());
+ if (!errorFound && !isKilled()) {
+ fireEvent(Event.create(this, Type.JOB_STARTED, null, false));
+ try {
+ loader.uploadExecutableNode(node, props);
+ } catch (ExecutorManagerException e1) {
+ logger.error("Error writing initial node properties");
+ }
+
+ if (prepareJob()) {
+ // Writes status to the db
+ writeStatus();
+ fireEvent(Event.create(this, Type.JOB_STATUS_CHANGED), false);
+ runJob();
+ } else {
+ changeStatus(Status.FAILED);
+ logError("Job run failed preparing the job.");
+ }
+ }
+ node.setEndTime(System.currentTimeMillis());
+
+ if (isKilled()) {
+ // Even if the job was killed, there is a chance it has already been
+ // marked as a failure.
+ // So we set the status to KILLED to make sure we know it was forcibly
+ // killed rather
+ // than it being a legitimate failure.
+ changeStatus(Status.KILLED);
+ }
+ logInfo("Finishing job " + this.jobId + " at " + node.getEndTime()
+ + " with status " + node.getStatus());
+
+ fireEvent(Event.create(this, Type.JOB_FINISHED), false);
+ finalizeLogFile();
+ finalizeAttachmentFile();
+ writeStatus();
+ }
+
+ private boolean prepareJob() throws RuntimeException {
+ // Check pre conditions
+ if (props == null || this.isKilled()) {
+ logError("Failing job. The job properties don't exist");
+ return false;
+ }
+
+ synchronized (syncObject) {
+ if (node.getStatus() == Status.FAILED || this.isKilled()) {
+ return false;
+ }
+
+ if (node.getAttempt() > 0) {
+ logInfo("Starting job " + this.jobId + " attempt " + node.getAttempt()
+ + " at " + node.getStartTime());
+ } else {
+ logInfo("Starting job " + this.jobId + " at " + node.getStartTime());
+ }
+
+ // If it's an embedded flow, we'll add the nested flow info to the job
+ // conf
+ if (node.getExecutableFlow() != node.getParentFlow()) {
+ String subFlow = node.getPrintableId(":");
+ props.put(CommonJobProperties.NESTED_FLOW_PATH, subFlow);
+ }
+
+ props.put(CommonJobProperties.JOB_ATTEMPT, node.getAttempt());
+ props.put(CommonJobProperties.JOB_METADATA_FILE,
+ createMetaDataFileName(node));
+ props.put(CommonJobProperties.JOB_ATTACHMENT_FILE, attachmentFileName);
+ changeStatus(Status.RUNNING);
+
+ // Ability to specify working directory
+ if (!props.containsKey(AbstractProcessJob.WORKING_DIR)) {
+ props.put(AbstractProcessJob.WORKING_DIR, workingDir.getAbsolutePath());
+ }
+
+ if (props.containsKey("user.to.proxy")) {
+ String jobProxyUser = props.getString("user.to.proxy");
+ if (proxyUsers != null && !proxyUsers.contains(jobProxyUser)) {
+ logger.error("User " + jobProxyUser
+ + " has no permission to execute this job " + this.jobId + "!");
+ return false;
+ }
+ }
+
+ try {
+ job = jobtypeManager.buildJobExecutor(this.jobId, props, logger);
+ } catch (JobTypeManagerException e) {
+ logger.error("Failed to build job type", e);
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ private void runJob() {
+ try {
+ job.run();
+ } catch (Exception e) {
+ e.printStackTrace();
+
+ if (props.getBoolean("job.succeed.on.failure", false)) {
+ changeStatus(Status.FAILED_SUCCEEDED);
+ logError("Job run failed, but will treat it like success.");
+ logError(e.getMessage() + e.getCause());
+ } else {
+ changeStatus(Status.FAILED);
+ logError("Job run failed!");
+ logError(e.getMessage() + e.getCause());
+ }
+ }
+
+ if (job != null) {
+ node.setOutputProps(job.getJobGeneratedProperties());
+ }
+
+ // If the job is still running, set the status to Success.
+ if (!Status.isStatusFinished(node.getStatus())) {
+ changeStatus(Status.SUCCEEDED);
+ }
+ }
+
+ private void changeStatus(Status status) {
+ changeStatus(status, System.currentTimeMillis());
+ }
+
+ private void changeStatus(Status status, long time) {
+ node.setStatus(status);
+ node.setUpdateTime(time);
+ }
+
+ private void fireEvent(Event event) {
+ fireEvent(event, true);
+ }
+
+ private void fireEvent(Event event, boolean updateTime) {
+ if (updateTime) {
+ node.setUpdateTime(System.currentTimeMillis());
+ }
+ this.fireEventListeners(event);
+ }
+
+ public void kill() {
+ synchronized (syncObject) {
+ if (Status.isStatusFinished(node.getStatus())) {
+ return;
+ }
+ logError("Kill has been called.");
+ this.killed = true;
+
+ BlockingStatus status = currentBlockStatus;
+ if (status != null) {
+ status.unblock();
+ }
+
+ // Cancel code here
+ if (job == null) {
+ logError("Job hasn't started yet.");
+ // Just in case we're waiting on the delay
+ synchronized (this) {
+ this.notify();
+ }
+ return;
+ }
+
+ try {
+ job.cancel();
+ } catch (Exception e) {
+ logError(e.getMessage());
+ logError("Failed trying to cancel job. Maybe it hasn't started running yet or just finished.");
+ }
+
+ this.changeStatus(Status.KILLED);
+ }
+ }
+
+ public boolean isKilled() {
+ return killed;
+ }
+
+ public Status getStatus() {
+ return node.getStatus();
+ }
+
+ private void logError(String message) {
+ if (logger != null) {
+ logger.error(message);
+ }
+ }
+
+ private void logInfo(String message) {
+ if (logger != null) {
+ logger.info(message);
+ }
+ }
+
+ public File getLogFile() {
+ return logFile;
+ }
+
+ public static String createLogFileName(ExecutableNode node, int attempt) {
+ int executionId = node.getExecutableFlow().getExecutionId();
+ String jobId = node.getId();
+ if (node.getExecutableFlow() != node.getParentFlow()) {
+ // Posix safe file delimiter
+ jobId = node.getPrintableId("._.");
+ }
+ return attempt > 0 ? "_job." + executionId + "." + attempt + "." + jobId
+ + ".log" : "_job." + executionId + "." + jobId + ".log";
+ }
+
+ public static String createLogFileName(ExecutableNode node) {
+ return JobRunner.createLogFileName(node, node.getAttempt());
+ }
+
+ public static String createMetaDataFileName(ExecutableNode node, int attempt) {
+ int executionId = node.getExecutableFlow().getExecutionId();
+ String jobId = node.getId();
+ if (node.getExecutableFlow() != node.getParentFlow()) {
+ // Posix safe file delimiter
+ jobId = node.getPrintableId("._.");
+ }
+
+ return attempt > 0 ? "_job." + executionId + "." + attempt + "." + jobId
+ + ".meta" : "_job." + executionId + "." + jobId + ".meta";
+ }
+
+ public static String createMetaDataFileName(ExecutableNode node) {
+ return JobRunner.createMetaDataFileName(node, node.getAttempt());
+ }
+
+ public static String createAttachmentFileName(ExecutableNode node) {
+
+ return JobRunner.createAttachmentFileName(node, node.getAttempt());
+ }
+
+ public static String createAttachmentFileName(ExecutableNode node, int attempt) {
+ int executionId = node.getExecutableFlow().getExecutionId();
+ String jobId = node.getId();
+ if (node.getExecutableFlow() != node.getParentFlow()) {
+ // Posix safe file delimiter
+ jobId = node.getPrintableId("._.");
+ }
+
+ return attempt > 0 ? "_job." + executionId + "." + attempt + "." + jobId
+ + ".attach" : "_job." + executionId + "." + jobId + ".attach";
+ }
}
src/main/java/azkaban/execapp/ProjectVersion.java 174(+89 -85)
diff --git a/src/main/java/azkaban/execapp/ProjectVersion.java b/src/main/java/azkaban/execapp/ProjectVersion.java
index 2e9faae..35efc24 100644
--- a/src/main/java/azkaban/execapp/ProjectVersion.java
+++ b/src/main/java/azkaban/execapp/ProjectVersion.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -31,86 +31,90 @@ import azkaban.utils.FileIOUtils;
import azkaban.utils.Utils;
public class ProjectVersion implements Comparable<ProjectVersion> {
- private final int projectId;
- private final int version;
- private File installedDir;
-
- public ProjectVersion(int projectId, int version) {
- this.projectId = projectId;
- this.version = version;
- }
-
- public ProjectVersion(int projectId, int version, File installedDir) {
- this.projectId = projectId;
- this.version = version;
- this.installedDir = installedDir;
- }
-
- public int getProjectId() {
- return projectId;
- }
-
- public int getVersion() {
- return version;
- }
-
- public synchronized void setupProjectFiles(ProjectLoader projectLoader, File projectDir, Logger logger) throws ProjectManagerException, IOException {
- String projectVersion = String.valueOf(projectId) + "." + String.valueOf(version);
- if (installedDir == null) {
- installedDir = new File(projectDir, projectVersion);
- }
-
- if (!installedDir.exists()) {
-
- logger.info("First time executing new project. Setting up in directory " + installedDir.getPath());
-
- File tempDir = new File(projectDir, "_temp." + projectVersion + "." + System.currentTimeMillis());
- tempDir.mkdirs();
- ProjectFileHandler projectFileHandler = null;
- try {
- projectFileHandler = projectLoader.getUploadedFile(projectId, version);
- if ("zip".equals(projectFileHandler.getFileType())) {
- logger.info("Downloading zip file.");
- ZipFile zip = new ZipFile(projectFileHandler.getLocalFile());
- Utils.unzip(zip, tempDir);
-
- tempDir.renameTo(installedDir);
- }
- else {
- throw new IOException("The file type hasn't been decided yet.");
- }
- }
- finally {
- if (projectFileHandler != null) {
- projectFileHandler.deleteLocalFile();
- }
- }
- }
- }
-
- public synchronized void copyCreateSymlinkDirectory(File executionDir) throws IOException {
- if (installedDir == null || !installedDir.exists()) {
- throw new IOException("Installed dir doesn't exist");
- }
- else if (executionDir == null || !executionDir.exists()) {
- throw new IOException("Execution dir doesn't exist");
- }
- FileIOUtils.createDeepSymlink(installedDir, executionDir);
- }
-
- public synchronized void deleteDirectory() throws IOException {
- System.out.println("Deleting old unused project versin " + installedDir);
- if (installedDir != null && installedDir.exists()) {
- FileUtils.deleteDirectory(installedDir);
- }
- }
-
- @Override
- public int compareTo(ProjectVersion o) {
- if (projectId == o.projectId) {
- return version - o.version;
- }
-
- return projectId - o.projectId;
- }
+ private final int projectId;
+ private final int version;
+ private File installedDir;
+
+ public ProjectVersion(int projectId, int version) {
+ this.projectId = projectId;
+ this.version = version;
+ }
+
+ public ProjectVersion(int projectId, int version, File installedDir) {
+ this.projectId = projectId;
+ this.version = version;
+ this.installedDir = installedDir;
+ }
+
+ public int getProjectId() {
+ return projectId;
+ }
+
+ public int getVersion() {
+ return version;
+ }
+
+ public synchronized void setupProjectFiles(ProjectLoader projectLoader,
+ File projectDir, Logger logger) throws ProjectManagerException,
+ IOException {
+ String projectVersion =
+ String.valueOf(projectId) + "." + String.valueOf(version);
+ if (installedDir == null) {
+ installedDir = new File(projectDir, projectVersion);
+ }
+
+ if (!installedDir.exists()) {
+
+ logger.info("First time executing new project. Setting up in directory "
+ + installedDir.getPath());
+
+ File tempDir =
+ new File(projectDir, "_temp." + projectVersion + "."
+ + System.currentTimeMillis());
+ tempDir.mkdirs();
+ ProjectFileHandler projectFileHandler = null;
+ try {
+ projectFileHandler = projectLoader.getUploadedFile(projectId, version);
+ if ("zip".equals(projectFileHandler.getFileType())) {
+ logger.info("Downloading zip file.");
+ ZipFile zip = new ZipFile(projectFileHandler.getLocalFile());
+ Utils.unzip(zip, tempDir);
+
+ tempDir.renameTo(installedDir);
+ } else {
+ throw new IOException("The file type hasn't been decided yet.");
+ }
+ } finally {
+ if (projectFileHandler != null) {
+ projectFileHandler.deleteLocalFile();
+ }
+ }
+ }
+ }
+
+ public synchronized void copyCreateSymlinkDirectory(File executionDir)
+ throws IOException {
+ if (installedDir == null || !installedDir.exists()) {
+ throw new IOException("Installed dir doesn't exist");
+ } else if (executionDir == null || !executionDir.exists()) {
+ throw new IOException("Execution dir doesn't exist");
+ }
+ FileIOUtils.createDeepSymlink(installedDir, executionDir);
+ }
+
+ public synchronized void deleteDirectory() throws IOException {
+ System.out.println("Deleting old unused project versin " + installedDir);
+ if (installedDir != null && installedDir.exists()) {
+ FileUtils.deleteDirectory(installedDir);
+ }
+ }
+
+ @Override
+ public int compareTo(ProjectVersion o) {
+ if (projectId == o.projectId) {
+ return version - o.version;
+ }
+
+ return projectId - o.projectId;
+ }
}
src/main/java/azkaban/executor/ConnectorParams.java 143(+73 -70)
diff --git a/src/main/java/azkaban/executor/ConnectorParams.java b/src/main/java/azkaban/executor/ConnectorParams.java
index 3b3f00e..ddd8bba 100644
--- a/src/main/java/azkaban/executor/ConnectorParams.java
+++ b/src/main/java/azkaban/executor/ConnectorParams.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -17,72 +17,75 @@
package azkaban.executor;
public interface ConnectorParams {
- public static final String ACTION_PARAM = "action";
- public static final String EXECID_PARAM = "execid";
- public static final String SHAREDTOKEN_PARAM = "token";
- public static final String USER_PARAM = "user";
-
- public static final String UPDATE_ACTION = "update";
- public static final String STATUS_ACTION = "status";
- public static final String EXECUTE_ACTION = "execute";
- public static final String CANCEL_ACTION = "cancel";
- public static final String PAUSE_ACTION = "pause";
- public static final String RESUME_ACTION = "resume";
- public static final String PING_ACTION = "ping";
- public static final String LOG_ACTION = "log";
- public static final String ATTACHMENTS_ACTION = "attachments";
- public static final String METADATA_ACTION = "metadata";
- public static final String RELOAD_JOBTYPE_PLUGINS_ACTION = "reloadJobTypePlugins";
-
- public static final String MODIFY_EXECUTION_ACTION = "modifyExecution";
- public static final String MODIFY_EXECUTION_ACTION_TYPE = "modifyType";
- public static final String MODIFY_RETRY_FAILURES = "retryFailures";
- public static final String MODIFY_RETRY_JOBS = "retryJobs";
- public static final String MODIFY_CANCEL_JOBS = "cancelJobs";
- public static final String MODIFY_DISABLE_JOBS = "skipJobs";
- public static final String MODIFY_ENABLE_JOBS = "enableJobs";
- public static final String MODIFY_PAUSE_JOBS = "pauseJobs";
- public static final String MODIFY_RESUME_JOBS = "resumeJobs";
- public static final String MODIFY_JOBS_LIST = "jobIds";
-
- public static final String START_PARAM = "start";
- public static final String END_PARAM = "end";
- public static final String STATUS_PARAM = "status";
- public static final String NODES_PARAM = "nodes";
- public static final String EXECPATH_PARAM = "execpath";
-
- public static final String RESPONSE_NOTFOUND = "notfound";
- public static final String RESPONSE_ERROR = "error";
- public static final String RESPONSE_SUCCESS = "success";
- public static final String RESPONSE_ALIVE = "alive";
- public static final String RESPONSE_UPDATETIME = "lasttime";
- public static final String RESPONSE_UPDATED_FLOWS = "updated";
-
- public static final int NODE_NAME_INDEX = 0;
- public static final int NODE_STATUS_INDEX = 1;
- public static final int NODE_START_INDEX = 2;
- public static final int NODE_END_INDEX = 3;
+ public static final String ACTION_PARAM = "action";
+ public static final String EXECID_PARAM = "execid";
+ public static final String SHAREDTOKEN_PARAM = "token";
+ public static final String USER_PARAM = "user";
- public static final String UPDATE_TIME_LIST_PARAM = "updatetime";
- public static final String EXEC_ID_LIST_PARAM = "executionId";
-
- public static final String FORCED_FAILED_MARKER = ".failed";
-
- public static final String UPDATE_MAP_EXEC_ID = "executionId";
- public static final String UPDATE_MAP_JOBID = "jobId";
- public static final String UPDATE_MAP_UPDATE_TIME = "updateTime";
- public static final String UPDATE_MAP_STATUS = "status";
- public static final String UPDATE_MAP_START_TIME = "startTime";
- public static final String UPDATE_MAP_END_TIME = "endTime";
- public static final String UPDATE_MAP_NODES = "nodes";
-
- public static final String JMX_GET_MBEANS = "getMBeans";
- public static final String JMX_GET_MBEAN_INFO = "getMBeanInfo";
- public static final String JMX_GET_MBEAN_ATTRIBUTE = "getAttribute";
- public static final String JMX_GET_ALL_MBEAN_ATTRIBUTES = "getAllMBeanAttributes";
- public static final String JMX_ATTRIBUTE = "attribute";
- public static final String JMX_MBEAN = "mBean";
-
- public static final String JMX_GET_ALL_EXECUTOR_ATTRIBUTES = "getAllExecutorAttributes";
- public static final String JMX_HOSTPORT = "hostPort";
+ public static final String UPDATE_ACTION = "update";
+ public static final String STATUS_ACTION = "status";
+ public static final String EXECUTE_ACTION = "execute";
+ public static final String CANCEL_ACTION = "cancel";
+ public static final String PAUSE_ACTION = "pause";
+ public static final String RESUME_ACTION = "resume";
+ public static final String PING_ACTION = "ping";
+ public static final String LOG_ACTION = "log";
+ public static final String ATTACHMENTS_ACTION = "attachments";
+ public static final String METADATA_ACTION = "metadata";
+ public static final String RELOAD_JOBTYPE_PLUGINS_ACTION =
+ "reloadJobTypePlugins";
+
+ public static final String MODIFY_EXECUTION_ACTION = "modifyExecution";
+ public static final String MODIFY_EXECUTION_ACTION_TYPE = "modifyType";
+ public static final String MODIFY_RETRY_FAILURES = "retryFailures";
+ public static final String MODIFY_RETRY_JOBS = "retryJobs";
+ public static final String MODIFY_CANCEL_JOBS = "cancelJobs";
+ public static final String MODIFY_DISABLE_JOBS = "skipJobs";
+ public static final String MODIFY_ENABLE_JOBS = "enableJobs";
+ public static final String MODIFY_PAUSE_JOBS = "pauseJobs";
+ public static final String MODIFY_RESUME_JOBS = "resumeJobs";
+ public static final String MODIFY_JOBS_LIST = "jobIds";
+
+ public static final String START_PARAM = "start";
+ public static final String END_PARAM = "end";
+ public static final String STATUS_PARAM = "status";
+ public static final String NODES_PARAM = "nodes";
+ public static final String EXECPATH_PARAM = "execpath";
+
+ public static final String RESPONSE_NOTFOUND = "notfound";
+ public static final String RESPONSE_ERROR = "error";
+ public static final String RESPONSE_SUCCESS = "success";
+ public static final String RESPONSE_ALIVE = "alive";
+ public static final String RESPONSE_UPDATETIME = "lasttime";
+ public static final String RESPONSE_UPDATED_FLOWS = "updated";
+
+ public static final int NODE_NAME_INDEX = 0;
+ public static final int NODE_STATUS_INDEX = 1;
+ public static final int NODE_START_INDEX = 2;
+ public static final int NODE_END_INDEX = 3;
+
+ public static final String UPDATE_TIME_LIST_PARAM = "updatetime";
+ public static final String EXEC_ID_LIST_PARAM = "executionId";
+
+ public static final String FORCED_FAILED_MARKER = ".failed";
+
+ public static final String UPDATE_MAP_EXEC_ID = "executionId";
+ public static final String UPDATE_MAP_JOBID = "jobId";
+ public static final String UPDATE_MAP_UPDATE_TIME = "updateTime";
+ public static final String UPDATE_MAP_STATUS = "status";
+ public static final String UPDATE_MAP_START_TIME = "startTime";
+ public static final String UPDATE_MAP_END_TIME = "endTime";
+ public static final String UPDATE_MAP_NODES = "nodes";
+
+ public static final String JMX_GET_MBEANS = "getMBeans";
+ public static final String JMX_GET_MBEAN_INFO = "getMBeanInfo";
+ public static final String JMX_GET_MBEAN_ATTRIBUTE = "getAttribute";
+ public static final String JMX_GET_ALL_MBEAN_ATTRIBUTES =
+ "getAllMBeanAttributes";
+ public static final String JMX_ATTRIBUTE = "attribute";
+ public static final String JMX_MBEAN = "mBean";
+
+ public static final String JMX_GET_ALL_EXECUTOR_ATTRIBUTES =
+ "getAllExecutorAttributes";
+ public static final String JMX_HOSTPORT = "hostPort";
}
src/main/java/azkaban/executor/ExecutableFlow.java 416(+209 -207)
diff --git a/src/main/java/azkaban/executor/ExecutableFlow.java b/src/main/java/azkaban/executor/ExecutableFlow.java
index bc33c0e..978e13d 100644
--- a/src/main/java/azkaban/executor/ExecutableFlow.java
+++ b/src/main/java/azkaban/executor/ExecutableFlow.java
@@ -1,12 +1,12 @@
/*
* Copyright 2013 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -28,207 +28,209 @@ import azkaban.project.Project;
import azkaban.utils.TypedMapWrapper;
public class ExecutableFlow extends ExecutableFlowBase {
- public static final String EXECUTIONID_PARAM = "executionId";
- public static final String EXECUTIONPATH_PARAM ="executionPath";
- public static final String EXECUTIONOPTIONS_PARAM ="executionOptions";
- public static final String PROJECTID_PARAM ="projectId";
- public static final String SCHEDULEID_PARAM ="scheduleId";
- public static final String SUBMITUSER_PARAM = "submitUser";
- public static final String SUBMITTIME_PARAM = "submitTime";
- public static final String VERSION_PARAM = "version";
- public static final String PROXYUSERS_PARAM = "proxyUsers";
-
- private int executionId = -1;
- private int scheduleId = -1;
- private int projectId;
- private int version;
- private long submitTime = -1;
- private String submitUser;
- private String executionPath;
-
- private HashSet<String> proxyUsers = new HashSet<String>();
- private ExecutionOptions executionOptions;
-
- public ExecutableFlow(Project project, Flow flow) {
- this.projectId = project.getId();
- this.version = project.getVersion();
- this.scheduleId = -1;
-
- this.setFlow(project, flow);
- }
-
- public ExecutableFlow() {
- }
-
- @Override
- public String getId() {
- return getFlowId();
- }
-
- @Override
- public ExecutableFlow getExecutableFlow() {
- return this;
- }
-
- public void addAllProxyUsers(Collection<String> proxyUsers) {
- this.proxyUsers.addAll(proxyUsers);
- }
-
- public Set<String> getProxyUsers() {
- return new HashSet<String>(this.proxyUsers);
- }
-
- public void setExecutionOptions(ExecutionOptions options) {
- executionOptions = options;
- }
-
- public ExecutionOptions getExecutionOptions() {
- return executionOptions;
- }
-
- protected void setFlow(Project project, Flow flow) {
- super.setFlow(project, flow);
- executionOptions = new ExecutionOptions();
- executionOptions.setMailCreator(flow.getMailCreator());
-
- if (flow.getSuccessEmails() != null) {
- executionOptions.setSuccessEmails(flow.getSuccessEmails());
- }
- if (flow.getFailureEmails() != null) {
- executionOptions.setFailureEmails(flow.getFailureEmails());
- }
- }
-
- public int getExecutionId() {
- return executionId;
- }
-
- public void setExecutionId(int executionId) {
- this.executionId = executionId;
- }
-
- @Override
- public int getProjectId() {
- return projectId;
- }
-
- public void setProjectId(int projectId) {
- this.projectId = projectId;
- }
-
- public int getScheduleId() {
- return scheduleId;
- }
-
- public void setScheduleId(int scheduleId) {
- this.scheduleId = scheduleId;
- }
-
- public String getExecutionPath() {
- return executionPath;
- }
-
- public void setExecutionPath(String executionPath) {
- this.executionPath = executionPath;
- }
-
- public String getSubmitUser() {
- return submitUser;
- }
-
- public void setSubmitUser(String submitUser) {
- this.submitUser = submitUser;
- }
-
- @Override
- public int getVersion() {
- return version;
- }
-
- public void setVersion(int version) {
- this.version = version;
- }
-
- public long getSubmitTime() {
- return submitTime;
- }
-
- public void setSubmitTime(long submitTime) {
- this.submitTime = submitTime;
- }
-
- public Map<String, Object> toObject() {
- HashMap<String, Object> flowObj = new HashMap<String, Object>();
- fillMapFromExecutable(flowObj);
-
- flowObj.put(EXECUTIONID_PARAM, executionId);
- flowObj.put(EXECUTIONPATH_PARAM, executionPath);
- flowObj.put(PROJECTID_PARAM, projectId);
-
- if(scheduleId >= 0) {
- flowObj.put(SCHEDULEID_PARAM, scheduleId);
- }
-
- flowObj.put(SUBMITUSER_PARAM, submitUser);
- flowObj.put(VERSION_PARAM, version);
-
- flowObj.put(EXECUTIONOPTIONS_PARAM, this.executionOptions.toObject());
- flowObj.put(VERSION_PARAM, version);
-
- ArrayList<String> proxyUserList = new ArrayList<String>(proxyUsers);
- flowObj.put(PROXYUSERS_PARAM, proxyUserList);
-
- flowObj.put(SUBMITTIME_PARAM, submitTime);
-
- return flowObj;
- }
-
- @SuppressWarnings("unchecked")
- public static ExecutableFlow createExecutableFlowFromObject(Object obj) {
- ExecutableFlow exFlow = new ExecutableFlow();
- HashMap<String, Object> flowObj = (HashMap<String,Object>)obj;
- exFlow.fillExecutableFromMapObject(flowObj);
-
- return exFlow;
- }
-
- @Override
- public void fillExecutableFromMapObject(TypedMapWrapper<String, Object> flowObj) {
- super.fillExecutableFromMapObject(flowObj);
-
- this.executionId = flowObj.getInt(EXECUTIONID_PARAM);
- this.executionPath = flowObj.getString(EXECUTIONPATH_PARAM);
-
- this.projectId = flowObj.getInt(PROJECTID_PARAM);
- this.scheduleId = flowObj.getInt(SCHEDULEID_PARAM);
- this.submitUser = flowObj.getString(SUBMITUSER_PARAM);
- this.version = flowObj.getInt(VERSION_PARAM);
- this.submitTime = flowObj.getLong(SUBMITTIME_PARAM);
-
- if (flowObj.containsKey(EXECUTIONOPTIONS_PARAM)) {
- this.executionOptions = ExecutionOptions.createFromObject(flowObj.getObject(EXECUTIONOPTIONS_PARAM));
- }
- else {
- // for backwards compatibility should remove in a few versions.
- this.executionOptions = ExecutionOptions.createFromObject(flowObj);
- }
-
- if(flowObj.containsKey(PROXYUSERS_PARAM)) {
- List<String> proxyUserList = flowObj.<String>getList(PROXYUSERS_PARAM);
- this.addAllProxyUsers(proxyUserList);
-
- }
- }
-
- public Map<String, Object> toUpdateObject(long lastUpdateTime) {
- Map<String, Object> updateData = super.toUpdateObject(lastUpdateTime);
- updateData.put(EXECUTIONID_PARAM, this.executionId);
- return updateData;
- }
-
- public void resetForRetry() {
- super.resetForRetry();
- this.setStatus(Status.RUNNING);
- }
-
-}
\ No newline at end of file
+ public static final String EXECUTIONID_PARAM = "executionId";
+ public static final String EXECUTIONPATH_PARAM = "executionPath";
+ public static final String EXECUTIONOPTIONS_PARAM = "executionOptions";
+ public static final String PROJECTID_PARAM = "projectId";
+ public static final String SCHEDULEID_PARAM = "scheduleId";
+ public static final String SUBMITUSER_PARAM = "submitUser";
+ public static final String SUBMITTIME_PARAM = "submitTime";
+ public static final String VERSION_PARAM = "version";
+ public static final String PROXYUSERS_PARAM = "proxyUsers";
+
+ private int executionId = -1;
+ private int scheduleId = -1;
+ private int projectId;
+ private int version;
+ private long submitTime = -1;
+ private String submitUser;
+ private String executionPath;
+
+ private HashSet<String> proxyUsers = new HashSet<String>();
+ private ExecutionOptions executionOptions;
+
+ public ExecutableFlow(Project project, Flow flow) {
+ this.projectId = project.getId();
+ this.version = project.getVersion();
+ this.scheduleId = -1;
+
+ this.setFlow(project, flow);
+ }
+
+ public ExecutableFlow() {
+ }
+
+ @Override
+ public String getId() {
+ return getFlowId();
+ }
+
+ @Override
+ public ExecutableFlow getExecutableFlow() {
+ return this;
+ }
+
+ public void addAllProxyUsers(Collection<String> proxyUsers) {
+ this.proxyUsers.addAll(proxyUsers);
+ }
+
+ public Set<String> getProxyUsers() {
+ return new HashSet<String>(this.proxyUsers);
+ }
+
+ public void setExecutionOptions(ExecutionOptions options) {
+ executionOptions = options;
+ }
+
+ public ExecutionOptions getExecutionOptions() {
+ return executionOptions;
+ }
+
+ protected void setFlow(Project project, Flow flow) {
+ super.setFlow(project, flow);
+ executionOptions = new ExecutionOptions();
+ executionOptions.setMailCreator(flow.getMailCreator());
+
+ if (flow.getSuccessEmails() != null) {
+ executionOptions.setSuccessEmails(flow.getSuccessEmails());
+ }
+ if (flow.getFailureEmails() != null) {
+ executionOptions.setFailureEmails(flow.getFailureEmails());
+ }
+ }
+
+ public int getExecutionId() {
+ return executionId;
+ }
+
+ public void setExecutionId(int executionId) {
+ this.executionId = executionId;
+ }
+
+ @Override
+ public int getProjectId() {
+ return projectId;
+ }
+
+ public void setProjectId(int projectId) {
+ this.projectId = projectId;
+ }
+
+ public int getScheduleId() {
+ return scheduleId;
+ }
+
+ public void setScheduleId(int scheduleId) {
+ this.scheduleId = scheduleId;
+ }
+
+ public String getExecutionPath() {
+ return executionPath;
+ }
+
+ public void setExecutionPath(String executionPath) {
+ this.executionPath = executionPath;
+ }
+
+ public String getSubmitUser() {
+ return submitUser;
+ }
+
+ public void setSubmitUser(String submitUser) {
+ this.submitUser = submitUser;
+ }
+
+ @Override
+ public int getVersion() {
+ return version;
+ }
+
+ public void setVersion(int version) {
+ this.version = version;
+ }
+
+ public long getSubmitTime() {
+ return submitTime;
+ }
+
+ public void setSubmitTime(long submitTime) {
+ this.submitTime = submitTime;
+ }
+
+ public Map<String, Object> toObject() {
+ HashMap<String, Object> flowObj = new HashMap<String, Object>();
+ fillMapFromExecutable(flowObj);
+
+ flowObj.put(EXECUTIONID_PARAM, executionId);
+ flowObj.put(EXECUTIONPATH_PARAM, executionPath);
+ flowObj.put(PROJECTID_PARAM, projectId);
+
+ if (scheduleId >= 0) {
+ flowObj.put(SCHEDULEID_PARAM, scheduleId);
+ }
+
+ flowObj.put(SUBMITUSER_PARAM, submitUser);
+ flowObj.put(VERSION_PARAM, version);
+
+ flowObj.put(EXECUTIONOPTIONS_PARAM, this.executionOptions.toObject());
+ flowObj.put(VERSION_PARAM, version);
+
+ ArrayList<String> proxyUserList = new ArrayList<String>(proxyUsers);
+ flowObj.put(PROXYUSERS_PARAM, proxyUserList);
+
+ flowObj.put(SUBMITTIME_PARAM, submitTime);
+
+ return flowObj;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static ExecutableFlow createExecutableFlowFromObject(Object obj) {
+ ExecutableFlow exFlow = new ExecutableFlow();
+ HashMap<String, Object> flowObj = (HashMap<String, Object>) obj;
+ exFlow.fillExecutableFromMapObject(flowObj);
+
+ return exFlow;
+ }
+
+ @Override
+ public void fillExecutableFromMapObject(
+ TypedMapWrapper<String, Object> flowObj) {
+ super.fillExecutableFromMapObject(flowObj);
+
+ this.executionId = flowObj.getInt(EXECUTIONID_PARAM);
+ this.executionPath = flowObj.getString(EXECUTIONPATH_PARAM);
+
+ this.projectId = flowObj.getInt(PROJECTID_PARAM);
+ this.scheduleId = flowObj.getInt(SCHEDULEID_PARAM);
+ this.submitUser = flowObj.getString(SUBMITUSER_PARAM);
+ this.version = flowObj.getInt(VERSION_PARAM);
+ this.submitTime = flowObj.getLong(SUBMITTIME_PARAM);
+
+ if (flowObj.containsKey(EXECUTIONOPTIONS_PARAM)) {
+ this.executionOptions =
+ ExecutionOptions.createFromObject(flowObj
+ .getObject(EXECUTIONOPTIONS_PARAM));
+ } else {
+ // for backwards compatibility should remove in a few versions.
+ this.executionOptions = ExecutionOptions.createFromObject(flowObj);
+ }
+
+ if (flowObj.containsKey(PROXYUSERS_PARAM)) {
+ List<String> proxyUserList = flowObj.<String> getList(PROXYUSERS_PARAM);
+ this.addAllProxyUsers(proxyUserList);
+
+ }
+ }
+
+ public Map<String, Object> toUpdateObject(long lastUpdateTime) {
+ Map<String, Object> updateData = super.toUpdateObject(lastUpdateTime);
+ updateData.put(EXECUTIONID_PARAM, this.executionId);
+ return updateData;
+ }
+
+ public void resetForRetry() {
+ super.resetForRetry();
+ this.setStatus(Status.RUNNING);
+ }
+
+}
src/main/java/azkaban/executor/ExecutableFlowBase.java 802(+408 -394)
diff --git a/src/main/java/azkaban/executor/ExecutableFlowBase.java b/src/main/java/azkaban/executor/ExecutableFlowBase.java
index a760204..72aaa6a 100644
--- a/src/main/java/azkaban/executor/ExecutableFlowBase.java
+++ b/src/main/java/azkaban/executor/ExecutableFlowBase.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn, Inc
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -30,394 +30,408 @@ import azkaban.project.Project;
import azkaban.utils.TypedMapWrapper;
public class ExecutableFlowBase extends ExecutableNode {
- public static final String FLOW_ID_PARAM = "flowId";
- public static final String NODES_PARAM = "nodes";
- public static final String PROPERTIES_PARAM = "properties";
- public static final String SOURCE_PARAM = "source";
- public static final String INHERITED_PARAM = "inherited";
-
- private HashMap<String, ExecutableNode> executableNodes = new HashMap<String, ExecutableNode>();
- private ArrayList<String> startNodes;
- private ArrayList<String> endNodes;
-
- private HashMap<String, FlowProps> flowProps = new HashMap<String, FlowProps>();
- private String flowId;
-
- public ExecutableFlowBase(Project project, Node node, Flow flow, ExecutableFlowBase parent) {
- super(node, parent);
-
- setFlow(project, flow);
- }
-
- public ExecutableFlowBase() {
- }
-
- public int getExecutionId() {
- if (this.getParentFlow() != null) {
- return this.getParentFlow().getExecutionId();
- }
-
- return -1;
- }
-
- public int getProjectId() {
- if (this.getParentFlow() != null) {
- return this.getParentFlow().getProjectId();
- }
-
- return -1;
- }
-
- public int getVersion() {
- if (this.getParentFlow() != null) {
- return this.getParentFlow().getVersion();
- }
-
- return -1;
- }
-
- public Collection<FlowProps> getFlowProps() {
- return flowProps.values();
- }
-
- public String getFlowId() {
- return flowId;
- }
-
- protected void setFlow(Project project, Flow flow) {
- this.flowId = flow.getId();
- flowProps.putAll(flow.getAllFlowProps());
-
- for (Node node: flow.getNodes()) {
- String id = node.getId();
- if (node.getType().equals(SpecialJobTypes.EMBEDDED_FLOW_TYPE)) {
- String embeddedFlowId = node.getEmbeddedFlowId();
- Flow subFlow = project.getFlow(embeddedFlowId);
-
- ExecutableFlowBase embeddedFlow = new ExecutableFlowBase(project, node, subFlow, this);
- executableNodes.put(id, embeddedFlow);
- }
- else {
- ExecutableNode exNode = new ExecutableNode(node, this);
- executableNodes.put(id, exNode);
- }
- }
-
- for (Edge edge: flow.getEdges()) {
- ExecutableNode sourceNode = executableNodes.get(edge.getSourceId());
- ExecutableNode targetNode = executableNodes.get(edge.getTargetId());
-
- if (sourceNode == null) {
- System.out.println("Source node " + edge.getSourceId() + " doesn't exist");
- }
- sourceNode.addOutNode(edge.getTargetId());
- targetNode.addInNode(edge.getSourceId());
- }
- }
-
- public List<ExecutableNode> getExecutableNodes() {
- return new ArrayList<ExecutableNode>(executableNodes.values());
- }
-
- public ExecutableNode getExecutableNode(String id) {
- return executableNodes.get(id);
- }
-
- public ExecutableNode getExecutableNodePath(String ids) {
- String[] split = ids.split(":");
- return getExecutableNodePath(split);
- }
-
- public ExecutableNode getExecutableNodePath(String ... ids) {
- return getExecutableNodePath(this, ids, 0);
- }
-
- private ExecutableNode getExecutableNodePath(ExecutableFlowBase flow, String[] ids, int currentIdIdx) {
- ExecutableNode node = flow.getExecutableNode(ids[currentIdIdx]);
- currentIdIdx++;
-
- if (node == null) {
- return null;
- }
-
- if (ids.length == currentIdIdx) {
- return node;
- }
- else if (node instanceof ExecutableFlowBase) {
- return getExecutableNodePath((ExecutableFlowBase)node, ids, currentIdIdx);
- }
- else {
- return null;
- }
-
- }
-
- public List<String> getStartNodes() {
- if (startNodes == null) {
- startNodes = new ArrayList<String>();
- for (ExecutableNode node: executableNodes.values()) {
- if (node.getInNodes().isEmpty()) {
- startNodes.add(node.getId());
- }
- }
- }
-
- return startNodes;
- }
-
- public List<String> getEndNodes() {
- if (endNodes == null) {
- endNodes = new ArrayList<String>();
- for (ExecutableNode node: executableNodes.values()) {
- if (node.getOutNodes().isEmpty()) {
- endNodes.add(node.getId());
- }
- }
- }
-
- return endNodes;
- }
-
- public Map<String,Object> toObject() {
- Map<String,Object> mapObj = new HashMap<String,Object>();
- fillMapFromExecutable(mapObj);
-
- return mapObj;
- }
-
- protected void fillMapFromExecutable(Map<String,Object> flowObjMap) {
- super.fillMapFromExecutable(flowObjMap);
-
- flowObjMap.put(FLOW_ID_PARAM, flowId);
-
- ArrayList<Object> nodes = new ArrayList<Object>();
- for (ExecutableNode node: executableNodes.values()) {
- nodes.add(node.toObject());
- }
- flowObjMap.put(NODES_PARAM, nodes);
-
- // Flow properties
- ArrayList<Object> props = new ArrayList<Object>();
- for (FlowProps fprop: flowProps.values()) {
- HashMap<String, Object> propObj = new HashMap<String, Object>();
- String source = fprop.getSource();
- String inheritedSource = fprop.getInheritedSource();
-
- propObj.put(SOURCE_PARAM, source);
- if (inheritedSource != null) {
- propObj.put(INHERITED_PARAM, inheritedSource);
- }
- props.add(propObj);
- }
- flowObjMap.put(PROPERTIES_PARAM, props);
- }
-
- @Override
- public void fillExecutableFromMapObject(TypedMapWrapper<String,Object> flowObjMap) {
- super.fillExecutableFromMapObject(flowObjMap);
-
- this.flowId = flowObjMap.getString(FLOW_ID_PARAM);
- List<Object> nodes = flowObjMap.<Object>getList(NODES_PARAM);
-
- if (nodes != null) {
- for (Object nodeObj: nodes) {
- @SuppressWarnings("unchecked")
- Map<String,Object> nodeObjMap = (Map<String,Object>)nodeObj;
- TypedMapWrapper<String,Object> wrapper = new TypedMapWrapper<String,Object>(nodeObjMap);
-
- String type = wrapper.getString(TYPE_PARAM);
- if (type != null && type.equals(SpecialJobTypes.EMBEDDED_FLOW_TYPE)) {
- ExecutableFlowBase exFlow = new ExecutableFlowBase();
- exFlow.fillExecutableFromMapObject(wrapper);
- exFlow.setParentFlow(this);
-
- executableNodes.put(exFlow.getId(), exFlow);
- }
- else {
- ExecutableNode exJob = new ExecutableNode();
- exJob.fillExecutableFromMapObject(nodeObjMap);
- exJob.setParentFlow(this);
-
- executableNodes.put(exJob.getId(), exJob);
- }
- }
- }
-
- List<Object> properties = flowObjMap.<Object>getList(PROPERTIES_PARAM);
- for (Object propNode : properties) {
- @SuppressWarnings("unchecked")
- HashMap<String, Object> fprop = (HashMap<String, Object>)propNode;
- String source = (String)fprop.get("source");
- String inheritedSource = (String)fprop.get("inherited");
-
- FlowProps flowProps = new FlowProps(inheritedSource, source);
- this.flowProps.put(source, flowProps);
- }
- }
-
- public Map<String, Object> toUpdateObject(long lastUpdateTime) {
- Map<String, Object> updateData = super.toUpdateObject();
-
- List<Map<String,Object>> updatedNodes = new ArrayList<Map<String,Object>>();
- for (ExecutableNode node: executableNodes.values()) {
- if (node instanceof ExecutableFlowBase) {
- Map<String, Object> updatedNodeMap = ((ExecutableFlowBase)node).toUpdateObject(lastUpdateTime);
- // We add only flows to the list which either have a good update time, or has updated descendants.
- if (node.getUpdateTime() > lastUpdateTime || updatedNodeMap.containsKey(NODES_PARAM)) {
- updatedNodes.add(updatedNodeMap);
- }
- }
- else {
- if (node.getUpdateTime() > lastUpdateTime) {
- Map<String, Object> updatedNodeMap = node.toUpdateObject();
- updatedNodes.add(updatedNodeMap);
- }
- }
- }
-
- // if there are no updated nodes, we just won't add it to the list. This is good
- // since if this is a nested flow, the parent is given the option to include or
- // discard these subflows.
- if (!updatedNodes.isEmpty()) {
- updateData.put(NODES_PARAM, updatedNodes);
- }
- return updateData;
- }
-
- public void applyUpdateObject(TypedMapWrapper<String, Object> updateData, List<ExecutableNode> updatedNodes) {
- super.applyUpdateObject(updateData);
-
- if (updatedNodes != null) {
- updatedNodes.add(this);
- }
-
- List<Map<String,Object>> nodes = (List<Map<String,Object>>)updateData.<Map<String,Object>>getList(NODES_PARAM);
- if (nodes != null) {
- for (Map<String,Object> node: nodes) {
- TypedMapWrapper<String,Object> nodeWrapper = new TypedMapWrapper<String,Object>(node);
- String id = nodeWrapper.getString(ID_PARAM);
- if (id == null) {
- // Legacy case
- id = nodeWrapper.getString("jobId");
- }
-
- ExecutableNode exNode = executableNodes.get(id);
- if (updatedNodes != null) {
- updatedNodes.add(exNode);
- }
-
- if (exNode instanceof ExecutableFlowBase) {
- ((ExecutableFlowBase)exNode).applyUpdateObject(nodeWrapper, updatedNodes);
- }
- else {
- exNode.applyUpdateObject(nodeWrapper);
- }
- }
- }
- }
-
- public void applyUpdateObject(Map<String, Object> updateData, List<ExecutableNode> updatedNodes) {
- TypedMapWrapper<String, Object> typedMapWrapper = new TypedMapWrapper<String,Object>(updateData);
- applyUpdateObject(typedMapWrapper, updatedNodes);
- }
-
- @Override
- public void applyUpdateObject(Map<String, Object> updateData) {
- TypedMapWrapper<String, Object> typedMapWrapper = new TypedMapWrapper<String,Object>(updateData);
- applyUpdateObject(typedMapWrapper, null);
- }
-
- public void reEnableDependents(ExecutableNode ... nodes) {
- for(ExecutableNode node: nodes) {
- for(String dependent: node.getOutNodes()) {
- ExecutableNode dependentNode = getExecutableNode(dependent);
-
- if (dependentNode.getStatus() == Status.KILLED) {
- dependentNode.setStatus(Status.READY);
- dependentNode.setUpdateTime(System.currentTimeMillis());
- reEnableDependents(dependentNode);
-
- if (dependentNode instanceof ExecutableFlowBase) {
-
- ((ExecutableFlowBase)dependentNode).reEnableDependents();
- }
- }
- else if (dependentNode.getStatus() == Status.SKIPPED) {
- dependentNode.setStatus(Status.DISABLED);
- dependentNode.setUpdateTime(System.currentTimeMillis());
- reEnableDependents(dependentNode);
- }
- }
- }
- }
-
- /**
- * Only returns true if the status of all finished nodes is true.
- * @return
- */
- public boolean isFlowFinished() {
- for (String end: getEndNodes()) {
- ExecutableNode node = getExecutableNode(end);
- if (!Status.isStatusFinished(node.getStatus()) ) {
- return false;
- }
- }
-
- return true;
- }
-
- /**
- * Finds all jobs which are ready to run. This occurs when all of its
- * dependency nodes are finished running.
- *
- * It will also return any subflow that has been completed such that the
- * FlowRunner can properly handle them.
- *
- * @param flow
- * @return
- */
- public List<ExecutableNode> findNextJobsToRun() {
- ArrayList<ExecutableNode> jobsToRun = new ArrayList<ExecutableNode>();
-
- if (isFlowFinished() && !Status.isStatusFinished(getStatus())) {
- jobsToRun.add(this);
- }
- else {
- nodeloop:
- for (ExecutableNode node: executableNodes.values()) {
- if(Status.isStatusFinished(node.getStatus())) {
- continue;
- }
-
- if ((node instanceof ExecutableFlowBase) && Status.isStatusRunning(node.getStatus())) {
- // If the flow is still running, we traverse into the flow
- jobsToRun.addAll(((ExecutableFlowBase)node).findNextJobsToRun());
- }
- else if (Status.isStatusRunning(node.getStatus())) {
- continue;
- }
- else {
- for (String dependency: node.getInNodes()) {
- // We find that the outer-loop is unfinished.
- if (!Status.isStatusFinished(getExecutableNode(dependency).getStatus())) {
- continue nodeloop;
- }
- }
-
- jobsToRun.add(node);
- }
- }
- }
-
- return jobsToRun;
- }
-
- public String getFlowPath() {
- if (this.getParentFlow() == null) {
- return this.getFlowId();
- }
- else {
- return this.getParentFlow().getFlowPath() + "," + this.getId() + ":"+ this.getFlowId();
- }
- }
-}
\ No newline at end of file
+ public static final String FLOW_ID_PARAM = "flowId";
+ public static final String NODES_PARAM = "nodes";
+ public static final String PROPERTIES_PARAM = "properties";
+ public static final String SOURCE_PARAM = "source";
+ public static final String INHERITED_PARAM = "inherited";
+
+ private HashMap<String, ExecutableNode> executableNodes =
+ new HashMap<String, ExecutableNode>();
+ private ArrayList<String> startNodes;
+ private ArrayList<String> endNodes;
+
+ private HashMap<String, FlowProps> flowProps =
+ new HashMap<String, FlowProps>();
+ private String flowId;
+
  /**
   * Builds an executable view of {@code flow} under the given parent,
   * recursively materializing embedded sub-flows (see setFlow).
   */
  public ExecutableFlowBase(Project project, Node node, Flow flow,
      ExecutableFlowBase parent) {
    super(node, parent);

    setFlow(project, flow);
  }

  /** No-arg constructor for deserialization via fillExecutableFromMapObject. */
  public ExecutableFlowBase() {
  }
+
+ public int getExecutionId() {
+ if (this.getParentFlow() != null) {
+ return this.getParentFlow().getExecutionId();
+ }
+
+ return -1;
+ }
+
+ public int getProjectId() {
+ if (this.getParentFlow() != null) {
+ return this.getParentFlow().getProjectId();
+ }
+
+ return -1;
+ }
+
+ public int getVersion() {
+ if (this.getParentFlow() != null) {
+ return this.getParentFlow().getVersion();
+ }
+
+ return -1;
+ }
+
  /** All flow property groups loaded for this flow. */
  public Collection<FlowProps> getFlowProps() {
    return flowProps.values();
  }

  /** Id of the flow definition this executable was created from. */
  public String getFlowId() {
    return flowId;
  }
+
+ protected void setFlow(Project project, Flow flow) {
+ this.flowId = flow.getId();
+ flowProps.putAll(flow.getAllFlowProps());
+
+ for (Node node : flow.getNodes()) {
+ String id = node.getId();
+ if (node.getType().equals(SpecialJobTypes.EMBEDDED_FLOW_TYPE)) {
+ String embeddedFlowId = node.getEmbeddedFlowId();
+ Flow subFlow = project.getFlow(embeddedFlowId);
+
+ ExecutableFlowBase embeddedFlow =
+ new ExecutableFlowBase(project, node, subFlow, this);
+ executableNodes.put(id, embeddedFlow);
+ } else {
+ ExecutableNode exNode = new ExecutableNode(node, this);
+ executableNodes.put(id, exNode);
+ }
+ }
+
+ for (Edge edge : flow.getEdges()) {
+ ExecutableNode sourceNode = executableNodes.get(edge.getSourceId());
+ ExecutableNode targetNode = executableNodes.get(edge.getTargetId());
+
+ if (sourceNode == null) {
+ System.out.println("Source node " + edge.getSourceId()
+ + " doesn't exist");
+ }
+ sourceNode.addOutNode(edge.getTargetId());
+ targetNode.addInNode(edge.getSourceId());
+ }
+ }
+
  /** Snapshot copy of this flow's direct child nodes. */
  public List<ExecutableNode> getExecutableNodes() {
    return new ArrayList<ExecutableNode>(executableNodes.values());
  }

  /** Direct child node with the given id, or null if there is none. */
  public ExecutableNode getExecutableNode(String id) {
    return executableNodes.get(id);
  }
+
+ public ExecutableNode getExecutableNodePath(String ids) {
+ String[] split = ids.split(":");
+ return getExecutableNodePath(split);
+ }
+
+ public ExecutableNode getExecutableNodePath(String... ids) {
+ return getExecutableNodePath(this, ids, 0);
+ }
+
+ private ExecutableNode getExecutableNodePath(ExecutableFlowBase flow,
+ String[] ids, int currentIdIdx) {
+ ExecutableNode node = flow.getExecutableNode(ids[currentIdIdx]);
+ currentIdIdx++;
+
+ if (node == null) {
+ return null;
+ }
+
+ if (ids.length == currentIdIdx) {
+ return node;
+ } else if (node instanceof ExecutableFlowBase) {
+ return getExecutableNodePath((ExecutableFlowBase) node, ids, currentIdIdx);
+ } else {
+ return null;
+ }
+
+ }
+
+ public List<String> getStartNodes() {
+ if (startNodes == null) {
+ startNodes = new ArrayList<String>();
+ for (ExecutableNode node : executableNodes.values()) {
+ if (node.getInNodes().isEmpty()) {
+ startNodes.add(node.getId());
+ }
+ }
+ }
+
+ return startNodes;
+ }
+
+ public List<String> getEndNodes() {
+ if (endNodes == null) {
+ endNodes = new ArrayList<String>();
+ for (ExecutableNode node : executableNodes.values()) {
+ if (node.getOutNodes().isEmpty()) {
+ endNodes.add(node.getId());
+ }
+ }
+ }
+
+ return endNodes;
+ }
+
  /** Serializes this flow (recursively) into a plain map. */
  public Map<String, Object> toObject() {
    Map<String, Object> mapObj = new HashMap<String, Object>();
    fillMapFromExecutable(mapObj);

    return mapObj;
  }
+
  /**
   * Writes this flow's state into {@code flowObjMap} on top of the node-level
   * fields written by the superclass: flow id, all child nodes (recursively,
   * via their own toObject), and the flow property sources.
   */
  protected void fillMapFromExecutable(Map<String, Object> flowObjMap) {
    super.fillMapFromExecutable(flowObjMap);

    flowObjMap.put(FLOW_ID_PARAM, flowId);

    ArrayList<Object> nodes = new ArrayList<Object>();
    for (ExecutableNode node : executableNodes.values()) {
      nodes.add(node.toObject());
    }
    flowObjMap.put(NODES_PARAM, nodes);

    // Flow properties
    ArrayList<Object> props = new ArrayList<Object>();
    for (FlowProps fprop : flowProps.values()) {
      HashMap<String, Object> propObj = new HashMap<String, Object>();
      String source = fprop.getSource();
      String inheritedSource = fprop.getInheritedSource();

      propObj.put(SOURCE_PARAM, source);
      // Inherited source is optional; omit the key rather than store null.
      if (inheritedSource != null) {
        propObj.put(INHERITED_PARAM, inheritedSource);
      }
      props.add(propObj);
    }
    flowObjMap.put(PROPERTIES_PARAM, props);
  }
+
+ @Override
+ public void fillExecutableFromMapObject(
+ TypedMapWrapper<String, Object> flowObjMap) {
+ super.fillExecutableFromMapObject(flowObjMap);
+
+ this.flowId = flowObjMap.getString(FLOW_ID_PARAM);
+ List<Object> nodes = flowObjMap.<Object> getList(NODES_PARAM);
+
+ if (nodes != null) {
+ for (Object nodeObj : nodes) {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> nodeObjMap = (Map<String, Object>) nodeObj;
+ TypedMapWrapper<String, Object> wrapper =
+ new TypedMapWrapper<String, Object>(nodeObjMap);
+
+ String type = wrapper.getString(TYPE_PARAM);
+ if (type != null && type.equals(SpecialJobTypes.EMBEDDED_FLOW_TYPE)) {
+ ExecutableFlowBase exFlow = new ExecutableFlowBase();
+ exFlow.fillExecutableFromMapObject(wrapper);
+ exFlow.setParentFlow(this);
+
+ executableNodes.put(exFlow.getId(), exFlow);
+ } else {
+ ExecutableNode exJob = new ExecutableNode();
+ exJob.fillExecutableFromMapObject(nodeObjMap);
+ exJob.setParentFlow(this);
+
+ executableNodes.put(exJob.getId(), exJob);
+ }
+ }
+ }
+
+ List<Object> properties = flowObjMap.<Object> getList(PROPERTIES_PARAM);
+ for (Object propNode : properties) {
+ @SuppressWarnings("unchecked")
+ HashMap<String, Object> fprop = (HashMap<String, Object>) propNode;
+ String source = (String) fprop.get("source");
+ String inheritedSource = (String) fprop.get("inherited");
+
+ FlowProps flowProps = new FlowProps(inheritedSource, source);
+ this.flowProps.put(source, flowProps);
+ }
+ }
+
  /**
   * Serializes only the state that changed after {@code lastUpdateTime},
   * recursing into embedded flows. Used to ship incremental status updates.
   */
  public Map<String, Object> toUpdateObject(long lastUpdateTime) {
    Map<String, Object> updateData = super.toUpdateObject();

    List<Map<String, Object>> updatedNodes =
        new ArrayList<Map<String, Object>>();
    for (ExecutableNode node : executableNodes.values()) {
      if (node instanceof ExecutableFlowBase) {
        Map<String, Object> updatedNodeMap =
            ((ExecutableFlowBase) node).toUpdateObject(lastUpdateTime);
        // We add only flows to the list which either have a good update time,
        // or have updated descendants.
        if (node.getUpdateTime() > lastUpdateTime
            || updatedNodeMap.containsKey(NODES_PARAM)) {
          updatedNodes.add(updatedNodeMap);
        }
      } else {
        if (node.getUpdateTime() > lastUpdateTime) {
          Map<String, Object> updatedNodeMap = node.toUpdateObject();
          updatedNodes.add(updatedNodeMap);
        }
      }
    }

    // If there are no updated nodes, we just won't add the key at all. This
    // is deliberate: for a nested flow, the parent uses the presence of
    // NODES_PARAM to decide whether to include or discard the subflow.
    if (!updatedNodes.isEmpty()) {
      updateData.put(NODES_PARAM, updatedNodes);
    }
    return updateData;
  }
+
+ public void applyUpdateObject(TypedMapWrapper<String, Object> updateData,
+ List<ExecutableNode> updatedNodes) {
+ super.applyUpdateObject(updateData);
+
+ if (updatedNodes != null) {
+ updatedNodes.add(this);
+ }
+
+ List<Map<String, Object>> nodes =
+ (List<Map<String, Object>>) updateData
+ .<Map<String, Object>> getList(NODES_PARAM);
+ if (nodes != null) {
+ for (Map<String, Object> node : nodes) {
+ TypedMapWrapper<String, Object> nodeWrapper =
+ new TypedMapWrapper<String, Object>(node);
+ String id = nodeWrapper.getString(ID_PARAM);
+ if (id == null) {
+ // Legacy case
+ id = nodeWrapper.getString("jobId");
+ }
+
+ ExecutableNode exNode = executableNodes.get(id);
+ if (updatedNodes != null) {
+ updatedNodes.add(exNode);
+ }
+
+ if (exNode instanceof ExecutableFlowBase) {
+ ((ExecutableFlowBase) exNode).applyUpdateObject(nodeWrapper,
+ updatedNodes);
+ } else {
+ exNode.applyUpdateObject(nodeWrapper);
+ }
+ }
+ }
+ }
+
+ public void applyUpdateObject(Map<String, Object> updateData,
+ List<ExecutableNode> updatedNodes) {
+ TypedMapWrapper<String, Object> typedMapWrapper =
+ new TypedMapWrapper<String, Object>(updateData);
+ applyUpdateObject(typedMapWrapper, updatedNodes);
+ }
+
+ @Override
+ public void applyUpdateObject(Map<String, Object> updateData) {
+ TypedMapWrapper<String, Object> typedMapWrapper =
+ new TypedMapWrapper<String, Object>(updateData);
+ applyUpdateObject(typedMapWrapper, null);
+ }
+
  /**
   * Transitively re-enables the dependents of the given nodes so they can run
   * again (e.g. after a failed node is retried): KILLED dependents go back to
   * READY and SKIPPED dependents go back to DISABLED. Recurses both along
   * out-edges and into re-enabled nested flows.
   */
  public void reEnableDependents(ExecutableNode... nodes) {
    for (ExecutableNode node : nodes) {
      for (String dependent : node.getOutNodes()) {
        ExecutableNode dependentNode = getExecutableNode(dependent);

        if (dependentNode.getStatus() == Status.KILLED) {
          dependentNode.setStatus(Status.READY);
          dependentNode.setUpdateTime(System.currentTimeMillis());
          reEnableDependents(dependentNode);

          // A killed nested flow also needs its own children re-enabled.
          if (dependentNode instanceof ExecutableFlowBase) {

            ((ExecutableFlowBase) dependentNode).reEnableDependents();
          }
        } else if (dependentNode.getStatus() == Status.SKIPPED) {
          dependentNode.setStatus(Status.DISABLED);
          dependentNode.setUpdateTime(System.currentTimeMillis());
          reEnableDependents(dependentNode);
        }
      }
    }
  }
+
+ /**
+ * Only returns true if the status of all finished nodes is true.
+ *
+ * @return
+ */
+ public boolean isFlowFinished() {
+ for (String end : getEndNodes()) {
+ ExecutableNode node = getExecutableNode(end);
+ if (!Status.isStatusFinished(node.getStatus())) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
  /**
   * Finds all jobs which are ready to run. This occurs when all of a node's
   * dependency nodes are finished running.
   *
   * It will also return this subflow itself once all of its end nodes have
   * completed, so the FlowRunner can finalize it.
   *
   * @return the nodes (jobs or completed subflows) that should be run next
   */
  public List<ExecutableNode> findNextJobsToRun() {
    ArrayList<ExecutableNode> jobsToRun = new ArrayList<ExecutableNode>();

    // This flow's leaves are all done but the flow itself hasn't been marked
    // finished yet: report the flow so the runner can finalize it.
    if (isFlowFinished() && !Status.isStatusFinished(getStatus())) {
      jobsToRun.add(this);
    } else {
      nodeloop: for (ExecutableNode node : executableNodes.values()) {
        if (Status.isStatusFinished(node.getStatus())) {
          continue;
        }

        if ((node instanceof ExecutableFlowBase)
            && Status.isStatusRunning(node.getStatus())) {
          // If the flow is still running, we traverse into the flow
          jobsToRun.addAll(((ExecutableFlowBase) node).findNextJobsToRun());
        } else if (Status.isStatusRunning(node.getStatus())) {
          continue;
        } else {
          // Not finished and not running: runnable only if every dependency
          // has finished.
          for (String dependency : node.getInNodes()) {
            // We find that the outer-loop is unfinished.
            if (!Status.isStatusFinished(getExecutableNode(dependency)
                .getStatus())) {
              continue nodeloop;
            }
          }

          jobsToRun.add(node);
        }
      }
    }

    return jobsToRun;
  }
+
+ public String getFlowPath() {
+ if (this.getParentFlow() == null) {
+ return this.getFlowId();
+ } else {
+ return this.getParentFlow().getFlowPath() + "," + this.getId() + ":"
+ + this.getFlowId();
+ }
+ }
+}
src/main/java/azkaban/executor/ExecutableJobInfo.java 253(+127 -126)
diff --git a/src/main/java/azkaban/executor/ExecutableJobInfo.java b/src/main/java/azkaban/executor/ExecutableJobInfo.java
index f993111..94389ae 100644
--- a/src/main/java/azkaban/executor/ExecutableJobInfo.java
+++ b/src/main/java/azkaban/executor/ExecutableJobInfo.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -24,127 +24,128 @@ import java.util.Map;
import azkaban.utils.Pair;
public class ExecutableJobInfo {
- private final int execId;
- private final int projectId;
- private final int version;
- private final String flowId;
- private final String jobId;
- private final long startTime;
- private final long endTime;
- private final Status status;
- private final int attempt;
-
- private ArrayList<Pair<String, String>> jobPath;
-
- public ExecutableJobInfo(int execId, int projectId, int version, String flowId, String jobId, long startTime, long endTime, Status status, int attempt) {
- this.execId = execId;
- this.projectId = projectId;
- this.startTime = startTime;
- this.endTime = endTime;
- this.status = status;
- this.version = version;
- this.flowId = flowId;
- this.jobId = jobId;
- this.attempt = attempt;
-
- parseFlowId();
- }
-
- public int getProjectId() {
- return projectId;
- }
-
- public int getExecId() {
- return execId;
- }
-
- public int getVersion() {
- return version;
- }
-
- public String getFlowId() {
- return flowId;
- }
-
- public String getImmediateFlowId() {
- if (jobPath.size() == 1) {
- return flowId;
- }
- Pair<String, String> pair = jobPath.get(jobPath.size() - 1);
- return pair.getSecond();
- }
-
- public String getHeadFlowId() {
- Pair<String, String> pair = jobPath.get(0);
-
- return pair.getFirst();
- }
-
- public String getJobId() {
- return jobId;
- }
-
- public long getStartTime() {
- return startTime;
- }
-
- public long getEndTime() {
- return endTime;
- }
-
- public Status getStatus() {
- return status;
- }
-
- public int getAttempt() {
- return attempt;
- }
-
- public List<Pair<String,String>> getParsedFlowId() {
- return jobPath;
- }
-
- private void parseFlowId() {
- jobPath = new ArrayList<Pair<String,String>>();
- String[] flowPairs = flowId.split(",");
-
- for (String flowPair: flowPairs) {
- String[] pairSplit = flowPair.split(":");
- Pair<String, String> pair;
- if (pairSplit.length == 1) {
- pair = new Pair<String, String>(pairSplit[0], pairSplit[0]);
- }
- else {
- pair = new Pair<String, String>(pairSplit[0], pairSplit[1]);
- }
-
- jobPath.add(pair);
- }
- }
-
- public String getJobIdPath() {
- // Skip the first one because it's always just the root.
- String path = "";
- for (int i=1; i < jobPath.size(); ++i) {
- Pair<String,String> pair = jobPath.get(i);
- path += pair.getFirst() + ":";
- }
-
- path += jobId;
- return path;
- }
-
- public Map<String, Object> toObject() {
- HashMap<String, Object> map = new HashMap<String, Object>();
- map.put("execId", execId);
- map.put("version", version);
- map.put("flowId", flowId);
- map.put("jobId", jobId);
- map.put("startTime", startTime);
- map.put("endTime", endTime);
- map.put("status", status.toString());
- map.put("attempt", attempt);
-
- return map;
- }
  // Immutable snapshot of one job-execution record.
  private final int execId;
  private final int projectId;
  private final int version;
  private final String flowId;
  private final String jobId;
  private final long startTime;
  private final long endTime;
  private final Status status;
  private final int attempt;

  // Parsed form of flowId: (node id, flow id) pairs from the root flow down
  // to the job's immediate parent flow. Built once in parseFlowId().
  private ArrayList<Pair<String, String>> jobPath;
+
  /**
   * Creates an immutable job-execution record and eagerly parses the flow
   * path (see parseFlowId).
   */
  public ExecutableJobInfo(int execId, int projectId, int version,
      String flowId, String jobId, long startTime, long endTime, Status status,
      int attempt) {
    this.execId = execId;
    this.projectId = projectId;
    this.startTime = startTime;
    this.endTime = endTime;
    this.status = status;
    this.version = version;
    this.flowId = flowId;
    this.jobId = jobId;
    this.attempt = attempt;

    parseFlowId();
  }
+
  public int getProjectId() {
    return projectId;
  }

  public int getExecId() {
    return execId;
  }

  public int getVersion() {
    return version;
  }

  /** Raw flow path string, e.g. "rootFlow,node1:subFlow". */
  public String getFlowId() {
    return flowId;
  }

  /**
   * Flow id of the job's immediate parent flow: the last element of the
   * parsed path, or the root flow id when the job is not nested.
   */
  public String getImmediateFlowId() {
    if (jobPath.size() == 1) {
      return flowId;
    }
    Pair<String, String> pair = jobPath.get(jobPath.size() - 1);
    return pair.getSecond();
  }

  /** Id of the top-level (root) flow. */
  public String getHeadFlowId() {
    Pair<String, String> pair = jobPath.get(0);

    return pair.getFirst();
  }

  public String getJobId() {
    return jobId;
  }

  public long getStartTime() {
    return startTime;
  }

  public long getEndTime() {
    return endTime;
  }

  public Status getStatus() {
    return status;
  }

  public int getAttempt() {
    return attempt;
  }

  /** The (node id, flow id) pairs parsed from {@link #getFlowId()}. */
  public List<Pair<String, String>> getParsedFlowId() {
    return jobPath;
  }
+
+ private void parseFlowId() {
+ jobPath = new ArrayList<Pair<String, String>>();
+ String[] flowPairs = flowId.split(",");
+
+ for (String flowPair : flowPairs) {
+ String[] pairSplit = flowPair.split(":");
+ Pair<String, String> pair;
+ if (pairSplit.length == 1) {
+ pair = new Pair<String, String>(pairSplit[0], pairSplit[0]);
+ } else {
+ pair = new Pair<String, String>(pairSplit[0], pairSplit[1]);
+ }
+
+ jobPath.add(pair);
+ }
+ }
+
+ public String getJobIdPath() {
+ // Skip the first one because it's always just the root.
+ String path = "";
+ for (int i = 1; i < jobPath.size(); ++i) {
+ Pair<String, String> pair = jobPath.get(i);
+ path += pair.getFirst() + ":";
+ }
+
+ path += jobId;
+ return path;
+ }
+
  /** Serializes this record into a plain map (status as its string name). */
  public Map<String, Object> toObject() {
    HashMap<String, Object> map = new HashMap<String, Object>();
    map.put("execId", execId);
    map.put("version", version);
    map.put("flowId", flowId);
    map.put("jobId", jobId);
    map.put("startTime", startTime);
    map.put("endTime", endTime);
    map.put("status", status.toString());
    map.put("attempt", attempt);

    return map;
  }
}
src/main/java/azkaban/executor/ExecutableNode.java 831(+422 -409)
diff --git a/src/main/java/azkaban/executor/ExecutableNode.java b/src/main/java/azkaban/executor/ExecutableNode.java
index d444ef6..6b5ab86 100644
--- a/src/main/java/azkaban/executor/ExecutableNode.java
+++ b/src/main/java/azkaban/executor/ExecutableNode.java
@@ -1,12 +1,12 @@
/*
* Copyright 2013 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -33,410 +33,423 @@ import azkaban.utils.TypedMapWrapper;
* Base Executable that nodes and flows are based.
*/
public class ExecutableNode {
- public static final String ID_PARAM = "id";
- public static final String STATUS_PARAM = "status";
- public static final String STARTTIME_PARAM = "startTime";
- public static final String ENDTIME_PARAM = "endTime";
- public static final String UPDATETIME_PARAM = "updateTime";
- public static final String INNODES_PARAM = "inNodes";
- public static final String OUTNODES_PARAM = "outNodes";
- public static final String TYPE_PARAM = "type";
- public static final String PROPS_SOURCE_PARAM = "propSource";
- public static final String JOB_SOURCE_PARAM = "jobSource";
- public static final String OUTPUT_PROPS_PARAM = "outputProps";
-
- private String id;
- private String type = null;
- private Status status = Status.READY;
- private long startTime = -1;
- private long endTime = -1;
- private long updateTime = -1;
-
- // Path to Job File
- private String jobSource;
- // Path to top level props file
- private String propsSource;
- private Set<String> inNodes = new HashSet<String>();
- private Set<String> outNodes = new HashSet<String>();
-
- private Props inputProps;
- private Props outputProps;
-
- public static final String ATTEMPT_PARAM = "attempt";
- public static final String PASTATTEMPTS_PARAM = "pastAttempts";
-
- private int attempt = 0;
- private long delayExecution = 0;
- private ArrayList<ExecutionAttempt> pastAttempts = null;
-
- // Transient. These values aren't saved, but rediscovered.
- private ExecutableFlowBase parentFlow;
-
- public ExecutableNode(Node node) {
- this.id = node.getId();
- this.jobSource = node.getJobSource();
- this.propsSource = node.getPropsSource();
- }
-
- public ExecutableNode(Node node, ExecutableFlowBase parent) {
- this(node.getId(), node.getType(), node.getJobSource(), node.getPropsSource(), parent);
- }
-
- public ExecutableNode(String id, String type, String jobSource, String propsSource, ExecutableFlowBase parent) {
- this.id = id;
- this.jobSource = jobSource;
- this.propsSource = propsSource;
- this.type = type;
- setParentFlow(parent);
- }
-
- public ExecutableNode() {
- }
-
- public ExecutableFlow getExecutableFlow() {
- if (parentFlow == null) {
- return null;
- }
-
- return parentFlow.getExecutableFlow();
- }
-
- public void setParentFlow(ExecutableFlowBase flow) {
- this.parentFlow = flow;
- }
-
- public ExecutableFlowBase getParentFlow() {
- return parentFlow;
- }
-
- public String getId() {
- return id;
- }
-
- public void setId(String id) {
- this.id = id;
- }
-
- public Status getStatus() {
- return status;
- }
-
- public String getType() {
- return type;
- }
-
- public void setType(String type) {
- this.type = type;
- }
-
- public void setStatus(Status status) {
- this.status = status;
- }
-
- public long getStartTime() {
- return startTime;
- }
-
- public void setStartTime(long startTime) {
- this.startTime = startTime;
- }
-
- public long getEndTime() {
- return endTime;
- }
-
- public void setEndTime(long endTime) {
- this.endTime = endTime;
- }
-
- public long getUpdateTime() {
- return updateTime;
- }
-
- public void setUpdateTime(long updateTime) {
- this.updateTime = updateTime;
- }
-
- public void addOutNode(String exNode) {
- outNodes.add(exNode);
- }
-
- public void addInNode(String exNode) {
- inNodes.add(exNode);
- }
-
- public Set<String> getOutNodes() {
- return outNodes;
- }
-
- public Set<String> getInNodes() {
- return inNodes;
- }
-
- public boolean hasJobSource() {
- return jobSource != null;
- }
-
- public boolean hasPropsSource() {
- return propsSource != null;
- }
-
- public String getJobSource() {
- return jobSource;
- }
-
- public String getPropsSource() {
- return propsSource;
- }
-
- public void setInputProps(Props input) {
- this.inputProps = input;
- }
-
- public void setOutputProps(Props output) {
- this.outputProps = output;
- }
-
- public Props getInputProps() {
- return this.inputProps;
- }
-
- public Props getOutputProps() {
- return outputProps;
- }
-
- public long getDelayedExecution() {
- return delayExecution;
- }
-
- public void setDelayedExecution(long delayMs) {
- delayExecution = delayMs;
- }
-
- public List<ExecutionAttempt> getPastAttemptList() {
- return pastAttempts;
- }
-
- public int getAttempt() {
- return attempt;
- }
-
- public void setAttempt(int attempt) {
- this.attempt = attempt;
- }
-
- public void resetForRetry() {
- ExecutionAttempt pastAttempt = new ExecutionAttempt(attempt, this);
- attempt++;
-
- synchronized (this) {
- if (pastAttempts == null) {
- pastAttempts = new ArrayList<ExecutionAttempt>();
- }
-
- pastAttempts.add(pastAttempt);
- }
-
- this.setStartTime(-1);
- this.setEndTime(-1);
- this.setUpdateTime(System.currentTimeMillis());
- this.setStatus(Status.READY);
- }
-
- public List<Object> getAttemptObjects() {
- ArrayList<Object> array = new ArrayList<Object>();
-
- for (ExecutionAttempt attempt: pastAttempts) {
- array.add(attempt.toObject());
- }
-
- return array;
- }
-
- public String getNestedId() {
- return getPrintableId(":");
- }
-
- public String getPrintableId(String delimiter) {
- if (this.getParentFlow() == null || this.getParentFlow() instanceof ExecutableFlow) {
- return getId();
- }
- return getParentFlow().getPrintableId(delimiter) + delimiter + getId();
- }
-
- public Map<String,Object> toObject() {
- Map<String,Object> mapObj = new HashMap<String,Object>();
- fillMapFromExecutable(mapObj);
-
- return mapObj;
- }
-
- protected void fillMapFromExecutable(Map<String,Object> objMap) {
- objMap.put(ID_PARAM, this.id);
- objMap.put(STATUS_PARAM, status.toString());
- objMap.put(STARTTIME_PARAM, startTime);
- objMap.put(ENDTIME_PARAM, endTime);
- objMap.put(UPDATETIME_PARAM, updateTime);
- objMap.put(TYPE_PARAM, type);
- objMap.put(ATTEMPT_PARAM, attempt);
-
- if (inNodes != null && !inNodes.isEmpty()) {
- objMap.put(INNODES_PARAM, inNodes);
- }
- if (outNodes != null && !outNodes.isEmpty()) {
- objMap.put(OUTNODES_PARAM, outNodes);
- }
-
- if (hasPropsSource()) {
- objMap.put(PROPS_SOURCE_PARAM, this.propsSource);
- }
- if (hasJobSource()) {
- objMap.put(JOB_SOURCE_PARAM, this.jobSource);
- }
-
- if (outputProps != null && outputProps.size() > 0) {
- objMap.put(OUTPUT_PROPS_PARAM, PropsUtils.toStringMap(outputProps, true));
- }
-
- if (pastAttempts != null) {
- ArrayList<Object> attemptsList = new ArrayList<Object>(pastAttempts.size());
- for (ExecutionAttempt attempts : pastAttempts) {
- attemptsList.add(attempts.toObject());
- }
- objMap.put(PASTATTEMPTS_PARAM, attemptsList);
- }
- }
-
- @SuppressWarnings("unchecked")
- public void fillExecutableFromMapObject(TypedMapWrapper<String, Object> wrappedMap) {
- this.id = wrappedMap.getString(ID_PARAM);
- this.type = wrappedMap.getString(TYPE_PARAM);
- this.status = Status.valueOf(wrappedMap.getString(STATUS_PARAM));
- this.startTime = wrappedMap.getLong(STARTTIME_PARAM);
- this.endTime = wrappedMap.getLong(ENDTIME_PARAM);
- this.updateTime = wrappedMap.getLong(UPDATETIME_PARAM);
- this.attempt = wrappedMap.getInt(ATTEMPT_PARAM, 0);
-
- this.inNodes = new HashSet<String>();
- this.inNodes.addAll(wrappedMap.getStringCollection(INNODES_PARAM, Collections.<String>emptySet()));
-
- this.outNodes = new HashSet<String>();
- this.outNodes.addAll(wrappedMap.getStringCollection(OUTNODES_PARAM, Collections.<String>emptySet()));
-
- this.propsSource = wrappedMap.getString(PROPS_SOURCE_PARAM);
- this.jobSource = wrappedMap.getString(JOB_SOURCE_PARAM);
-
- Map<String, String> outputProps = wrappedMap.<String,String>getMap(OUTPUT_PROPS_PARAM);
- if (outputProps != null) {
- this.outputProps = new Props(null, outputProps);
- }
-
- Collection<Object> pastAttempts = wrappedMap.<Object>getCollection(PASTATTEMPTS_PARAM);
- if (pastAttempts!=null) {
- ArrayList<ExecutionAttempt> attempts = new ArrayList<ExecutionAttempt>();
- for (Object attemptObj: pastAttempts) {
- ExecutionAttempt attempt = ExecutionAttempt.fromObject(attemptObj);
- attempts.add(attempt);
- }
-
- this.pastAttempts = attempts;
- }
- }
-
- public void fillExecutableFromMapObject(Map<String,Object> objMap) {
- TypedMapWrapper<String, Object> wrapper = new TypedMapWrapper<String, Object>(objMap);
- fillExecutableFromMapObject(wrapper);
- }
-
- public Map<String, Object> toUpdateObject() {
- Map<String, Object> updatedNodeMap = new HashMap<String,Object>();
- updatedNodeMap.put(ID_PARAM, getId());
- updatedNodeMap.put(STATUS_PARAM, getStatus().getNumVal());
- updatedNodeMap.put(STARTTIME_PARAM, getStartTime());
- updatedNodeMap.put(ENDTIME_PARAM, getEndTime());
- updatedNodeMap.put(UPDATETIME_PARAM, getUpdateTime());
-
- updatedNodeMap.put(ATTEMPT_PARAM, getAttempt());
-
- if (getAttempt() > 0) {
- ArrayList<Map<String,Object>> pastAttempts = new ArrayList<Map<String,Object>>();
- for (ExecutionAttempt attempt: getPastAttemptList()) {
- pastAttempts.add(attempt.toObject());
- }
- updatedNodeMap.put(PASTATTEMPTS_PARAM, pastAttempts);
- }
-
- return updatedNodeMap;
- }
-
- public void applyUpdateObject(TypedMapWrapper<String, Object> updateData) {
- this.status = Status.fromInteger(updateData.getInt(STATUS_PARAM, this.status.getNumVal()));
- this.startTime = updateData.getLong(STARTTIME_PARAM);
- this.updateTime = updateData.getLong(UPDATETIME_PARAM);
- this.endTime = updateData.getLong(ENDTIME_PARAM);
-
- if (updateData.containsKey(ATTEMPT_PARAM)) {
- attempt = updateData.getInt(ATTEMPT_PARAM);
- if (attempt > 0) {
- updatePastAttempts(
- updateData.<Object>getList(PASTATTEMPTS_PARAM, Collections.<Object>emptyList()));
- }
- }
- }
-
- public void applyUpdateObject(Map<String, Object> updateData) {
- TypedMapWrapper<String,Object> wrapper = new TypedMapWrapper<String,Object>(updateData);
- applyUpdateObject(wrapper);
- }
-
- public void cancelNode(long cancelTime) {
- if (this.status == Status.DISABLED) {
- skipNode(cancelTime);
- }
- else {
- this.setStatus(Status.CANCELLED);
- this.setStartTime(cancelTime);
- this.setEndTime(cancelTime);
- this.setUpdateTime(cancelTime);
- }
- }
-
- public void skipNode(long skipTime) {
- this.setStatus(Status.SKIPPED);
- this.setStartTime(skipTime);
- this.setEndTime(skipTime);
- this.setUpdateTime(skipTime);
- }
-
- private void updatePastAttempts(List<Object> pastAttemptsList) {
- if (pastAttemptsList == null) {
- return;
- }
-
- synchronized (this) {
- if (this.pastAttempts == null) {
- this.pastAttempts = new ArrayList<ExecutionAttempt>();
- }
-
- // We just check size because past attempts don't change
- if (pastAttemptsList.size() <= this.pastAttempts.size()) {
- return;
- }
-
- Object[] pastAttemptArray = pastAttemptsList.toArray();
- for (int i = this.pastAttempts.size(); i < pastAttemptArray.length; ++i) {
- ExecutionAttempt attempt = ExecutionAttempt.fromObject(pastAttemptArray[i]);
- this.pastAttempts.add(attempt);
- }
- }
- }
-
- public int getRetries() {
- return inputProps.getInt("retries", 0);
- }
-
- public long getRetryBackoff() {
- return inputProps.getLong("retry.backoff", 0);
- }
-}
  // Keys used when (de)serializing a node to/from a map.
  public static final String ID_PARAM = "id";
  public static final String STATUS_PARAM = "status";
  public static final String STARTTIME_PARAM = "startTime";
  public static final String ENDTIME_PARAM = "endTime";
  public static final String UPDATETIME_PARAM = "updateTime";
  public static final String INNODES_PARAM = "inNodes";
  public static final String OUTNODES_PARAM = "outNodes";
  public static final String TYPE_PARAM = "type";
  public static final String PROPS_SOURCE_PARAM = "propSource";
  public static final String JOB_SOURCE_PARAM = "jobSource";
  public static final String OUTPUT_PROPS_PARAM = "outputProps";

  private String id;
  private String type = null;
  private Status status = Status.READY;
  // Times are epoch millis; -1 means "not set yet".
  private long startTime = -1;
  private long endTime = -1;
  private long updateTime = -1;

  // Path to Job File
  private String jobSource;
  // Path to top level props file
  private String propsSource;
  // Ids of dependency (in) and dependent (out) nodes within the parent flow.
  private Set<String> inNodes = new HashSet<String>();
  private Set<String> outNodes = new HashSet<String>();

  private Props inputProps;
  private Props outputProps;

  public static final String ATTEMPT_PARAM = "attempt";
  public static final String PASTATTEMPTS_PARAM = "pastAttempts";

  private int attempt = 0;
  // Execution delay; presumably milliseconds (see setDelayedExecution) —
  // TODO confirm units against callers.
  private long delayExecution = 0;
  // Null until the first retry; populated by resetForRetry().
  private ArrayList<ExecutionAttempt> pastAttempts = null;

  // Transient. These values aren't saved, but rediscovered.
  private ExecutableFlowBase parentFlow;
+
  /** Creates a parentless node from a flow-definition node. */
  public ExecutableNode(Node node) {
    this.id = node.getId();
    this.jobSource = node.getJobSource();
    this.propsSource = node.getPropsSource();
  }

  /** Creates a node from a flow-definition node under the given parent flow. */
  public ExecutableNode(Node node, ExecutableFlowBase parent) {
    this(node.getId(), node.getType(), node.getJobSource(), node
        .getPropsSource(), parent);
  }

  public ExecutableNode(String id, String type, String jobSource,
      String propsSource, ExecutableFlowBase parent) {
    this.id = id;
    this.jobSource = jobSource;
    this.propsSource = propsSource;
    this.type = type;
    setParentFlow(parent);
  }

  /** No-arg constructor for deserialization via fillExecutableFromMapObject. */
  public ExecutableNode() {
  }
+
+ public ExecutableFlow getExecutableFlow() {
+ if (parentFlow == null) {
+ return null;
+ }
+
+ return parentFlow.getExecutableFlow();
+ }
+
+ public void setParentFlow(ExecutableFlowBase flow) {
+ this.parentFlow = flow;
+ }
+
+ public ExecutableFlowBase getParentFlow() {
+ return parentFlow;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public Status getStatus() {
+ return status;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public void setStatus(Status status) {
+ this.status = status;
+ }
+
+ public long getStartTime() {
+ return startTime;
+ }
+
+ public void setStartTime(long startTime) {
+ this.startTime = startTime;
+ }
+
+ public long getEndTime() {
+ return endTime;
+ }
+
+ public void setEndTime(long endTime) {
+ this.endTime = endTime;
+ }
+
+ public long getUpdateTime() {
+ return updateTime;
+ }
+
+ public void setUpdateTime(long updateTime) {
+ this.updateTime = updateTime;
+ }
+
+ /** Records an outgoing-edge node id. */
+ public void addOutNode(String exNode) {
+   this.outNodes.add(exNode);
+ }
+
+ /** Records an incoming-edge node id. */
+ public void addInNode(String exNode) {
+   this.inNodes.add(exNode);
+ }
+
+ public Set<String> getOutNodes() {
+   return this.outNodes;
+ }
+
+ public Set<String> getInNodes() {
+   return this.inNodes;
+ }
+
+ /** True when this node has a backing job file location. */
+ public boolean hasJobSource() {
+   return this.jobSource != null;
+ }
+
+ /** True when this node has a backing properties file location. */
+ public boolean hasPropsSource() {
+   return this.propsSource != null;
+ }
+
+ public String getJobSource() {
+   return this.jobSource;
+ }
+
+ public String getPropsSource() {
+   return this.propsSource;
+ }
+
+ /** Sets the resolved input properties used when running this node. */
+ public void setInputProps(Props input) {
+   this.inputProps = input;
+ }
+
+ /** Sets the properties this node's execution produced. */
+ public void setOutputProps(Props output) {
+   this.outputProps = output;
+ }
+
+ public Props getInputProps() {
+   return this.inputProps;
+ }
+
+ public Props getOutputProps() {
+   return this.outputProps;
+ }
+
+ /** Milliseconds the execution is delayed by; 0 means no delay. */
+ public long getDelayedExecution() {
+   return this.delayExecution;
+ }
+
+ public void setDelayedExecution(long delayMs) {
+   this.delayExecution = delayMs;
+ }
+
+ /**
+  * Past attempts in the order they were archived; null until the first retry.
+  * NOTE(review): returns the internal mutable list — callers should treat it
+  * as read-only.
+  */
+ public List<ExecutionAttempt> getPastAttemptList() {
+   return this.pastAttempts;
+ }
+
+ /** Zero-based index of the current attempt. */
+ public int getAttempt() {
+   return this.attempt;
+ }
+
+ public void setAttempt(int attempt) {
+   this.attempt = attempt;
+ }
+
+ /**
+  * Archives the current run as a past attempt and resets timing/status so the
+  * node can be executed again. The attempt snapshot, counter increment and
+  * list append now all happen inside one synchronized block, so concurrent
+  * readers (e.g. updatePastAttempts) see a consistent counter/list pair.
+  */
+ public void resetForRetry() {
+   synchronized (this) {
+     ExecutionAttempt pastAttempt = new ExecutionAttempt(attempt, this);
+     attempt++;
+     if (pastAttempts == null) {
+       pastAttempts = new ArrayList<ExecutionAttempt>();
+     }
+     pastAttempts.add(pastAttempt);
+   }
+
+   this.setStartTime(-1);
+   this.setEndTime(-1);
+   this.setUpdateTime(System.currentTimeMillis());
+   this.setStatus(Status.READY);
+ }
+
+ /**
+  * Returns the past attempts serialized as a list of maps. Fix: returns an
+  * empty list instead of throwing NullPointerException when the node has
+  * never been retried (pastAttempts is null until the first retry).
+  */
+ public List<Object> getAttemptObjects() {
+   ArrayList<Object> array = new ArrayList<Object>();
+   if (pastAttempts == null) {
+     return array;
+   }
+   for (ExecutionAttempt attempt : pastAttempts) {
+     array.add(attempt.toObject());
+   }
+   return array;
+ }
+
+ public String getNestedId() {
+ return getPrintableId(":");
+ }
+
+ public String getPrintableId(String delimiter) {
+ if (this.getParentFlow() == null
+ || this.getParentFlow() instanceof ExecutableFlow) {
+ return getId();
+ }
+ return getParentFlow().getPrintableId(delimiter) + delimiter + getId();
+ }
+
+ /** Serializes this node into a map suitable for JSON persistence. */
+ public Map<String, Object> toObject() {
+   Map<String, Object> serialized = new HashMap<String, Object>();
+   fillMapFromExecutable(serialized);
+   return serialized;
+ }
+
+ /**
+  * Writes this node's persistent state into {@code objMap}. Optional pieces
+  * (edges, source paths, output props, past attempts) are only included when
+  * present so the serialized form stays compact.
+  */
+ protected void fillMapFromExecutable(Map<String, Object> objMap) {
+   objMap.put(ID_PARAM, this.id);
+   objMap.put(STATUS_PARAM, this.status.toString());
+   objMap.put(STARTTIME_PARAM, this.startTime);
+   objMap.put(ENDTIME_PARAM, this.endTime);
+   objMap.put(UPDATETIME_PARAM, this.updateTime);
+   objMap.put(TYPE_PARAM, this.type);
+   objMap.put(ATTEMPT_PARAM, this.attempt);
+
+   if (this.inNodes != null && !this.inNodes.isEmpty()) {
+     objMap.put(INNODES_PARAM, this.inNodes);
+   }
+   if (this.outNodes != null && !this.outNodes.isEmpty()) {
+     objMap.put(OUTNODES_PARAM, this.outNodes);
+   }
+
+   if (hasPropsSource()) {
+     objMap.put(PROPS_SOURCE_PARAM, this.propsSource);
+   }
+   if (hasJobSource()) {
+     objMap.put(JOB_SOURCE_PARAM, this.jobSource);
+   }
+
+   if (this.outputProps != null && this.outputProps.size() > 0) {
+     objMap.put(OUTPUT_PROPS_PARAM,
+         PropsUtils.toStringMap(this.outputProps, true));
+   }
+
+   if (this.pastAttempts != null) {
+     ArrayList<Object> attemptsList =
+         new ArrayList<Object>(this.pastAttempts.size());
+     for (ExecutionAttempt pastAttempt : this.pastAttempts) {
+       attemptsList.add(pastAttempt.toObject());
+     }
+     objMap.put(PASTATTEMPTS_PARAM, attemptsList);
+   }
+ }
+
+ /**
+  * Restores this node's state from a serialized map — the inverse of
+  * fillMapFromExecutable. Missing edge collections default to empty sets.
+  */
+ @SuppressWarnings("unchecked")
+ public void fillExecutableFromMapObject(
+     TypedMapWrapper<String, Object> wrappedMap) {
+   this.id = wrappedMap.getString(ID_PARAM);
+   this.type = wrappedMap.getString(TYPE_PARAM);
+   this.status = Status.valueOf(wrappedMap.getString(STATUS_PARAM));
+   this.startTime = wrappedMap.getLong(STARTTIME_PARAM);
+   this.endTime = wrappedMap.getLong(ENDTIME_PARAM);
+   this.updateTime = wrappedMap.getLong(UPDATETIME_PARAM);
+   this.attempt = wrappedMap.getInt(ATTEMPT_PARAM, 0);
+
+   this.inNodes = new HashSet<String>(wrappedMap.getStringCollection(
+       INNODES_PARAM, Collections.<String> emptySet()));
+   this.outNodes = new HashSet<String>(wrappedMap.getStringCollection(
+       OUTNODES_PARAM, Collections.<String> emptySet()));
+
+   this.propsSource = wrappedMap.getString(PROPS_SOURCE_PARAM);
+   this.jobSource = wrappedMap.getString(JOB_SOURCE_PARAM);
+
+   // Output props are persisted as a plain string map; absent means none.
+   Map<String, String> outputPropsMap =
+       wrappedMap.<String, String> getMap(OUTPUT_PROPS_PARAM);
+   if (outputPropsMap != null) {
+     this.outputProps = new Props(null, outputPropsMap);
+   }
+
+   Collection<Object> attemptObjects =
+       wrappedMap.<Object> getCollection(PASTATTEMPTS_PARAM);
+   if (attemptObjects != null) {
+     ArrayList<ExecutionAttempt> restored =
+         new ArrayList<ExecutionAttempt>();
+     for (Object attemptObj : attemptObjects) {
+       restored.add(ExecutionAttempt.fromObject(attemptObj));
+     }
+     this.pastAttempts = restored;
+   }
+ }
+
+ /** Convenience overload that wraps a raw map before deserializing. */
+ public void fillExecutableFromMapObject(Map<String, Object> objMap) {
+   fillExecutableFromMapObject(new TypedMapWrapper<String, Object>(objMap));
+ }
+
+ /**
+  * Produces the minimal map of fields that change while a node runs, used
+  * for incremental status updates. Fix: past attempts are only serialized
+  * when the list has actually been populated — previously attempt > 0 with
+  * a null list (e.g. attempt set directly via setAttempt) threw an NPE.
+  */
+ public Map<String, Object> toUpdateObject() {
+   Map<String, Object> updatedNodeMap = new HashMap<String, Object>();
+   updatedNodeMap.put(ID_PARAM, getId());
+   updatedNodeMap.put(STATUS_PARAM, getStatus().getNumVal());
+   updatedNodeMap.put(STARTTIME_PARAM, getStartTime());
+   updatedNodeMap.put(ENDTIME_PARAM, getEndTime());
+   updatedNodeMap.put(UPDATETIME_PARAM, getUpdateTime());
+
+   updatedNodeMap.put(ATTEMPT_PARAM, getAttempt());
+
+   List<ExecutionAttempt> attempts = getPastAttemptList();
+   if (getAttempt() > 0 && attempts != null) {
+     ArrayList<Map<String, Object>> pastAttempts =
+         new ArrayList<Map<String, Object>>();
+     for (ExecutionAttempt attempt : attempts) {
+       pastAttempts.add(attempt.toObject());
+     }
+     updatedNodeMap.put(PASTATTEMPTS_PARAM, pastAttempts);
+   }
+
+   return updatedNodeMap;
+ }
+
+ /**
+  * Applies an incremental status update (as produced by toUpdateObject) to
+  * this node. The status falls back to the current value when absent.
+  */
+ public void applyUpdateObject(TypedMapWrapper<String, Object> updateData) {
+   this.status = Status.fromInteger(
+       updateData.getInt(STATUS_PARAM, this.status.getNumVal()));
+   this.startTime = updateData.getLong(STARTTIME_PARAM);
+   this.updateTime = updateData.getLong(UPDATETIME_PARAM);
+   this.endTime = updateData.getLong(ENDTIME_PARAM);
+
+   if (updateData.containsKey(ATTEMPT_PARAM)) {
+     this.attempt = updateData.getInt(ATTEMPT_PARAM);
+     if (this.attempt > 0) {
+       // Merge any newly reported past attempts into the local list.
+       updatePastAttempts(updateData.<Object> getList(PASTATTEMPTS_PARAM,
+           Collections.<Object> emptyList()));
+     }
+   }
+ }
+
+ /** Convenience overload that wraps a raw map before applying the update. */
+ public void applyUpdateObject(Map<String, Object> updateData) {
+   applyUpdateObject(new TypedMapWrapper<String, Object>(updateData));
+ }
+
+ /**
+  * Marks this node cancelled at the given time. A disabled node is recorded
+  * as skipped instead so it does not appear as a cancellation.
+  */
+ public void cancelNode(long cancelTime) {
+   if (this.status != Status.DISABLED) {
+     setStatus(Status.CANCELLED);
+     setStartTime(cancelTime);
+     setEndTime(cancelTime);
+     setUpdateTime(cancelTime);
+   } else {
+     skipNode(cancelTime);
+   }
+ }
+
+ /** Marks this node skipped, collapsing start/end/update to one instant. */
+ public void skipNode(long skipTime) {
+   setStatus(Status.SKIPPED);
+   setStartTime(skipTime);
+   setEndTime(skipTime);
+   setUpdateTime(skipTime);
+ }
+
+ /**
+  * Merges newly reported past attempts into this node's list. Past attempts
+  * are append-only, so only entries beyond the current list size are added;
+  * the list mutation is synchronized on this node.
+  */
+ private void updatePastAttempts(List<Object> pastAttemptsList) {
+ if (pastAttemptsList == null) {
+ return;
+ }
+
+ synchronized (this) {
+ if (this.pastAttempts == null) {
+ this.pastAttempts = new ArrayList<ExecutionAttempt>();
+ }
+
+ // We just check size because past attempts don't change
+ if (pastAttemptsList.size() <= this.pastAttempts.size()) {
+ return;
+ }
+
+ // Deserialize and append only the tail entries we have not seen yet.
+ Object[] pastAttemptArray = pastAttemptsList.toArray();
+ for (int i = this.pastAttempts.size(); i < pastAttemptArray.length; ++i) {
+ ExecutionAttempt attempt =
+ ExecutionAttempt.fromObject(pastAttemptArray[i]);
+ this.pastAttempts.add(attempt);
+ }
+ }
+ }
+
+ /**
+  * Number of configured retries ("retries" property). Fix: returns 0 instead
+  * of throwing NullPointerException when input props were never set.
+  */
+ public int getRetries() {
+   return inputProps == null ? 0 : inputProps.getInt("retries", 0);
+ }
+
+ /** Backoff in milliseconds between retries ("retry.backoff"); 0 by default. */
+ public long getRetryBackoff() {
+   return inputProps == null ? 0 : inputProps.getLong("retry.backoff", 0);
+ }
+}
src/main/java/azkaban/executor/ExecutionAttempt.java 122(+62 -60)
diff --git a/src/main/java/azkaban/executor/ExecutionAttempt.java b/src/main/java/azkaban/executor/ExecutionAttempt.java
index 7712010..d54a9f3 100644
--- a/src/main/java/azkaban/executor/ExecutionAttempt.java
+++ b/src/main/java/azkaban/executor/ExecutionAttempt.java
@@ -6,64 +6,66 @@ import java.util.Map;
import azkaban.utils.TypedMapWrapper;
public class ExecutionAttempt {
- public static final String ATTEMPT_PARAM = "attempt";
- public static final String STATUS_PARAM = "status";
- public static final String STARTTIME_PARAM = "startTime";
- public static final String ENDTIME_PARAM = "endTime";
-
- private int attempt = 0;
- private long startTime = -1;
- private long endTime = -1;
- private Status status;
-
- public ExecutionAttempt(int attempt, ExecutableNode executable) {
- this.attempt = attempt;
- this.startTime = executable.getStartTime();
- this.endTime = executable.getEndTime();
- this.status = executable.getStatus();
- }
-
- public ExecutionAttempt(int attempt, long startTime, long endTime, Status status) {
- this.attempt = attempt;
- this.startTime = startTime;
- this.endTime = endTime;
- this.status = status;
- }
-
- public long getStartTime() {
- return startTime;
- }
+ // Serialization keys shared by toObject() and fromObject().
+ public static final String ATTEMPT_PARAM = "attempt";
+ public static final String STATUS_PARAM = "status";
+ public static final String STARTTIME_PARAM = "startTime";
+ public static final String ENDTIME_PARAM = "endTime";
- public long getEndTime() {
- return endTime;
- }
-
- public Status getStatus() {
- return status;
- }
-
- public int getAttempt() {
- return attempt;
- }
-
- public static ExecutionAttempt fromObject(Object obj) {
- @SuppressWarnings("unchecked")
- Map<String, Object> map = (Map<String, Object>)obj;
- TypedMapWrapper<String, Object> wrapper = new TypedMapWrapper<String, Object>(map);
- int attempt = wrapper.getInt(ATTEMPT_PARAM);
- long startTime = wrapper.getLong(STARTTIME_PARAM);
- long endTime = wrapper.getLong(ENDTIME_PARAM);
- Status status = Status.valueOf(wrapper.getString(STATUS_PARAM));
-
- return new ExecutionAttempt(attempt, startTime, endTime, status);
- }
-
- public Map<String, Object> toObject() {
- HashMap<String,Object> attempts = new HashMap<String,Object>();
- attempts.put(ATTEMPT_PARAM, attempt);
- attempts.put(STARTTIME_PARAM, startTime);
- attempts.put(ENDTIME_PARAM, endTime);
- attempts.put(STATUS_PARAM, status.toString());
- return attempts;
- }
-}
\ No newline at end of file
+ // Snapshot of one completed run of a node.
+ private int attempt = 0;
+ private long startTime = -1;
+ private long endTime = -1;
+ private Status status;
+
+ /** Snapshots the node's current timing and status as attempt {@code attempt}. */
+ public ExecutionAttempt(int attempt, ExecutableNode executable) {
+   this(attempt, executable.getStartTime(), executable.getEndTime(),
+       executable.getStatus());
+ }
+
+ /** Field-by-field constructor used when restoring from a serialized map. */
+ public ExecutionAttempt(int attempt, long startTime, long endTime,
+     Status status) {
+   this.attempt = attempt;
+   this.startTime = startTime;
+   this.endTime = endTime;
+   this.status = status;
+ }
+
+ public long getStartTime() {
+   return this.startTime;
+ }
+
+ public long getEndTime() {
+   return this.endTime;
+ }
+
+ public Status getStatus() {
+   return this.status;
+ }
+
+ public int getAttempt() {
+   return this.attempt;
+ }
+
+ /** Rebuilds an attempt from the map form produced by toObject(). */
+ public static ExecutionAttempt fromObject(Object obj) {
+   @SuppressWarnings("unchecked")
+   Map<String, Object> map = (Map<String, Object>) obj;
+   TypedMapWrapper<String, Object> wrapped =
+       new TypedMapWrapper<String, Object>(map);
+   return new ExecutionAttempt(wrapped.getInt(ATTEMPT_PARAM),
+       wrapped.getLong(STARTTIME_PARAM),
+       wrapped.getLong(ENDTIME_PARAM),
+       Status.valueOf(wrapped.getString(STATUS_PARAM)));
+ }
+
+ /** Serializes this attempt into a plain map for persistence. */
+ public Map<String, Object> toObject() {
+   HashMap<String, Object> map = new HashMap<String, Object>();
+   map.put(ATTEMPT_PARAM, this.attempt);
+   map.put(STARTTIME_PARAM, this.startTime);
+   map.put(ENDTIME_PARAM, this.endTime);
+   map.put(STATUS_PARAM, this.status.toString());
+   return map;
+ }
+}
src/main/java/azkaban/executor/ExecutionOptions.java 444(+227 -217)
diff --git a/src/main/java/azkaban/executor/ExecutionOptions.java b/src/main/java/azkaban/executor/ExecutionOptions.java
index 7913751..90991f6 100644
--- a/src/main/java/azkaban/executor/ExecutionOptions.java
+++ b/src/main/java/azkaban/executor/ExecutionOptions.java
@@ -1,12 +1,12 @@
/*
* Copyright 2013 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -30,218 +30,228 @@ import azkaban.utils.TypedMapWrapper;
* Execution options for submitted flows and scheduled flows
*/
public class ExecutionOptions {
- public static final String CONCURRENT_OPTION_SKIP="skip";
- public static final String CONCURRENT_OPTION_PIPELINE="pipeline";
- public static final String CONCURRENT_OPTION_IGNORE="ignore";
-
- private static final String FLOW_PARAMETERS = "flowParameters";
- private static final String NOTIFY_ON_FIRST_FAILURE = "notifyOnFirstFailure";
- private static final String NOTIFY_ON_LAST_FAILURE = "notifyOnLastFailure";
- private static final String SUCCESS_EMAILS = "successEmails";
- private static final String FAILURE_EMAILS = "failureEmails";
- private static final String FAILURE_ACTION = "failureAction";
- private static final String PIPELINE_LEVEL = "pipelineLevel";
- private static final String PIPELINE_EXECID = "pipelineExecId";
- private static final String QUEUE_LEVEL = "queueLevel";
- private static final String CONCURRENT_OPTION = "concurrentOption";
- private static final String DISABLE = "disabled";
- private static final String FAILURE_EMAILS_OVERRIDE = "failureEmailsOverride";
- private static final String SUCCESS_EMAILS_OVERRIDE = "successEmailsOverride";
- private static final String MAIL_CREATOR = "mailCreator";
-
- private boolean notifyOnFirstFailure = true;
- private boolean notifyOnLastFailure = false;
- private boolean failureEmailsOverride = false;
- private boolean successEmailsOverride = false;
- private ArrayList<String> failureEmails = new ArrayList<String>();
- private ArrayList<String> successEmails = new ArrayList<String>();
-
- private Integer pipelineLevel = null;
- private Integer pipelineExecId = null;
- private Integer queueLevel = 0;
- private String concurrentOption = CONCURRENT_OPTION_IGNORE;
- private String mailCreator = DefaultMailCreator.DEFAULT_MAIL_CREATOR;
- private Map<String, String> flowParameters = new HashMap<String, String>();
-
- public enum FailureAction {
- FINISH_CURRENTLY_RUNNING,
- CANCEL_ALL,
- FINISH_ALL_POSSIBLE
- }
-
- private FailureAction failureAction = FailureAction.FINISH_CURRENTLY_RUNNING;
-
- private List<Object> initiallyDisabledJobs = new ArrayList<Object>();
-
- public void addAllFlowParameters(Map<String,String> flowParam) {
- flowParameters.putAll(flowParam);
- }
-
- public Map<String,String> getFlowParameters() {
- return flowParameters;
- }
-
- public void setFailureEmails(Collection<String> emails) {
- failureEmails = new ArrayList<String>(emails);
- }
-
- public boolean isFailureEmailsOverridden() {
- return this.failureEmailsOverride;
- }
-
- public boolean isSuccessEmailsOverridden() {
- return this.successEmailsOverride;
- }
-
- public void setSuccessEmailsOverridden(boolean override) {
- this.successEmailsOverride = override;
- }
-
- public void setFailureEmailsOverridden(boolean override) {
- this.failureEmailsOverride = override;
- }
-
- public List<String> getFailureEmails() {
- return failureEmails;
- }
-
- public void setSuccessEmails(Collection<String> emails) {
- successEmails = new ArrayList<String>(emails);
- }
-
- public List<String> getSuccessEmails() {
- return successEmails;
- }
-
- public boolean getNotifyOnFirstFailure() {
- return notifyOnFirstFailure;
- }
-
- public boolean getNotifyOnLastFailure() {
- return notifyOnLastFailure;
- }
-
- public void setNotifyOnFirstFailure(boolean notify) {
- this.notifyOnFirstFailure = notify;
- }
-
- public void setNotifyOnLastFailure(boolean notify) {
- this.notifyOnLastFailure = notify;
- }
-
- public FailureAction getFailureAction() {
- return failureAction;
- }
-
- public void setFailureAction(FailureAction action) {
- failureAction = action;
- }
-
- public void setConcurrentOption(String concurrentOption) {
- this.concurrentOption = concurrentOption;
- }
-
- public void setMailCreator(String mailCreator) {
- this.mailCreator = mailCreator;
- }
-
- public String getConcurrentOption() {
- return concurrentOption;
- }
-
- public String getMailCreator() {
- return mailCreator;
- }
-
- public Integer getPipelineLevel() {
- return pipelineLevel;
- }
-
- public Integer getPipelineExecutionId() {
- return pipelineExecId;
- }
-
- public void setPipelineLevel(Integer level) {
- pipelineLevel = level;
- }
-
- public void setPipelineExecutionId(Integer id) {
- this.pipelineExecId = id;
- }
-
- public Integer getQueueLevel() {
- return queueLevel;
- }
-
- public List<Object> getDisabledJobs() {
- return new ArrayList<Object>(initiallyDisabledJobs);
- }
-
- public void setDisabledJobs(List<Object> disabledJobs) {
- initiallyDisabledJobs = disabledJobs;
- }
-
- public Map<String,Object> toObject() {
- HashMap<String,Object> flowOptionObj = new HashMap<String,Object>();
-
- flowOptionObj.put(FLOW_PARAMETERS, this.flowParameters);
- flowOptionObj.put(NOTIFY_ON_FIRST_FAILURE, this.notifyOnFirstFailure);
- flowOptionObj.put(NOTIFY_ON_LAST_FAILURE, this.notifyOnLastFailure);
- flowOptionObj.put(SUCCESS_EMAILS, successEmails);
- flowOptionObj.put(FAILURE_EMAILS, failureEmails);
- flowOptionObj.put(FAILURE_ACTION, failureAction.toString());
- flowOptionObj.put(PIPELINE_LEVEL, pipelineLevel);
- flowOptionObj.put(PIPELINE_EXECID, pipelineExecId);
- flowOptionObj.put(QUEUE_LEVEL, queueLevel);
- flowOptionObj.put(CONCURRENT_OPTION, concurrentOption);
- flowOptionObj.put(DISABLE, initiallyDisabledJobs);
- flowOptionObj.put(FAILURE_EMAILS_OVERRIDE, failureEmailsOverride);
- flowOptionObj.put(SUCCESS_EMAILS_OVERRIDE, successEmailsOverride);
- flowOptionObj.put(MAIL_CREATOR, mailCreator);
- return flowOptionObj;
- }
-
- @SuppressWarnings("unchecked")
- public static ExecutionOptions createFromObject(Object obj) {
- if (obj == null || !(obj instanceof Map)) {
- return null;
- }
-
- Map<String,Object> optionsMap = (Map<String,Object>)obj;
- TypedMapWrapper<String,Object> wrapper = new TypedMapWrapper<String,Object>(optionsMap);
-
- ExecutionOptions options = new ExecutionOptions();
- if (optionsMap.containsKey(FLOW_PARAMETERS)) {
- options.flowParameters = new HashMap<String, String>();
- options.flowParameters.putAll(wrapper.<String,String>getMap(FLOW_PARAMETERS));
- }
- // Failure notification
- options.notifyOnFirstFailure = wrapper.getBool(NOTIFY_ON_FIRST_FAILURE, options.notifyOnFirstFailure);
- options.notifyOnLastFailure = wrapper.getBool(NOTIFY_ON_LAST_FAILURE, options.notifyOnLastFailure);
- options.concurrentOption = wrapper.getString(CONCURRENT_OPTION, options.concurrentOption);
-
- if (wrapper.containsKey(DISABLE)) {
- options.initiallyDisabledJobs = wrapper.<Object>getList(DISABLE);
- }
-
- if (optionsMap.containsKey(MAIL_CREATOR)) {
- options.mailCreator = (String)optionsMap.get(MAIL_CREATOR);
- }
-
- // Failure action
- options.failureAction = FailureAction.valueOf(wrapper.getString(FAILURE_ACTION, options.failureAction.toString()));
- options.pipelineLevel = wrapper.getInt(PIPELINE_LEVEL, options.pipelineLevel);
- options.pipelineExecId = wrapper.getInt(PIPELINE_EXECID, options.pipelineExecId);
- options.queueLevel = wrapper.getInt(QUEUE_LEVEL, options.queueLevel);
-
-
- // Success emails
- options.setSuccessEmails(wrapper.<String>getList(SUCCESS_EMAILS, Collections.<String>emptyList()));
- options.setFailureEmails(wrapper.<String>getList(FAILURE_EMAILS, Collections.<String>emptyList()));
-
- options.setSuccessEmailsOverridden(wrapper.getBool(SUCCESS_EMAILS_OVERRIDE, false));
- options.setFailureEmailsOverridden(wrapper.getBool(FAILURE_EMAILS_OVERRIDE, false));
-
- return options;
- }
+ // Policies for a new submission when the same flow is already running.
+ public static final String CONCURRENT_OPTION_SKIP = "skip";
+ public static final String CONCURRENT_OPTION_PIPELINE = "pipeline";
+ public static final String CONCURRENT_OPTION_IGNORE = "ignore";
+
+ // Map keys shared by toObject() and createFromObject().
+ private static final String FLOW_PARAMETERS = "flowParameters";
+ private static final String NOTIFY_ON_FIRST_FAILURE = "notifyOnFirstFailure";
+ private static final String NOTIFY_ON_LAST_FAILURE = "notifyOnLastFailure";
+ private static final String SUCCESS_EMAILS = "successEmails";
+ private static final String FAILURE_EMAILS = "failureEmails";
+ private static final String FAILURE_ACTION = "failureAction";
+ private static final String PIPELINE_LEVEL = "pipelineLevel";
+ private static final String PIPELINE_EXECID = "pipelineExecId";
+ private static final String QUEUE_LEVEL = "queueLevel";
+ private static final String CONCURRENT_OPTION = "concurrentOption";
+ private static final String DISABLE = "disabled";
+ private static final String FAILURE_EMAILS_OVERRIDE = "failureEmailsOverride";
+ private static final String SUCCESS_EMAILS_OVERRIDE = "successEmailsOverride";
+ private static final String MAIL_CREATOR = "mailCreator";
+
+ // Defaults below describe a fresh submission with no overrides.
+ private boolean notifyOnFirstFailure = true;
+ private boolean notifyOnLastFailure = false;
+ private boolean failureEmailsOverride = false;
+ private boolean successEmailsOverride = false;
+ private ArrayList<String> failureEmails = new ArrayList<String>();
+ private ArrayList<String> successEmails = new ArrayList<String>();
+
+ private Integer pipelineLevel = null;
+ private Integer pipelineExecId = null;
+ private Integer queueLevel = 0;
+ private String concurrentOption = CONCURRENT_OPTION_IGNORE;
+ private String mailCreator = DefaultMailCreator.DEFAULT_MAIL_CREATOR;
+ private Map<String, String> flowParameters = new HashMap<String, String>();
+
+ // What to do with the rest of the flow when a job fails.
+ public enum FailureAction {
+ FINISH_CURRENTLY_RUNNING, CANCEL_ALL, FINISH_ALL_POSSIBLE
+ }
+
+ private FailureAction failureAction = FailureAction.FINISH_CURRENTLY_RUNNING;
+
+ private List<Object> initiallyDisabledJobs = new ArrayList<Object>();
+
+ /** Merges the given entries into the flow-parameter overrides. */
+ public void addAllFlowParameters(Map<String, String> flowParam) {
+   this.flowParameters.putAll(flowParam);
+ }
+
+ public Map<String, String> getFlowParameters() {
+   return this.flowParameters;
+ }
+
+ /** Replaces the failure-notification email list with a copy of {@code emails}. */
+ public void setFailureEmails(Collection<String> emails) {
+   this.failureEmails = new ArrayList<String>(emails);
+ }
+
+ public boolean isFailureEmailsOverridden() {
+   return this.failureEmailsOverride;
+ }
+
+ public boolean isSuccessEmailsOverridden() {
+   return this.successEmailsOverride;
+ }
+
+ public void setSuccessEmailsOverridden(boolean override) {
+   this.successEmailsOverride = override;
+ }
+
+ public void setFailureEmailsOverridden(boolean override) {
+   this.failureEmailsOverride = override;
+ }
+
+ public List<String> getFailureEmails() {
+   return this.failureEmails;
+ }
+
+ /** Replaces the success-notification email list with a copy of {@code emails}. */
+ public void setSuccessEmails(Collection<String> emails) {
+   this.successEmails = new ArrayList<String>(emails);
+ }
+
+ public List<String> getSuccessEmails() {
+   return this.successEmails;
+ }
+
+ public boolean getNotifyOnFirstFailure() {
+   return this.notifyOnFirstFailure;
+ }
+
+ public boolean getNotifyOnLastFailure() {
+   return this.notifyOnLastFailure;
+ }
+
+ public void setNotifyOnFirstFailure(boolean notify) {
+   this.notifyOnFirstFailure = notify;
+ }
+
+ public void setNotifyOnLastFailure(boolean notify) {
+   this.notifyOnLastFailure = notify;
+ }
+
+ /** Policy applied to the rest of the flow when a job fails. */
+ public FailureAction getFailureAction() {
+   return this.failureAction;
+ }
+
+ public void setFailureAction(FailureAction action) {
+   this.failureAction = action;
+ }
+
+ public void setConcurrentOption(String concurrentOption) {
+   this.concurrentOption = concurrentOption;
+ }
+
+ public void setMailCreator(String mailCreator) {
+   this.mailCreator = mailCreator;
+ }
+
+ public String getConcurrentOption() {
+   return this.concurrentOption;
+ }
+
+ public String getMailCreator() {
+   return this.mailCreator;
+ }
+
+ public Integer getPipelineLevel() {
+   return this.pipelineLevel;
+ }
+
+ public Integer getPipelineExecutionId() {
+   return this.pipelineExecId;
+ }
+
+ public void setPipelineLevel(Integer level) {
+   this.pipelineLevel = level;
+ }
+
+ public void setPipelineExecutionId(Integer id) {
+   this.pipelineExecId = id;
+ }
+
+ public Integer getQueueLevel() {
+   return this.queueLevel;
+ }
+
+ /** Returns a defensive copy of the initially disabled job entries. */
+ public List<Object> getDisabledJobs() {
+   return new ArrayList<Object>(this.initiallyDisabledJobs);
+ }
+
+ public void setDisabledJobs(List<Object> disabledJobs) {
+   this.initiallyDisabledJobs = disabledJobs;
+ }
+
+ /** Serializes these options into a map for persistence alongside the flow. */
+ public Map<String, Object> toObject() {
+   HashMap<String, Object> optionsMap = new HashMap<String, Object>();
+   optionsMap.put(FLOW_PARAMETERS, this.flowParameters);
+   optionsMap.put(NOTIFY_ON_FIRST_FAILURE, this.notifyOnFirstFailure);
+   optionsMap.put(NOTIFY_ON_LAST_FAILURE, this.notifyOnLastFailure);
+   optionsMap.put(SUCCESS_EMAILS, this.successEmails);
+   optionsMap.put(FAILURE_EMAILS, this.failureEmails);
+   optionsMap.put(FAILURE_ACTION, this.failureAction.toString());
+   optionsMap.put(PIPELINE_LEVEL, this.pipelineLevel);
+   optionsMap.put(PIPELINE_EXECID, this.pipelineExecId);
+   optionsMap.put(QUEUE_LEVEL, this.queueLevel);
+   optionsMap.put(CONCURRENT_OPTION, this.concurrentOption);
+   optionsMap.put(DISABLE, this.initiallyDisabledJobs);
+   optionsMap.put(FAILURE_EMAILS_OVERRIDE, this.failureEmailsOverride);
+   optionsMap.put(SUCCESS_EMAILS_OVERRIDE, this.successEmailsOverride);
+   optionsMap.put(MAIL_CREATOR, this.mailCreator);
+   return optionsMap;
+ }
+
+ /**
+  * Rebuilds ExecutionOptions from the map produced by toObject(). Missing
+  * keys fall back to the freshly-constructed defaults; returns null when
+  * {@code obj} is not a map. (A null obj fails the instanceof check too.)
+  */
+ @SuppressWarnings("unchecked")
+ public static ExecutionOptions createFromObject(Object obj) {
+   if (!(obj instanceof Map)) {
+     return null;
+   }
+
+   Map<String, Object> optionsMap = (Map<String, Object>) obj;
+   TypedMapWrapper<String, Object> wrapper =
+       new TypedMapWrapper<String, Object>(optionsMap);
+
+   ExecutionOptions options = new ExecutionOptions();
+   if (optionsMap.containsKey(FLOW_PARAMETERS)) {
+     Map<String, String> params = new HashMap<String, String>();
+     params.putAll(wrapper.<String, String> getMap(FLOW_PARAMETERS));
+     options.flowParameters = params;
+   }
+
+   // Failure-notification flags and the concurrent-execution policy.
+   options.notifyOnFirstFailure =
+       wrapper.getBool(NOTIFY_ON_FIRST_FAILURE, options.notifyOnFirstFailure);
+   options.notifyOnLastFailure =
+       wrapper.getBool(NOTIFY_ON_LAST_FAILURE, options.notifyOnLastFailure);
+   options.concurrentOption =
+       wrapper.getString(CONCURRENT_OPTION, options.concurrentOption);
+
+   if (wrapper.containsKey(DISABLE)) {
+     options.initiallyDisabledJobs = wrapper.<Object> getList(DISABLE);
+   }
+   if (optionsMap.containsKey(MAIL_CREATOR)) {
+     options.mailCreator = (String) optionsMap.get(MAIL_CREATOR);
+   }
+
+   // Failure action plus pipeline/queue settings.
+   options.failureAction =
+       FailureAction.valueOf(wrapper.getString(FAILURE_ACTION,
+           options.failureAction.toString()));
+   options.pipelineLevel =
+       wrapper.getInt(PIPELINE_LEVEL, options.pipelineLevel);
+   options.pipelineExecId =
+       wrapper.getInt(PIPELINE_EXECID, options.pipelineExecId);
+   options.queueLevel = wrapper.getInt(QUEUE_LEVEL, options.queueLevel);
+
+   // Email lists and their override flags.
+   options.setSuccessEmails(wrapper.<String> getList(SUCCESS_EMAILS,
+       Collections.<String> emptyList()));
+   options.setFailureEmails(wrapper.<String> getList(FAILURE_EMAILS,
+       Collections.<String> emptyList()));
+   options.setSuccessEmailsOverridden(
+       wrapper.getBool(SUCCESS_EMAILS_OVERRIDE, false));
+   options.setFailureEmailsOverridden(
+       wrapper.getBool(FAILURE_EMAILS_OVERRIDE, false));
+
+   return options;
+ }
}
diff --git a/src/main/java/azkaban/executor/ExecutionReference.java b/src/main/java/azkaban/executor/ExecutionReference.java
index 61eadbf..e314206 100644
--- a/src/main/java/azkaban/executor/ExecutionReference.java
+++ b/src/main/java/azkaban/executor/ExecutionReference.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -17,52 +17,52 @@
package azkaban.executor;
public class ExecutionReference {
- private final int execId;
- private final String host;
- private final int port;
- private long updateTime;
- private long nextCheckTime = -1;
- private int numErrors = 0;
-
- public ExecutionReference(int execId, String host, int port) {
- this.execId = execId;
- this.host = host;
- this.port = port;
- }
-
- public void setUpdateTime(long updateTime) {
- this.updateTime = updateTime;
- }
-
- public void setNextCheckTime(long nextCheckTime) {
- this.nextCheckTime = nextCheckTime;
- }
+ // Identity of the tracked execution and the executor host:port serving it.
+ private final int execId;
+ private final String host;
+ private final int port;
+ // Bookkeeping timestamps and consecutive-error count for status polling.
+ private long updateTime;
+ private long nextCheckTime = -1;
+ private int numErrors = 0;
- public long getUpdateTime() {
- return updateTime;
- }
+ /** Creates a reference to execution {@code execId} running at host:port. */
+ public ExecutionReference(int execId, String host, int port) {
+ this.execId = execId;
+ this.host = host;
+ this.port = port;
+ }
- public long getNextCheckTime() {
- return nextCheckTime;
- }
+ public void setUpdateTime(long updateTime) {
+ this.updateTime = updateTime;
+ }
- public int getExecId() {
- return execId;
- }
+ public void setNextCheckTime(long nextCheckTime) {
+ this.nextCheckTime = nextCheckTime;
+ }
- public String getHost() {
- return host;
- }
+ public long getUpdateTime() {
+ return updateTime;
+ }
- public int getPort() {
- return port;
- }
+ public long getNextCheckTime() {
+ return nextCheckTime;
+ }
- public int getNumErrors() {
- return numErrors;
- }
+ public int getExecId() {
+ return execId;
+ }
- public void setNumErrors(int numErrors) {
- this.numErrors = numErrors;
- }
- }
+ // Plain accessors for the executor endpoint and the error counter.
+ public String getHost() {
+ return host;
+ }
+
+ public int getPort() {
+ return port;
+ }
+
+ public int getNumErrors() {
+ return numErrors;
+ }
+
+ public void setNumErrors(int numErrors) {
+ this.numErrors = numErrors;
+ }
+}
src/main/java/azkaban/executor/ExecutorLoader.java 107(+67 -40)
diff --git a/src/main/java/azkaban/executor/ExecutorLoader.java b/src/main/java/azkaban/executor/ExecutorLoader.java
index 4763ee5..6dc0e11 100644
--- a/src/main/java/azkaban/executor/ExecutorLoader.java
+++ b/src/main/java/azkaban/executor/ExecutorLoader.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -25,58 +25,85 @@ import azkaban.utils.Pair;
import azkaban.utils.Props;
public interface ExecutorLoader {
- public void uploadExecutableFlow(ExecutableFlow flow) throws ExecutorManagerException;
-
- public ExecutableFlow fetchExecutableFlow(int execId) throws ExecutorManagerException;
-
- public Map<Integer,Pair<ExecutionReference, ExecutableFlow>> fetchActiveFlows() throws ExecutorManagerException;
+ public void uploadExecutableFlow(ExecutableFlow flow)
+ throws ExecutorManagerException;
- public List<ExecutableFlow> fetchFlowHistory(int skip, int num) throws ExecutorManagerException;
+ public ExecutableFlow fetchExecutableFlow(int execId)
+ throws ExecutorManagerException;
- public List<ExecutableFlow> fetchFlowHistory(int projectId, String flowId, int skip, int num) throws ExecutorManagerException;
+ public Map<Integer, Pair<ExecutionReference, ExecutableFlow>> fetchActiveFlows()
+ throws ExecutorManagerException;
- public List<ExecutableFlow> fetchFlowHistory(int projectId, String flowId, int skip, int num, Status status) throws ExecutorManagerException;
+ public List<ExecutableFlow> fetchFlowHistory(int skip, int num)
+ throws ExecutorManagerException;
- public List<ExecutableFlow> fetchFlowHistory(String projContain, String flowContains, String userNameContains, int status, long startData, long endData, int skip, int num) throws ExecutorManagerException;
+ public List<ExecutableFlow> fetchFlowHistory(int projectId, String flowId,
+ int skip, int num) throws ExecutorManagerException;
- public void addActiveExecutableReference(ExecutionReference ref) throws ExecutorManagerException;
+ public List<ExecutableFlow> fetchFlowHistory(int projectId, String flowId,
+ int skip, int num, Status status) throws ExecutorManagerException;
- public void removeActiveExecutableReference(int execId) throws ExecutorManagerException;
+ public List<ExecutableFlow> fetchFlowHistory(String projContain,
+ String flowContains, String userNameContains, int status, long startData,
+ long endData, int skip, int num) throws ExecutorManagerException;
- public boolean updateExecutableReference(int execId, long updateTime) throws ExecutorManagerException;
+ public void addActiveExecutableReference(ExecutionReference ref)
+ throws ExecutorManagerException;
- public LogData fetchLogs(int execId, String name, int attempt, int startByte, int endByte) throws ExecutorManagerException;
+ public void removeActiveExecutableReference(int execId)
+ throws ExecutorManagerException;
- public List<Object> fetchAttachments(int execId, String name, int attempt) throws ExecutorManagerException;
+ public boolean updateExecutableReference(int execId, long updateTime)
+ throws ExecutorManagerException;
- public void uploadLogFile(int execId, String name, int attempt, File ... files) throws ExecutorManagerException;
-
- public void uploadAttachmentFile(ExecutableNode node, File file) throws ExecutorManagerException;
+ public LogData fetchLogs(int execId, String name, int attempt, int startByte,
+ int endByte) throws ExecutorManagerException;
- public void updateExecutableFlow(ExecutableFlow flow) throws ExecutorManagerException;
+ public List<Object> fetchAttachments(int execId, String name, int attempt)
+ throws ExecutorManagerException;
- public void uploadExecutableNode(ExecutableNode node, Props inputParams) throws ExecutorManagerException;
+ public void uploadLogFile(int execId, String name, int attempt, File... files)
+ throws ExecutorManagerException;
- public List<ExecutableJobInfo> fetchJobInfoAttempts(int execId, String jobId) throws ExecutorManagerException;
+ public void uploadAttachmentFile(ExecutableNode node, File file)
+ throws ExecutorManagerException;
- public ExecutableJobInfo fetchJobInfo(int execId, String jobId, int attempt) throws ExecutorManagerException;
-
- public List<ExecutableJobInfo> fetchJobHistory(int projectId, String jobId, int skip, int size) throws ExecutorManagerException;
-
- public void updateExecutableNode(ExecutableNode node) throws ExecutorManagerException;
+ public void updateExecutableFlow(ExecutableFlow flow)
+ throws ExecutorManagerException;
- public int fetchNumExecutableFlows(int projectId, String flowId) throws ExecutorManagerException;
+ public void uploadExecutableNode(ExecutableNode node, Props inputParams)
+ throws ExecutorManagerException;
- public int fetchNumExecutableFlows() throws ExecutorManagerException;
-
- public int fetchNumExecutableNodes(int projectId, String jobId) throws ExecutorManagerException;
-
- public Props fetchExecutionJobInputProps(int execId, String jobId) throws ExecutorManagerException;
-
- public Props fetchExecutionJobOutputProps(int execId, String jobId) throws ExecutorManagerException;
-
- public Pair<Props, Props> fetchExecutionJobProps(int execId, String jobId) throws ExecutorManagerException;
+ public List<ExecutableJobInfo> fetchJobInfoAttempts(int execId, String jobId)
+ throws ExecutorManagerException;
+
+ public ExecutableJobInfo fetchJobInfo(int execId, String jobId, int attempt)
+ throws ExecutorManagerException;
+
+ public List<ExecutableJobInfo> fetchJobHistory(int projectId, String jobId,
+ int skip, int size) throws ExecutorManagerException;
+
+ public void updateExecutableNode(ExecutableNode node)
+ throws ExecutorManagerException;
+
+ public int fetchNumExecutableFlows(int projectId, String flowId)
+ throws ExecutorManagerException;
+
+ public int fetchNumExecutableFlows() throws ExecutorManagerException;
+
+ public int fetchNumExecutableNodes(int projectId, String jobId)
+ throws ExecutorManagerException;
+
+ public Props fetchExecutionJobInputProps(int execId, String jobId)
+ throws ExecutorManagerException;
+
+ public Props fetchExecutionJobOutputProps(int execId, String jobId)
+ throws ExecutorManagerException;
+
+ public Pair<Props, Props> fetchExecutionJobProps(int execId, String jobId)
+ throws ExecutorManagerException;
+
+ public int removeExecutionLogsByTime(long millis)
+ throws ExecutorManagerException;
- public int removeExecutionLogsByTime(long millis) throws ExecutorManagerException;
-
}
src/main/java/azkaban/executor/ExecutorManager.java 2454(+1280 -1174)
diff --git a/src/main/java/azkaban/executor/ExecutorManager.java b/src/main/java/azkaban/executor/ExecutorManager.java
index a48c524..a2c90c0 100644
--- a/src/main/java/azkaban/executor/ExecutorManager.java
+++ b/src/main/java/azkaban/executor/ExecutorManager.java
@@ -1,12 +1,12 @@
/*
* Copyright 2014 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -54,1176 +54,1282 @@ import azkaban.utils.Props;
/**
* Executor manager used to manage the client side job.
- *
+ *
*/
-public class ExecutorManager extends EventHandler implements ExecutorManagerAdapter {
- private static Logger logger = Logger.getLogger(ExecutorManager.class);
- private ExecutorLoader executorLoader;
- private String executorHost;
- private int executorPort;
-
- private CleanerThread cleanerThread;
-
- private ConcurrentHashMap<Integer, Pair<ExecutionReference, ExecutableFlow>> runningFlows = new ConcurrentHashMap<Integer, Pair<ExecutionReference, ExecutableFlow>>();
- private ConcurrentHashMap<Integer, ExecutableFlow> recentlyFinished = new ConcurrentHashMap<Integer, ExecutableFlow>();
-
- private ExecutingManagerUpdaterThread executingManager;
-
- private static final long DEFAULT_EXECUTION_LOGS_RETENTION_MS = 3*4*7*24*60*60*1000l;
- private long lastCleanerThreadCheckTime = -1;
-
- private long lastThreadCheckTime = -1;
- private String updaterStage = "not started";
-
- private Map<String, Alerter> alerters;
-
- File cacheDir;
-
- public ExecutorManager(Props props, ExecutorLoader loader, Map<String, Alerter> alters) throws ExecutorManagerException {
- this.executorLoader = loader;
- this.loadRunningFlows();
- executorHost = props.getString("executor.host", "localhost");
- executorPort = props.getInt("executor.port");
-
- alerters = alters;
-
- cacheDir = new File(props.getString("cache.directory", "cache"));
-
- executingManager = new ExecutingManagerUpdaterThread();
- executingManager.start();
-
- long executionLogsRetentionMs = props.getLong("execution.logs.retention.ms", DEFAULT_EXECUTION_LOGS_RETENTION_MS);
- cleanerThread = new CleanerThread(executionLogsRetentionMs);
- cleanerThread.start();
-
- }
-
- @Override
- public State getExecutorManagerThreadState() {
- return executingManager.getState();
- }
-
- public String getExecutorThreadStage() {
- return updaterStage;
- }
-
- @Override
- public boolean isExecutorManagerThreadActive() {
- return executingManager.isAlive();
- }
-
- @Override
- public long getLastExecutorManagerThreadCheckTime() {
- return lastThreadCheckTime;
- }
-
- public long getLastCleanerThreadCheckTime() {
- return this.lastCleanerThreadCheckTime;
- }
-
- @Override
- public Set<String> getPrimaryServerHosts() {
- // Only one for now. More probably later.
- HashSet<String> ports = new HashSet<String>();
- ports.add(executorHost + ":" + executorPort);
- return ports;
- }
-
- @Override
- public Set<String> getAllActiveExecutorServerHosts() {
- // Includes non primary server/hosts
- HashSet<String> ports = new HashSet<String>();
- ports.add(executorHost + ":" + executorPort);
- for(Pair<ExecutionReference, ExecutableFlow> running: runningFlows.values()) {
- ExecutionReference ref = running.getFirst();
- ports.add(ref.getHost() + ":" + ref.getPort());
- }
-
- return ports;
- }
-
- private void loadRunningFlows() throws ExecutorManagerException {
- runningFlows.putAll(executorLoader.fetchActiveFlows());
- }
-
- @Override
- public List<Integer> getRunningFlows(int projectId, String flowId) {
- ArrayList<Integer> executionIds = new ArrayList<Integer>();
- for (Pair<ExecutionReference, ExecutableFlow> ref : runningFlows.values()) {
- if (ref.getSecond().getFlowId().equals(flowId) && ref.getSecond().getProjectId() == projectId) {
- executionIds.add(ref.getFirst().getExecId());
- }
- }
- return executionIds;
- }
-
- @Override
- public boolean isFlowRunning(int projectId, String flowId) {
- for (Pair<ExecutionReference, ExecutableFlow> ref : runningFlows.values()) {
- if (ref.getSecond().getProjectId() == projectId && ref.getSecond().getFlowId().equals(flowId)) {
- return true;
- }
- }
- return false;
- }
-
- @Override
- public ExecutableFlow getExecutableFlow(int execId) throws ExecutorManagerException {
- Pair<ExecutionReference, ExecutableFlow> active = runningFlows.get(execId);
- if (active == null) {
- return executorLoader.fetchExecutableFlow(execId);
- }
- return active.getSecond();
- }
-
- @Override
- public List<ExecutableFlow> getRunningFlows() {
- ArrayList<ExecutableFlow> flows = new ArrayList<ExecutableFlow>();
- for (Pair<ExecutionReference, ExecutableFlow> ref : runningFlows.values()) {
- flows.add(ref.getSecond());
- }
- return flows;
- }
-
- public String getRunningFlowIds() {
- List<Integer> allIds = new ArrayList<Integer>();
- for (Pair<ExecutionReference, ExecutableFlow> ref : runningFlows.values()) {
- allIds.add(ref.getSecond().getExecutionId());
- }
- Collections.sort(allIds);
- return allIds.toString();
- }
-
- public List<ExecutableFlow> getRecentlyFinishedFlows() {
- return new ArrayList<ExecutableFlow>(recentlyFinished.values());
- }
-
- @Override
- public List<ExecutableFlow> getExecutableFlows(
- Project project, String flowId, int skip, int size)
- throws ExecutorManagerException {
- List<ExecutableFlow> flows = executorLoader.fetchFlowHistory(
- project.getId(), flowId, skip, size);
- return flows;
- }
-
- @Override
- public List<ExecutableFlow> getExecutableFlows(int skip, int size)
- throws ExecutorManagerException {
- List<ExecutableFlow> flows = executorLoader.fetchFlowHistory(skip, size);
- return flows;
- }
-
- @Override
- public List<ExecutableFlow> getExecutableFlows(
- String flowIdContains, int skip, int size)
- throws ExecutorManagerException {
- List<ExecutableFlow> flows = executorLoader.fetchFlowHistory(
- null, '%'+flowIdContains+'%', null, 0, -1, -1 , skip, size);
- return flows;
- }
-
- @Override
- public List<ExecutableFlow> getExecutableFlows(
- String projContain,
- String flowContain,
- String userContain,
- int status,
- long begin,
- long end,
- int skip,
- int size) throws ExecutorManagerException {
- List<ExecutableFlow> flows = executorLoader.fetchFlowHistory(
- projContain, flowContain, userContain, status, begin, end , skip, size);
- return flows;
- }
-
- @Override
- public List<ExecutableJobInfo> getExecutableJobs(Project project, String jobId, int skip, int size) throws ExecutorManagerException {
- List<ExecutableJobInfo> nodes = executorLoader.fetchJobHistory(project.getId(), jobId, skip, size);
- return nodes;
- }
-
- @Override
- public int getNumberOfJobExecutions(Project project, String jobId) throws ExecutorManagerException{
- return executorLoader.fetchNumExecutableNodes(project.getId(), jobId);
- }
-
- @Override
- public int getNumberOfExecutions(Project project, String flowId) throws ExecutorManagerException{
- return executorLoader.fetchNumExecutableFlows(project.getId(), flowId);
- }
-
- @Override
- public LogData getExecutableFlowLog(ExecutableFlow exFlow, int offset, int length) throws ExecutorManagerException {
- Pair<ExecutionReference, ExecutableFlow> pair = runningFlows.get(exFlow.getExecutionId());
- if (pair != null) {
- Pair<String,String> typeParam = new Pair<String,String>("type", "flow");
- Pair<String,String> offsetParam = new Pair<String,String>("offset", String.valueOf(offset));
- Pair<String,String> lengthParam = new Pair<String,String>("length", String.valueOf(length));
-
- @SuppressWarnings("unchecked")
- Map<String, Object> result = callExecutorServer(pair.getFirst(), ConnectorParams.LOG_ACTION, typeParam, offsetParam, lengthParam);
- return LogData.createLogDataFromObject(result);
- }
- else {
- LogData value = executorLoader.fetchLogs(exFlow.getExecutionId(), "", 0, offset, length);
- return value;
- }
- }
-
- @Override
- public LogData getExecutionJobLog(
- ExecutableFlow exFlow, String jobId, int offset, int length, int attempt)
- throws ExecutorManagerException {
- Pair<ExecutionReference, ExecutableFlow> pair =
- runningFlows.get(exFlow.getExecutionId());
- if (pair != null) {
- Pair<String,String> typeParam = new Pair<String,String>("type", "job");
- Pair<String,String> jobIdParam = new Pair<String,String>("jobId", jobId);
- Pair<String,String> offsetParam = new Pair<String,String>("offset", String.valueOf(offset));
- Pair<String,String> lengthParam = new Pair<String,String>("length", String.valueOf(length));
- Pair<String,String> attemptParam = new Pair<String,String>("attempt", String.valueOf(attempt));
-
- @SuppressWarnings("unchecked")
- Map<String, Object> result = callExecutorServer(
- pair.getFirst(),
- ConnectorParams.LOG_ACTION,
- typeParam,
- jobIdParam,
- offsetParam,
- lengthParam,
- attemptParam);
- return LogData.createLogDataFromObject(result);
- }
- else {
- LogData value = executorLoader.fetchLogs(
- exFlow.getExecutionId(), jobId, attempt, offset, length);
- return value;
- }
- }
-
- @Override
- public List<Object> getExecutionJobStats(
- ExecutableFlow exFlow, String jobId, int attempt)
- throws ExecutorManagerException {
- Pair<ExecutionReference, ExecutableFlow> pair =
- runningFlows.get(exFlow.getExecutionId());
- if (pair == null) {
- return executorLoader.fetchAttachments(
- exFlow.getExecutionId(), jobId, attempt);
- }
-
- Pair<String, String> jobIdParam = new Pair<String, String>("jobId", jobId);
- Pair<String,String> attemptParam = new Pair<String,String>("attempt", String.valueOf(attempt));
-
- @SuppressWarnings("unchecked")
- Map<String, Object> result = callExecutorServer(
- pair.getFirst(),
- ConnectorParams.ATTACHMENTS_ACTION,
- jobIdParam,
- attemptParam);
-
- @SuppressWarnings("unchecked")
- List<Object> jobStats = (List<Object>) result.get("attachments");
-
- return jobStats;
- }
-
- @Override
- public JobMetaData getExecutionJobMetaData(ExecutableFlow exFlow, String jobId, int offset, int length, int attempt) throws ExecutorManagerException {
- Pair<ExecutionReference, ExecutableFlow> pair = runningFlows.get(exFlow.getExecutionId());
- if (pair != null) {
-
- Pair<String,String> typeParam = new Pair<String,String>("type", "job");
- Pair<String,String> jobIdParam = new Pair<String,String>("jobId", jobId);
- Pair<String,String> offsetParam = new Pair<String,String>("offset", String.valueOf(offset));
- Pair<String,String> lengthParam = new Pair<String,String>("length", String.valueOf(length));
- Pair<String,String> attemptParam = new Pair<String,String>("attempt", String.valueOf(attempt));
-
- @SuppressWarnings("unchecked")
- Map<String, Object> result = callExecutorServer(pair.getFirst(), ConnectorParams.METADATA_ACTION, typeParam, jobIdParam, offsetParam, lengthParam, attemptParam);
- return JobMetaData.createJobMetaDataFromObject(result);
- }
- else {
- return null;
- }
- }
-
- @Override
- public void cancelFlow(ExecutableFlow exFlow, String userId) throws ExecutorManagerException {
- synchronized(exFlow) {
- Pair<ExecutionReference, ExecutableFlow> pair = runningFlows.get(exFlow.getExecutionId());
- if (pair == null) {
- throw new ExecutorManagerException("Execution " + exFlow.getExecutionId() + " of flow " + exFlow.getFlowId() + " isn't running.");
- }
- callExecutorServer(pair.getFirst(), ConnectorParams.CANCEL_ACTION, userId);
- }
- }
-
- @Override
- public void resumeFlow(ExecutableFlow exFlow, String userId) throws ExecutorManagerException {
- synchronized(exFlow) {
- Pair<ExecutionReference, ExecutableFlow> pair = runningFlows.get(exFlow.getExecutionId());
- if (pair == null) {
- throw new ExecutorManagerException("Execution " + exFlow.getExecutionId() + " of flow " + exFlow.getFlowId() + " isn't running.");
- }
- callExecutorServer(pair.getFirst(), ConnectorParams.RESUME_ACTION, userId);
- }
- }
-
- @Override
- public void pauseFlow(ExecutableFlow exFlow, String userId) throws ExecutorManagerException {
- synchronized(exFlow) {
- Pair<ExecutionReference, ExecutableFlow> pair = runningFlows.get(exFlow.getExecutionId());
- if (pair == null) {
- throw new ExecutorManagerException("Execution " + exFlow.getExecutionId() + " of flow " + exFlow.getFlowId() + " isn't running.");
- }
- callExecutorServer(pair.getFirst(), ConnectorParams.PAUSE_ACTION, userId);
- }
- }
-
- @Override
- public void pauseExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException {
- modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_PAUSE_JOBS, userId, jobIds);
- }
-
- @Override
- public void resumeExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException {
- modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_RESUME_JOBS, userId, jobIds);
- }
-
- @Override
- public void retryFailures(ExecutableFlow exFlow, String userId) throws ExecutorManagerException {
- modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_RETRY_FAILURES, userId);
- }
-
- @Override
- public void retryExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException {
- modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_RETRY_JOBS, userId, jobIds);
- }
-
- @Override
- public void disableExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException {
- modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_DISABLE_JOBS, userId, jobIds);
- }
-
- @Override
- public void enableExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException {
- modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_ENABLE_JOBS, userId, jobIds);
- }
-
- @Override
- public void cancelExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException {
- modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_CANCEL_JOBS, userId, jobIds);
- }
-
- @SuppressWarnings("unchecked")
- private Map<String, Object> modifyExecutingJobs(ExecutableFlow exFlow, String command, String userId, String ... jobIds) throws ExecutorManagerException {
- synchronized(exFlow) {
- Pair<ExecutionReference, ExecutableFlow> pair = runningFlows.get(exFlow.getExecutionId());
- if (pair == null) {
- throw new ExecutorManagerException("Execution " + exFlow.getExecutionId() + " of flow " + exFlow.getFlowId() + " isn't running.");
- }
-
- Map<String, Object> response = null;
- if (jobIds != null && jobIds.length > 0) {
- for (String jobId: jobIds) {
- if (!jobId.isEmpty()) {
- ExecutableNode node = exFlow.getExecutableNode(jobId);
- if (node == null) {
- throw new ExecutorManagerException("Job " + jobId + " doesn't exist in execution " + exFlow.getExecutionId() + ".");
- }
- }
- }
- String ids = StringUtils.join(jobIds, ',');
- response = callExecutorServer(
- pair.getFirst(),
- ConnectorParams.MODIFY_EXECUTION_ACTION,
- userId,
- new Pair<String,String>(ConnectorParams.MODIFY_EXECUTION_ACTION_TYPE, command),
- new Pair<String,String>(ConnectorParams.MODIFY_JOBS_LIST, ids));
- }
- else {
- response = callExecutorServer(
- pair.getFirst(),
- ConnectorParams.MODIFY_EXECUTION_ACTION,
- userId,
- new Pair<String,String>(ConnectorParams.MODIFY_EXECUTION_ACTION_TYPE, command));
- }
-
- return response;
- }
- }
-
- private void applyDisabledJobs(List<Object> disabledJobs, ExecutableFlowBase exflow) {
- for (Object disabled: disabledJobs) {
- if (disabled instanceof String) {
- String nodeName = (String)disabled;
- ExecutableNode node = exflow.getExecutableNode(nodeName);
- if (node != null) {
- node.setStatus(Status.DISABLED);
- }
- }
- else if (disabled instanceof Map) {
- @SuppressWarnings("unchecked")
- Map<String,Object> nestedDisabled = (Map<String, Object>)disabled;
- String nodeName = (String)nestedDisabled.get("id");
- @SuppressWarnings("unchecked")
- List<Object> subDisabledJobs = (List<Object>)nestedDisabled.get("children");
-
- if (nodeName == null || subDisabledJobs == null) {
- return;
- }
-
- ExecutableNode node = exflow.getExecutableNode(nodeName);
- if (node != null && node instanceof ExecutableFlowBase) {
- applyDisabledJobs(subDisabledJobs, (ExecutableFlowBase)node);
- }
- }
- }
- }
-
- @Override
- public String submitExecutableFlow(ExecutableFlow exflow, String userId) throws ExecutorManagerException {
- synchronized(exflow) {
- logger.info("Submitting execution flow " + exflow.getFlowId() + " by " + userId);
-
- int projectId = exflow.getProjectId();
- String flowId = exflow.getFlowId();
- exflow.setSubmitUser(userId);
- exflow.setSubmitTime(System.currentTimeMillis());
-
- List<Integer> running = getRunningFlows(projectId, flowId);
-
- ExecutionOptions options = exflow.getExecutionOptions();
- if (options == null) {
- options = new ExecutionOptions();
- }
-
- String message = "";
- if (options.getDisabledJobs() != null) {
- applyDisabledJobs(options.getDisabledJobs(), exflow);
- }
-
- if (!running.isEmpty()) {
- if (options.getConcurrentOption().equals(ExecutionOptions.CONCURRENT_OPTION_PIPELINE)) {
- Collections.sort(running);
- Integer runningExecId = running.get(running.size() - 1);
-
- options.setPipelineExecutionId(runningExecId);
- message = "Flow " + flowId + " is already running with exec id " + runningExecId +". Pipelining level " + options.getPipelineLevel() + ". \n";
- }
- else if (options.getConcurrentOption().equals(ExecutionOptions.CONCURRENT_OPTION_SKIP)) {
- throw new ExecutorManagerException("Flow " + flowId + " is already running. Skipping execution.", ExecutorManagerException.Reason.SkippedExecution);
- }
- else {
- // The settings is to run anyways.
- message = "Flow " + flowId + " is already running with exec id " + StringUtils.join(running, ",") +". Will execute concurrently. \n";
- }
- }
-
- // The exflow id is set by the loader. So it's unavailable until after this call.
- executorLoader.uploadExecutableFlow(exflow);
-
- // We create an active flow reference in the datastore. If the upload fails, we remove the reference.
- ExecutionReference reference = new ExecutionReference(exflow.getExecutionId(), executorHost, executorPort);
- executorLoader.addActiveExecutableReference(reference);
- try {
- callExecutorServer(reference, ConnectorParams.EXECUTE_ACTION);
- runningFlows.put(exflow.getExecutionId(), new Pair<ExecutionReference, ExecutableFlow>(reference, exflow));
-
- message += "Execution submitted successfully with exec id " + exflow.getExecutionId();
- }
- catch (ExecutorManagerException e) {
- executorLoader.removeActiveExecutableReference(reference.getExecId());
- throw e;
- }
-
- return message;
- }
- }
-
-
- private void cleanOldExecutionLogs(long millis) {
- try {
- int count = executorLoader.removeExecutionLogsByTime(millis);
- logger.info("Cleaned up " + count + " log entries.");
- }
- catch (ExecutorManagerException e) {
- e.printStackTrace();
- }
- }
-
- private Map<String, Object> callExecutorServer(ExecutionReference ref, String action) throws ExecutorManagerException {
- try {
- return callExecutorServer(ref.getHost(), ref.getPort(), action, ref.getExecId(), null, (Pair<String,String>[])null);
- } catch (IOException e) {
- throw new ExecutorManagerException(e);
- }
- }
-
- private Map<String, Object> callExecutorServer(ExecutionReference ref, String action, String user) throws ExecutorManagerException {
- try {
- return callExecutorServer(ref.getHost(), ref.getPort(), action, ref.getExecId(), user, (Pair<String,String>[])null);
- } catch (IOException e) {
- throw new ExecutorManagerException(e);
- }
- }
-
- private Map<String, Object> callExecutorServer(ExecutionReference ref, String action, Pair<String,String> ... params) throws ExecutorManagerException {
- try {
- return callExecutorServer(ref.getHost(), ref.getPort(), action, ref.getExecId(), null, params);
- } catch (IOException e) {
- throw new ExecutorManagerException(e);
- }
- }
-
- private Map<String, Object> callExecutorServer(ExecutionReference ref, String action, String user, Pair<String,String> ... params) throws ExecutorManagerException {
- try {
- return callExecutorServer(ref.getHost(), ref.getPort(), action, ref.getExecId(), user, params);
- } catch (IOException e) {
- throw new ExecutorManagerException(e);
- }
- }
-
- private Map<String, Object> callExecutorServer(String host, int port, String action, Integer executionId, String user, Pair<String,String> ... params) throws IOException {
- URIBuilder builder = new URIBuilder();
- builder.setScheme("http")
- .setHost(host)
- .setPort(port)
- .setPath("/executor");
-
- builder.setParameter(ConnectorParams.ACTION_PARAM, action);
-
- if (executionId != null) {
- builder.setParameter(ConnectorParams.EXECID_PARAM,String.valueOf(executionId));
- }
-
- if (user != null) {
- builder.setParameter(ConnectorParams.USER_PARAM, user);
- }
-
- if (params != null) {
- for (Pair<String, String> pair: params) {
- builder.setParameter(pair.getFirst(), pair.getSecond());
- }
- }
-
- URI uri = null;
- try {
- uri = builder.build();
- } catch (URISyntaxException e) {
- throw new IOException(e);
- }
-
- ResponseHandler<String> responseHandler = new BasicResponseHandler();
-
- HttpClient httpclient = new DefaultHttpClient();
- HttpGet httpget = new HttpGet(uri);
- String response = null;
- try {
- response = httpclient.execute(httpget, responseHandler);
- } catch (IOException e) {
- throw e;
- }
- finally {
- httpclient.getConnectionManager().shutdown();
- }
-
- @SuppressWarnings("unchecked")
- Map<String, Object> jsonResponse = (Map<String, Object>)JSONUtils.parseJSONFromString(response);
- String error = (String)jsonResponse.get(ConnectorParams.RESPONSE_ERROR);
- if (error != null) {
- throw new IOException(error);
- }
-
- return jsonResponse;
- }
-
- @Override
- public Map<String, Object> callExecutorJMX(String hostPort, String action, String mBean) throws IOException {
- URIBuilder builder = new URIBuilder();
-
- String[] hostPortSplit = hostPort.split(":");
- builder.setScheme("http")
- .setHost(hostPortSplit[0])
- .setPort(Integer.parseInt(hostPortSplit[1]))
- .setPath("/jmx");
-
- builder.setParameter(action, "");
- if (mBean != null) {
- builder.setParameter(ConnectorParams.JMX_MBEAN, mBean);
- }
-
- URI uri = null;
- try {
- uri = builder.build();
- } catch (URISyntaxException e) {
- throw new IOException(e);
- }
-
- ResponseHandler<String> responseHandler = new BasicResponseHandler();
-
- HttpClient httpclient = new DefaultHttpClient();
- HttpGet httpget = new HttpGet(uri);
- String response = null;
- try {
- response = httpclient.execute(httpget, responseHandler);
- } catch (IOException e) {
- throw e;
- }
- finally {
- httpclient.getConnectionManager().shutdown();
- }
-
- @SuppressWarnings("unchecked")
- Map<String, Object> jsonResponse = (Map<String, Object>)JSONUtils.parseJSONFromString(response);
- String error = (String)jsonResponse.get(ConnectorParams.RESPONSE_ERROR);
- if (error != null) {
- throw new IOException(error);
- }
- return jsonResponse;
- }
-
- @Override
- public void shutdown() {
- executingManager.shutdown();
- }
-
- private class ExecutingManagerUpdaterThread extends Thread {
- private boolean shutdown = false;
-
- public ExecutingManagerUpdaterThread() {
- this.setName("ExecutorManagerUpdaterThread");
- }
-
- // 10 mins recently finished threshold.
- private long recentlyFinishedLifetimeMs = 600000;
- private int waitTimeIdleMs = 2000;
- private int waitTimeMs = 500;
-
- // When we have an http error, for that flow, we'll check every 10 secs, 6 times (1 mins) before we evict.
- private int numErrors = 6;
- private long errorThreshold = 10000;
-
- private void shutdown() {
- shutdown = true;
- }
-
- @SuppressWarnings("unchecked")
- public void run() {
- while(!shutdown) {
- try {
- lastThreadCheckTime = System.currentTimeMillis();
- updaterStage = "Starting update all flows.";
-
- Map<ConnectionInfo, List<ExecutableFlow>> exFlowMap = getFlowToExecutorMap();
- ArrayList<ExecutableFlow> finishedFlows = new ArrayList<ExecutableFlow>();
- ArrayList<ExecutableFlow> finalizeFlows = new ArrayList<ExecutableFlow>();
-
- if (exFlowMap.size() > 0) {
- for (Map.Entry<ConnectionInfo, List<ExecutableFlow>> entry: exFlowMap.entrySet()) {
- List<Long> updateTimesList = new ArrayList<Long>();
- List<Integer> executionIdsList = new ArrayList<Integer>();
-
- ConnectionInfo connection = entry.getKey();
-
- updaterStage = "Starting update flows on " + connection.getHost() + ":" + connection.getPort();
-
- // We pack the parameters of the same host together before we query.
- fillUpdateTimeAndExecId(entry.getValue(), executionIdsList, updateTimesList);
-
- Pair<String,String> updateTimes = new Pair<String, String>(
- ConnectorParams.UPDATE_TIME_LIST_PARAM,
- JSONUtils.toJSON(updateTimesList));
- Pair<String,String> executionIds = new Pair<String, String>(
- ConnectorParams.EXEC_ID_LIST_PARAM,
- JSONUtils.toJSON(executionIdsList));
-
-
- Map<String, Object> results = null;
- try {
- results = callExecutorServer(connection.getHost(), connection.getPort(), ConnectorParams.UPDATE_ACTION, null, null, executionIds, updateTimes);
- } catch (IOException e) {
- logger.error(e);
- for (ExecutableFlow flow: entry.getValue()) {
- Pair<ExecutionReference, ExecutableFlow> pair = runningFlows.get(flow.getExecutionId());
-
- updaterStage = "Failed to get update. Doing some clean up for flow " + pair.getSecond().getExecutionId();
-
- if (pair != null) {
- ExecutionReference ref = pair.getFirst();
- int numErrors = ref.getNumErrors();
- if (ref.getNumErrors() < this.numErrors) {
- ref.setNextCheckTime(System.currentTimeMillis() + errorThreshold);
- ref.setNumErrors(++numErrors);
- }
- else {
- logger.error("Evicting flow " + flow.getExecutionId() + ". The executor is unresponsive.");
- //TODO should send out an unresponsive email here.
- finalizeFlows.add(pair.getSecond());
- }
- }
- }
- }
-
- // We gets results
- if (results != null) {
- List<Map<String,Object>> executionUpdates = (List<Map<String,Object>>)results.get(ConnectorParams.RESPONSE_UPDATED_FLOWS);
- for (Map<String,Object> updateMap: executionUpdates) {
- try {
- ExecutableFlow flow = updateExecution(updateMap);
-
- updaterStage = "Updated flow " + flow.getExecutionId();
-
- if (isFinished(flow)) {
- finishedFlows.add(flow);
- finalizeFlows.add(flow);
- }
- } catch (ExecutorManagerException e) {
- ExecutableFlow flow = e.getExecutableFlow();
- logger.error(e);
-
- if (flow != null) {
- logger.error("Finalizing flow " + flow.getExecutionId());
- finalizeFlows.add(flow);
- }
- }
- }
- }
- }
-
- updaterStage = "Evicting old recently finished flows.";
-
- evictOldRecentlyFinished(recentlyFinishedLifetimeMs);
- // Add new finished
- for (ExecutableFlow flow: finishedFlows) {
- if(flow.getScheduleId() >= 0 && flow.getStatus() == Status.SUCCEEDED){
- ScheduleStatisticManager.invalidateCache(flow.getScheduleId(), cacheDir);
- }
- fireEventListeners(Event.create(flow, Type.FLOW_FINISHED));
- recentlyFinished.put(flow.getExecutionId(), flow);
- }
-
- updaterStage = "Finalizing " + finalizeFlows.size() + " error flows.";
-
- // Kill error flows
- for (ExecutableFlow flow: finalizeFlows) {
- finalizeFlows(flow);
- }
- }
-
- updaterStage = "Updated all active flows. Waiting for next round.";
-
- synchronized(this) {
- try {
- if (runningFlows.size() > 0) {
- this.wait(waitTimeMs);
- }
- else {
- this.wait(waitTimeIdleMs);
- }
- } catch (InterruptedException e) {
- }
- }
- }
- catch (Exception e) {
- logger.error(e);
- }
- }
- }
- }
-
- private void finalizeFlows(ExecutableFlow flow) {
-
- int execId = flow.getExecutionId();
-
- updaterStage = "finalizing flow " + execId;
- // First we check if the execution in the datastore is complete
- try {
- ExecutableFlow dsFlow;
- if(isFinished(flow)) {
- dsFlow = flow;
- }
- else {
- updaterStage = "finalizing flow " + execId + " loading from db";
- dsFlow = executorLoader.fetchExecutableFlow(execId);
-
- // If it's marked finished, we're good. If not, we fail everything and then mark it finished.
- if (!isFinished(dsFlow)) {
- updaterStage = "finalizing flow " + execId + " failing the flow";
- failEverything(dsFlow);
- executorLoader.updateExecutableFlow(dsFlow);
- }
- }
-
- updaterStage = "finalizing flow " + execId + " deleting active reference";
-
- // Delete the executing reference.
- if (flow.getEndTime() == -1) {
- flow.setEndTime(System.currentTimeMillis());
- executorLoader.updateExecutableFlow(dsFlow);
- }
- executorLoader.removeActiveExecutableReference(execId);
-
- updaterStage = "finalizing flow " + execId + " cleaning from memory";
- runningFlows.remove(execId);
- fireEventListeners(Event.create(dsFlow, Type.FLOW_FINISHED));
- recentlyFinished.put(execId, dsFlow);
-
- } catch (ExecutorManagerException e) {
- logger.error(e);
- }
-
- // TODO append to the flow log that we forced killed this flow because the target no longer had
- // the reference.
-
- updaterStage = "finalizing flow " + execId + " alerting and emailing";
- ExecutionOptions options = flow.getExecutionOptions();
- // But we can definitely email them.
- Alerter mailAlerter = alerters.get("email");
- if(flow.getStatus() == Status.FAILED || flow.getStatus() == Status.KILLED)
- {
- if(options.getFailureEmails() != null && !options.getFailureEmails().isEmpty())
- {
- try {
- mailAlerter.alertOnError(flow, "Executor no longer seems to be running this execution. Most likely due to executor bounce.");
- } catch (Exception e) {
- logger.error(e);
- }
- }
- if(options.getFlowParameters().containsKey("alert.type")) {
- String alertType = options.getFlowParameters().get("alert.type");
- Alerter alerter = alerters.get(alertType);
- if(alerter != null) {
- try {
- alerter.alertOnError(flow, "Executor no longer seems to be running this execution. Most likely due to executor bounce.");
- } catch (Exception e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- logger.error("Failed to alert by " + alertType);
- }
- }
- else {
- logger.error("Alerter type " + alertType + " doesn't exist. Failed to alert.");
- }
- }
- }
- else
- {
- if(options.getSuccessEmails() != null && !options.getSuccessEmails().isEmpty())
- {
- try {
-
- mailAlerter.alertOnSuccess(flow);
- } catch (Exception e) {
- logger.error(e);
- }
- }
- if(options.getFlowParameters().containsKey("alert.type")) {
- String alertType = options.getFlowParameters().get("alert.type");
- Alerter alerter = alerters.get(alertType);
- if(alerter != null) {
- try {
- alerter.alertOnSuccess(flow);
- } catch (Exception e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- logger.error("Failed to alert by " + alertType);
- }
- }
- else {
- logger.error("Alerter type " + alertType + " doesn't exist. Failed to alert.");
- }
- }
- }
-
- }
-
- private void failEverything(ExecutableFlow exFlow) {
- long time = System.currentTimeMillis();
- for (ExecutableNode node: exFlow.getExecutableNodes()) {
- switch(node.getStatus()) {
- case SUCCEEDED:
- case FAILED:
- case KILLED:
- case SKIPPED:
- case DISABLED:
- continue;
- //case UNKNOWN:
- case READY:
- node.setStatus(Status.KILLED);
- break;
- default:
- node.setStatus(Status.FAILED);
- break;
- }
-
- if (node.getStartTime() == -1) {
- node.setStartTime(time);
- }
- if (node.getEndTime() == -1) {
- node.setEndTime(time);
- }
- }
-
- if (exFlow.getEndTime() == -1) {
- exFlow.setEndTime(time);
- }
-
- exFlow.setStatus(Status.FAILED);
- }
-
- private void evictOldRecentlyFinished(long ageMs) {
- ArrayList<Integer> recentlyFinishedKeys = new ArrayList<Integer>(recentlyFinished.keySet());
- long oldAgeThreshold = System.currentTimeMillis() - ageMs;
- for (Integer key: recentlyFinishedKeys) {
- ExecutableFlow flow = recentlyFinished.get(key);
-
- if (flow.getEndTime() < oldAgeThreshold) {
- // Evict
- recentlyFinished.remove(key);
- }
- }
- }
-
  /**
   * Applies one update map received from an executor to the matching running
   * flow: clears the reference's error back-off state, merges the update into
   * the in-memory flow, and fires first-failure alerts when the status just
   * transitioned to FAILED_FINISHING.
   *
   * @param updateData JSON-decoded update map from the executor
   * @return the updated in-memory flow
   * @throws ExecutorManagerException if the map lacks an exec id, the
   *           execution is not in runningFlows, or the executor reported an
   *           "error" field
   */
  private ExecutableFlow updateExecution(Map<String,Object> updateData) throws ExecutorManagerException {

    Integer execId = (Integer)updateData.get(ConnectorParams.UPDATE_MAP_EXEC_ID);
    if (execId == null) {
      throw new ExecutorManagerException("Response is malformed. Need exec id to update.");
    }

    Pair<ExecutionReference, ExecutableFlow> refPair = this.runningFlows.get(execId);
    if (refPair == null) {
      throw new ExecutorManagerException("No running flow found with the execution id. Removing " + execId);
    }

    ExecutionReference ref = refPair.getFirst();
    ExecutableFlow flow = refPair.getSecond();
    if (updateData.containsKey("error")) {
      // The flow should be finished here.
      throw new ExecutorManagerException((String)updateData.get("error"), flow);
    }

    // Reset errors: a successful update clears any accumulated back-off.
    ref.setNextCheckTime(0);
    ref.setNumErrors(0);
    Status oldStatus = flow.getStatus();
    flow.applyUpdateObject(updateData);
    Status newStatus = flow.getStatus();

    ExecutionOptions options = flow.getExecutionOptions();
    if (oldStatus != newStatus && newStatus.equals(Status.FAILED_FINISHING)) {
      // We want to see if we should give an email status on first failure.
      if (options.getNotifyOnFirstFailure()) {
        Alerter mailAlerter = alerters.get("email");
        try {
          mailAlerter.alertOnFirstError(flow);
        } catch (Exception e) {
          e.printStackTrace();
          logger.error("Failed to send first error email." + e.getMessage());
        }
      }
      // Optional custom alerter selected by the "alert.type" flow parameter.
      if(options.getFlowParameters().containsKey("alert.type")) {
        String alertType = options.getFlowParameters().get("alert.type");
        Alerter alerter = alerters.get(alertType);
        if(alerter != null) {
          try {
            alerter.alertOnFirstError(flow);
          } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            logger.error("Failed to alert by " + alertType);
          }
        }
        else {
          logger.error("Alerter type " + alertType + " doesn't exist. Failed to alert.");
        }
      }
    }

    return flow;
  }
-
- public boolean isFinished(ExecutableFlow flow) {
- switch(flow.getStatus()) {
- case SUCCEEDED:
- case FAILED:
- case KILLED:
- return true;
- default:
- return false;
- }
- }
-
- private void fillUpdateTimeAndExecId(List<ExecutableFlow> flows, List<Integer> executionIds, List<Long> updateTimes) {
- for (ExecutableFlow flow: flows) {
- executionIds.add(flow.getExecutionId());
- updateTimes.add(flow.getUpdateTime());
- }
- }
-
  /**
   * Groups every running flow that is due for a status check by the executor
   * host:port it runs on, so one batched update request can be sent per
   * executor server.
   *
   * @return map of executor connection to the flows running there
   */
  private Map<ConnectionInfo, List<ExecutableFlow>> getFlowToExecutorMap() {
    HashMap<ConnectionInfo, List<ExecutableFlow>> exFlowMap = new HashMap<ConnectionInfo, List<ExecutableFlow>>();

    ConnectionInfo lastPort = new ConnectionInfo(executorHost, executorPort);
    for (Pair<ExecutionReference, ExecutableFlow> runningFlow: runningFlows.values()) {
      ExecutionReference ref = runningFlow.getFirst();
      ExecutableFlow flow = runningFlow.getSecond();

      // Skip flows whose next check time is still in the future (error
      // back-off set elsewhere).
      if (ref.getNextCheckTime() >= System.currentTimeMillis()) {
        continue;
      }

      // Reuse the previous key object when consecutive references point at
      // the same host:port — just avoids churning short-lived objects.
      if (!lastPort.isEqual(ref.getHost(), ref.getPort())) {
        lastPort = new ConnectionInfo(ref.getHost(), ref.getPort());
      }

      List<ExecutableFlow> flows = exFlowMap.get(lastPort);
      if (flows == null) {
        flows = new ArrayList<ExecutableFlow>();
        exFlowMap.put(lastPort, flows);
      }

      flows.add(flow);
    }

    return exFlowMap;
  }
-
- private static class ConnectionInfo {
- private String host;
- private int port;
-
- public ConnectionInfo(String host, int port) {
- this.host = host;
- this.port = port;
- }
-
- @SuppressWarnings("unused")
- private ConnectionInfo getOuterType() {
- return ConnectionInfo.this;
- }
-
- public boolean isEqual(String host, int port) {
- return this.port == port && this.host.equals(host);
- }
-
- public String getHost() {
- return host;
- }
-
- public int getPort() {
- return port;
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((host == null) ? 0 : host.hashCode());
- result = prime * result + port;
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- ConnectionInfo other = (ConnectionInfo) obj;
- if (host == null) {
- if (other.host != null)
- return false;
- } else if (!host.equals(other.host))
- return false;
- if (port != other.port)
- return false;
- return true;
- }
- }
-
- @Override
- public int getExecutableFlows(
- int projectId,
- String flowId,
- int from,
- int length,
- List<ExecutableFlow> outputList) throws ExecutorManagerException {
- List<ExecutableFlow> flows = executorLoader.fetchFlowHistory(
- projectId, flowId, from, length);
- outputList.addAll(flows);
- return executorLoader.fetchNumExecutableFlows(projectId, flowId);
- }
-
- @Override
- public List<ExecutableFlow> getExecutableFlows(
- int projectId, String flowId, int from, int length, Status status)
- throws ExecutorManagerException {
- return executorLoader.fetchFlowHistory(
- projectId, flowId, from, length, status);
- }
-
- /*
- * cleaner thread to clean up execution_logs, etc in DB. Runs every day.
- *
- */
- private class CleanerThread extends Thread {
- // log file retention is 1 month.
-
- // check every day
- private static final long CLEANER_THREAD_WAIT_INTERVAL_MS = 24*60*60*1000;
-
- private final long executionLogsRetentionMs;
-
- private boolean shutdown = false;
- private long lastLogCleanTime = -1;
-
- public CleanerThread(long executionLogsRetentionMs) {
- this.executionLogsRetentionMs = executionLogsRetentionMs;
- this.setName("AzkabanWebServer-Cleaner-Thread");
- }
-
- @SuppressWarnings("unused")
- public void shutdown() {
- shutdown = true;
- this.interrupt();
- }
-
- public void run() {
- while (!shutdown) {
- synchronized (this) {
- try {
- lastCleanerThreadCheckTime = System.currentTimeMillis();
-
- // Cleanup old stuff.
- long currentTime = System.currentTimeMillis();
- if (currentTime - CLEANER_THREAD_WAIT_INTERVAL_MS > lastLogCleanTime) {
- cleanExecutionLogs();
- lastLogCleanTime = currentTime;
- }
-
-
- wait(CLEANER_THREAD_WAIT_INTERVAL_MS);
- } catch (InterruptedException e) {
- logger.info("Interrupted. Probably to shut down.");
- }
- }
- }
- }
-
- private void cleanExecutionLogs() {
- logger.info("Cleaning old logs from execution_logs");
- long cutoff = DateTime.now().getMillis() - executionLogsRetentionMs;
- logger.info("Cleaning old log files before " + new DateTime(cutoff).toString());
- cleanOldExecutionLogs(DateTime.now().getMillis() - executionLogsRetentionMs);
- }
- }
+public class ExecutorManager extends EventHandler implements
+ ExecutorManagerAdapter {
  private static Logger logger = Logger.getLogger(ExecutorManager.class);
  // Persistence layer for executions; set once in the constructor.
  private ExecutorLoader executorLoader;
  // Host/port of the (single) primary executor server this manager talks to.
  private String executorHost;
  private int executorPort;

  // Daily DB-cleanup thread (execution_logs retention).
  private CleanerThread cleanerThread;

  // Executions currently in flight, keyed by execution id.
  private ConcurrentHashMap<Integer, Pair<ExecutionReference, ExecutableFlow>> runningFlows =
      new ConcurrentHashMap<Integer, Pair<ExecutionReference, ExecutableFlow>>();
  // Short-lived cache of flows that finished recently, keyed by execution id.
  private ConcurrentHashMap<Integer, ExecutableFlow> recentlyFinished =
      new ConcurrentHashMap<Integer, ExecutableFlow>();

  // Background thread that polls executors for status updates.
  private ExecutingManagerUpdaterThread executingManager;

  // Default log retention: 3 * 4 * 7 days (~12 weeks) in milliseconds.
  private static final long DEFAULT_EXECUTION_LOGS_RETENTION_MS = 3 * 4 * 7
      * 24 * 60 * 60 * 1000l;
  // Last check-in times of the two background threads (epoch millis; -1 until
  // each thread first runs).
  private long lastCleanerThreadCheckTime = -1;

  private long lastThreadCheckTime = -1;
  // Human-readable description of what the updater thread is currently doing.
  private String updaterStage = "not started";

  // Alerters keyed by type name (e.g. "email").
  private Map<String, Alerter> alerters;

  // Directory used for schedule-statistics cache invalidation.
  File cacheDir;
+
  /**
   * Creates the manager, restores in-flight executions from the datastore, and
   * starts the updater and cleaner background threads.
   *
   * @param props configuration; reads "executor.host" (default "localhost"),
   *          "executor.port" (required), "cache.directory" (default "cache")
   *          and "execution.logs.retention.ms"
   * @param loader persistence layer for executions
   * @param alters alerters keyed by type name
   * @throws ExecutorManagerException if active flows cannot be loaded
   */
  public ExecutorManager(Props props, ExecutorLoader loader,
      Map<String, Alerter> alters) throws ExecutorManagerException {
    this.executorLoader = loader;
    this.loadRunningFlows();
    executorHost = props.getString("executor.host", "localhost");
    executorPort = props.getInt("executor.port");

    alerters = alters;

    cacheDir = new File(props.getString("cache.directory", "cache"));

    executingManager = new ExecutingManagerUpdaterThread();
    executingManager.start();

    long executionLogsRetentionMs =
        props.getLong("execution.logs.retention.ms",
            DEFAULT_EXECUTION_LOGS_RETENTION_MS);
    cleanerThread = new CleanerThread(executionLogsRetentionMs);
    cleanerThread.start();

  }
+
+ @Override
+ public State getExecutorManagerThreadState() {
+ return executingManager.getState();
+ }
+
+ public String getExecutorThreadStage() {
+ return updaterStage;
+ }
+
+ @Override
+ public boolean isExecutorManagerThreadActive() {
+ return executingManager.isAlive();
+ }
+
+ @Override
+ public long getLastExecutorManagerThreadCheckTime() {
+ return lastThreadCheckTime;
+ }
+
+ public long getLastCleanerThreadCheckTime() {
+ return this.lastCleanerThreadCheckTime;
+ }
+
+ @Override
+ public Set<String> getPrimaryServerHosts() {
+ // Only one for now. More probably later.
+ HashSet<String> ports = new HashSet<String>();
+ ports.add(executorHost + ":" + executorPort);
+ return ports;
+ }
+
+ @Override
+ public Set<String> getAllActiveExecutorServerHosts() {
+ // Includes non primary server/hosts
+ HashSet<String> ports = new HashSet<String>();
+ ports.add(executorHost + ":" + executorPort);
+ for (Pair<ExecutionReference, ExecutableFlow> running : runningFlows
+ .values()) {
+ ExecutionReference ref = running.getFirst();
+ ports.add(ref.getHost() + ":" + ref.getPort());
+ }
+
+ return ports;
+ }
+
  // Restores the in-memory running-flow map from the datastore's active flows
  // (called once at construction).
  private void loadRunningFlows() throws ExecutorManagerException {
    runningFlows.putAll(executorLoader.fetchActiveFlows());
  }
+
+ @Override
+ public List<Integer> getRunningFlows(int projectId, String flowId) {
+ ArrayList<Integer> executionIds = new ArrayList<Integer>();
+ for (Pair<ExecutionReference, ExecutableFlow> ref : runningFlows.values()) {
+ if (ref.getSecond().getFlowId().equals(flowId)
+ && ref.getSecond().getProjectId() == projectId) {
+ executionIds.add(ref.getFirst().getExecId());
+ }
+ }
+ return executionIds;
+ }
+
+ @Override
+ public boolean isFlowRunning(int projectId, String flowId) {
+ for (Pair<ExecutionReference, ExecutableFlow> ref : runningFlows.values()) {
+ if (ref.getSecond().getProjectId() == projectId
+ && ref.getSecond().getFlowId().equals(flowId)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @Override
+ public ExecutableFlow getExecutableFlow(int execId)
+ throws ExecutorManagerException {
+ Pair<ExecutionReference, ExecutableFlow> active = runningFlows.get(execId);
+ if (active == null) {
+ return executorLoader.fetchExecutableFlow(execId);
+ }
+ return active.getSecond();
+ }
+
+ @Override
+ public List<ExecutableFlow> getRunningFlows() {
+ ArrayList<ExecutableFlow> flows = new ArrayList<ExecutableFlow>();
+ for (Pair<ExecutionReference, ExecutableFlow> ref : runningFlows.values()) {
+ flows.add(ref.getSecond());
+ }
+ return flows;
+ }
+
+ public String getRunningFlowIds() {
+ List<Integer> allIds = new ArrayList<Integer>();
+ for (Pair<ExecutionReference, ExecutableFlow> ref : runningFlows.values()) {
+ allIds.add(ref.getSecond().getExecutionId());
+ }
+ Collections.sort(allIds);
+ return allIds.toString();
+ }
+
+ public List<ExecutableFlow> getRecentlyFinishedFlows() {
+ return new ArrayList<ExecutableFlow>(recentlyFinished.values());
+ }
+
+ @Override
+ public List<ExecutableFlow> getExecutableFlows(Project project,
+ String flowId, int skip, int size) throws ExecutorManagerException {
+ List<ExecutableFlow> flows =
+ executorLoader.fetchFlowHistory(project.getId(), flowId, skip, size);
+ return flows;
+ }
+
+ @Override
+ public List<ExecutableFlow> getExecutableFlows(int skip, int size)
+ throws ExecutorManagerException {
+ List<ExecutableFlow> flows = executorLoader.fetchFlowHistory(skip, size);
+ return flows;
+ }
+
+ @Override
+ public List<ExecutableFlow> getExecutableFlows(String flowIdContains,
+ int skip, int size) throws ExecutorManagerException {
+ List<ExecutableFlow> flows =
+ executorLoader.fetchFlowHistory(null, '%' + flowIdContains + '%', null,
+ 0, -1, -1, skip, size);
+ return flows;
+ }
+
+ @Override
+ public List<ExecutableFlow> getExecutableFlows(String projContain,
+ String flowContain, String userContain, int status, long begin, long end,
+ int skip, int size) throws ExecutorManagerException {
+ List<ExecutableFlow> flows =
+ executorLoader.fetchFlowHistory(projContain, flowContain, userContain,
+ status, begin, end, skip, size);
+ return flows;
+ }
+
+ @Override
+ public List<ExecutableJobInfo> getExecutableJobs(Project project,
+ String jobId, int skip, int size) throws ExecutorManagerException {
+ List<ExecutableJobInfo> nodes =
+ executorLoader.fetchJobHistory(project.getId(), jobId, skip, size);
+ return nodes;
+ }
+
+ @Override
+ public int getNumberOfJobExecutions(Project project, String jobId)
+ throws ExecutorManagerException {
+ return executorLoader.fetchNumExecutableNodes(project.getId(), jobId);
+ }
+
+ @Override
+ public int getNumberOfExecutions(Project project, String flowId)
+ throws ExecutorManagerException {
+ return executorLoader.fetchNumExecutableFlows(project.getId(), flowId);
+ }
+
  /**
   * Fetches a chunk of the flow-level log. While the execution is running the
   * request is proxied to the live executor; afterwards the log is read from
   * the datastore.
   *
   * @param offset byte offset into the log
   * @param length maximum number of bytes to return
   */
  @Override
  public LogData getExecutableFlowLog(ExecutableFlow exFlow, int offset,
      int length) throws ExecutorManagerException {
    Pair<ExecutionReference, ExecutableFlow> pair =
        runningFlows.get(exFlow.getExecutionId());
    if (pair != null) {
      Pair<String, String> typeParam = new Pair<String, String>("type", "flow");
      Pair<String, String> offsetParam =
          new Pair<String, String>("offset", String.valueOf(offset));
      Pair<String, String> lengthParam =
          new Pair<String, String>("length", String.valueOf(length));

      @SuppressWarnings("unchecked")
      Map<String, Object> result =
          callExecutorServer(pair.getFirst(), ConnectorParams.LOG_ACTION,
              typeParam, offsetParam, lengthParam);
      return LogData.createLogDataFromObject(result);
    } else {
      // Not running: empty job id / attempt 0 selects the flow-level log rows.
      LogData value =
          executorLoader.fetchLogs(exFlow.getExecutionId(), "", 0, offset,
              length);
      return value;
    }
  }

  /**
   * Fetches a chunk of one job's log for the given attempt. Proxied to the
   * live executor while running, otherwise read from the datastore.
   */
  @Override
  public LogData getExecutionJobLog(ExecutableFlow exFlow, String jobId,
      int offset, int length, int attempt) throws ExecutorManagerException {
    Pair<ExecutionReference, ExecutableFlow> pair =
        runningFlows.get(exFlow.getExecutionId());
    if (pair != null) {
      Pair<String, String> typeParam = new Pair<String, String>("type", "job");
      Pair<String, String> jobIdParam =
          new Pair<String, String>("jobId", jobId);
      Pair<String, String> offsetParam =
          new Pair<String, String>("offset", String.valueOf(offset));
      Pair<String, String> lengthParam =
          new Pair<String, String>("length", String.valueOf(length));
      Pair<String, String> attemptParam =
          new Pair<String, String>("attempt", String.valueOf(attempt));

      @SuppressWarnings("unchecked")
      Map<String, Object> result =
          callExecutorServer(pair.getFirst(), ConnectorParams.LOG_ACTION,
              typeParam, jobIdParam, offsetParam, lengthParam, attemptParam);
      return LogData.createLogDataFromObject(result);
    } else {
      LogData value =
          executorLoader.fetchLogs(exFlow.getExecutionId(), jobId, attempt,
              offset, length);
      return value;
    }
  }
+
  /**
   * Returns the job attempt's attachment/stats objects. Finished executions
   * come from the datastore; running ones are fetched from the live executor.
   */
  @Override
  public List<Object> getExecutionJobStats(ExecutableFlow exFlow, String jobId,
      int attempt) throws ExecutorManagerException {
    Pair<ExecutionReference, ExecutableFlow> pair =
        runningFlows.get(exFlow.getExecutionId());
    if (pair == null) {
      return executorLoader.fetchAttachments(exFlow.getExecutionId(), jobId,
          attempt);
    }

    Pair<String, String> jobIdParam = new Pair<String, String>("jobId", jobId);
    Pair<String, String> attemptParam =
        new Pair<String, String>("attempt", String.valueOf(attempt));

    @SuppressWarnings("unchecked")
    Map<String, Object> result =
        callExecutorServer(pair.getFirst(), ConnectorParams.ATTACHMENTS_ACTION,
            jobIdParam, attemptParam);

    @SuppressWarnings("unchecked")
    List<Object> jobStats = (List<Object>) result.get("attachments");

    return jobStats;
  }

  /**
   * Fetches a chunk of a job's metadata from the live executor.
   *
   * @return the metadata, or null when the execution is no longer running
   *         (metadata is only available from a live executor).
   */
  @Override
  public JobMetaData getExecutionJobMetaData(ExecutableFlow exFlow,
      String jobId, int offset, int length, int attempt)
      throws ExecutorManagerException {
    Pair<ExecutionReference, ExecutableFlow> pair =
        runningFlows.get(exFlow.getExecutionId());
    if (pair != null) {

      Pair<String, String> typeParam = new Pair<String, String>("type", "job");
      Pair<String, String> jobIdParam =
          new Pair<String, String>("jobId", jobId);
      Pair<String, String> offsetParam =
          new Pair<String, String>("offset", String.valueOf(offset));
      Pair<String, String> lengthParam =
          new Pair<String, String>("length", String.valueOf(length));
      Pair<String, String> attemptParam =
          new Pair<String, String>("attempt", String.valueOf(attempt));

      @SuppressWarnings("unchecked")
      Map<String, Object> result =
          callExecutorServer(pair.getFirst(), ConnectorParams.METADATA_ACTION,
              typeParam, jobIdParam, offsetParam, lengthParam, attemptParam);
      return JobMetaData.createJobMetaDataFromObject(result);
    } else {
      return null;
    }
  }
+
+ @Override
+ public void cancelFlow(ExecutableFlow exFlow, String userId)
+ throws ExecutorManagerException {
+ synchronized (exFlow) {
+ Pair<ExecutionReference, ExecutableFlow> pair =
+ runningFlows.get(exFlow.getExecutionId());
+ if (pair == null) {
+ throw new ExecutorManagerException("Execution "
+ + exFlow.getExecutionId() + " of flow " + exFlow.getFlowId()
+ + " isn't running.");
+ }
+ callExecutorServer(pair.getFirst(), ConnectorParams.CANCEL_ACTION, userId);
+ }
+ }
+
+ @Override
+ public void resumeFlow(ExecutableFlow exFlow, String userId)
+ throws ExecutorManagerException {
+ synchronized (exFlow) {
+ Pair<ExecutionReference, ExecutableFlow> pair =
+ runningFlows.get(exFlow.getExecutionId());
+ if (pair == null) {
+ throw new ExecutorManagerException("Execution "
+ + exFlow.getExecutionId() + " of flow " + exFlow.getFlowId()
+ + " isn't running.");
+ }
+ callExecutorServer(pair.getFirst(), ConnectorParams.RESUME_ACTION, userId);
+ }
+ }
+
+ @Override
+ public void pauseFlow(ExecutableFlow exFlow, String userId)
+ throws ExecutorManagerException {
+ synchronized (exFlow) {
+ Pair<ExecutionReference, ExecutableFlow> pair =
+ runningFlows.get(exFlow.getExecutionId());
+ if (pair == null) {
+ throw new ExecutorManagerException("Execution "
+ + exFlow.getExecutionId() + " of flow " + exFlow.getFlowId()
+ + " isn't running.");
+ }
+ callExecutorServer(pair.getFirst(), ConnectorParams.PAUSE_ACTION, userId);
+ }
+ }
+
+ @Override
+ public void pauseExecutingJobs(ExecutableFlow exFlow, String userId,
+ String... jobIds) throws ExecutorManagerException {
+ modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_PAUSE_JOBS, userId,
+ jobIds);
+ }
+
+ @Override
+ public void resumeExecutingJobs(ExecutableFlow exFlow, String userId,
+ String... jobIds) throws ExecutorManagerException {
+ modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_RESUME_JOBS, userId,
+ jobIds);
+ }
+
+ @Override
+ public void retryFailures(ExecutableFlow exFlow, String userId)
+ throws ExecutorManagerException {
+ modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_RETRY_FAILURES, userId);
+ }
+
+ @Override
+ public void retryExecutingJobs(ExecutableFlow exFlow, String userId,
+ String... jobIds) throws ExecutorManagerException {
+ modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_RETRY_JOBS, userId,
+ jobIds);
+ }
+
+ @Override
+ public void disableExecutingJobs(ExecutableFlow exFlow, String userId,
+ String... jobIds) throws ExecutorManagerException {
+ modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_DISABLE_JOBS, userId,
+ jobIds);
+ }
+
+ @Override
+ public void enableExecutingJobs(ExecutableFlow exFlow, String userId,
+ String... jobIds) throws ExecutorManagerException {
+ modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_ENABLE_JOBS, userId,
+ jobIds);
+ }
+
+ @Override
+ public void cancelExecutingJobs(ExecutableFlow exFlow, String userId,
+ String... jobIds) throws ExecutorManagerException {
+ modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_CANCEL_JOBS, userId,
+ jobIds);
+ }
+
  /**
   * Sends a modify-execution command (pause/resume/retry/disable/enable/cancel
   * jobs) to the executor running the flow.
   *
   * @param command one of the ConnectorParams.MODIFY_* constants
   * @param jobIds optional job names; when present each must exist in the flow
   * @return the executor's JSON response
   * @throws ExecutorManagerException if the execution isn't running or a named
   *           job doesn't exist in it
   */
  @SuppressWarnings("unchecked")
  private Map<String, Object> modifyExecutingJobs(ExecutableFlow exFlow,
      String command, String userId, String... jobIds)
      throws ExecutorManagerException {
    synchronized (exFlow) {
      Pair<ExecutionReference, ExecutableFlow> pair =
          runningFlows.get(exFlow.getExecutionId());
      if (pair == null) {
        throw new ExecutorManagerException("Execution "
            + exFlow.getExecutionId() + " of flow " + exFlow.getFlowId()
            + " isn't running.");
      }

      Map<String, Object> response = null;
      if (jobIds != null && jobIds.length > 0) {
        // Validate every named job before sending anything to the executor.
        for (String jobId : jobIds) {
          if (!jobId.isEmpty()) {
            ExecutableNode node = exFlow.getExecutableNode(jobId);
            if (node == null) {
              throw new ExecutorManagerException("Job " + jobId
                  + " doesn't exist in execution " + exFlow.getExecutionId()
                  + ".");
            }
          }
        }
        String ids = StringUtils.join(jobIds, ',');
        response =
            callExecutorServer(pair.getFirst(),
                ConnectorParams.MODIFY_EXECUTION_ACTION, userId,
                new Pair<String, String>(
                    ConnectorParams.MODIFY_EXECUTION_ACTION_TYPE, command),
                new Pair<String, String>(ConnectorParams.MODIFY_JOBS_LIST, ids));
      } else {
        // No job list: the command applies to the whole execution.
        response =
            callExecutorServer(pair.getFirst(),
                ConnectorParams.MODIFY_EXECUTION_ACTION, userId,
                new Pair<String, String>(
                    ConnectorParams.MODIFY_EXECUTION_ACTION_TYPE, command));
      }

      return response;
    }
  }
+
+ private void applyDisabledJobs(List<Object> disabledJobs,
+ ExecutableFlowBase exflow) {
+ for (Object disabled : disabledJobs) {
+ if (disabled instanceof String) {
+ String nodeName = (String) disabled;
+ ExecutableNode node = exflow.getExecutableNode(nodeName);
+ if (node != null) {
+ node.setStatus(Status.DISABLED);
+ }
+ } else if (disabled instanceof Map) {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> nestedDisabled = (Map<String, Object>) disabled;
+ String nodeName = (String) nestedDisabled.get("id");
+ @SuppressWarnings("unchecked")
+ List<Object> subDisabledJobs =
+ (List<Object>) nestedDisabled.get("children");
+
+ if (nodeName == null || subDisabledJobs == null) {
+ return;
+ }
+
+ ExecutableNode node = exflow.getExecutableNode(nodeName);
+ if (node != null && node instanceof ExecutableFlowBase) {
+ applyDisabledJobs(subDisabledJobs, (ExecutableFlowBase) node);
+ }
+ }
+ }
+ }
+
  /**
   * Submits a flow for execution: applies disabled jobs, resolves concurrent
   * execution policy (pipeline / skip / run-concurrently), persists the flow,
   * registers the active reference, and dispatches it to the executor server.
   * The active reference is rolled back if dispatch fails.
   *
   * @return a human-readable status message for the submitter
   * @throws ExecutorManagerException on persistence/dispatch failure, or with
   *           Reason.SkippedExecution when the concurrent option is "skip" and
   *           the flow is already running
   */
  @Override
  public String submitExecutableFlow(ExecutableFlow exflow, String userId)
      throws ExecutorManagerException {
    synchronized (exflow) {
      logger.info("Submitting execution flow " + exflow.getFlowId() + " by "
          + userId);

      int projectId = exflow.getProjectId();
      String flowId = exflow.getFlowId();
      exflow.setSubmitUser(userId);
      exflow.setSubmitTime(System.currentTimeMillis());

      List<Integer> running = getRunningFlows(projectId, flowId);

      ExecutionOptions options = exflow.getExecutionOptions();
      if (options == null) {
        options = new ExecutionOptions();
      }

      String message = "";
      if (options.getDisabledJobs() != null) {
        applyDisabledJobs(options.getDisabledJobs(), exflow);
      }

      if (!running.isEmpty()) {
        if (options.getConcurrentOption().equals(
            ExecutionOptions.CONCURRENT_OPTION_PIPELINE)) {
          // Pipeline behind the most recent running execution.
          Collections.sort(running);
          Integer runningExecId = running.get(running.size() - 1);

          options.setPipelineExecutionId(runningExecId);
          message =
              "Flow " + flowId + " is already running with exec id "
                  + runningExecId + ". Pipelining level "
                  + options.getPipelineLevel() + ". \n";
        } else if (options.getConcurrentOption().equals(
            ExecutionOptions.CONCURRENT_OPTION_SKIP)) {
          throw new ExecutorManagerException("Flow " + flowId
              + " is already running. Skipping execution.",
              ExecutorManagerException.Reason.SkippedExecution);
        } else {
          // The settings is to run anyways.
          message =
              "Flow " + flowId + " is already running with exec id "
                  + StringUtils.join(running, ",")
                  + ". Will execute concurrently. \n";
        }
      }

      // The exflow id is set by the loader. So it's unavailable until after
      // this call.
      executorLoader.uploadExecutableFlow(exflow);

      // We create an active flow reference in the datastore. If the upload
      // fails, we remove the reference.
      ExecutionReference reference =
          new ExecutionReference(exflow.getExecutionId(), executorHost,
              executorPort);
      executorLoader.addActiveExecutableReference(reference);
      try {
        callExecutorServer(reference, ConnectorParams.EXECUTE_ACTION);
        runningFlows.put(exflow.getExecutionId(),
            new Pair<ExecutionReference, ExecutableFlow>(reference, exflow));

        message +=
            "Execution submitted successfully with exec id "
                + exflow.getExecutionId();
      } catch (ExecutorManagerException e) {
        // Dispatch failed: roll back the active reference so the updater
        // doesn't try to track a flow that never started.
        executorLoader.removeActiveExecutableReference(reference.getExecId());
        throw e;
      }

      return message;
    }
  }
+
+ private void cleanOldExecutionLogs(long millis) {
+ try {
+ int count = executorLoader.removeExecutionLogsByTime(millis);
+ logger.info("Cleaned up " + count + " log entries.");
+ } catch (ExecutorManagerException e) {
+ e.printStackTrace();
+ }
+ }
+
+ private Map<String, Object> callExecutorServer(ExecutionReference ref,
+ String action) throws ExecutorManagerException {
+ try {
+ return callExecutorServer(ref.getHost(), ref.getPort(), action,
+ ref.getExecId(), null, (Pair<String, String>[]) null);
+ } catch (IOException e) {
+ throw new ExecutorManagerException(e);
+ }
+ }
+
+ private Map<String, Object> callExecutorServer(ExecutionReference ref,
+ String action, String user) throws ExecutorManagerException {
+ try {
+ return callExecutorServer(ref.getHost(), ref.getPort(), action,
+ ref.getExecId(), user, (Pair<String, String>[]) null);
+ } catch (IOException e) {
+ throw new ExecutorManagerException(e);
+ }
+ }
+
+ private Map<String, Object> callExecutorServer(ExecutionReference ref,
+ String action, Pair<String, String>... params)
+ throws ExecutorManagerException {
+ try {
+ return callExecutorServer(ref.getHost(), ref.getPort(), action,
+ ref.getExecId(), null, params);
+ } catch (IOException e) {
+ throw new ExecutorManagerException(e);
+ }
+ }
+
+ private Map<String, Object> callExecutorServer(ExecutionReference ref,
+ String action, String user, Pair<String, String>... params)
+ throws ExecutorManagerException {
+ try {
+ return callExecutorServer(ref.getHost(), ref.getPort(), action,
+ ref.getExecId(), user, params);
+ } catch (IOException e) {
+ throw new ExecutorManagerException(e);
+ }
+ }
+
+ private Map<String, Object> callExecutorServer(String host, int port,
+ String action, Integer executionId, String user,
+ Pair<String, String>... params) throws IOException {
+ URIBuilder builder = new URIBuilder();
+ builder.setScheme("http").setHost(host).setPort(port).setPath("/executor");
+
+ builder.setParameter(ConnectorParams.ACTION_PARAM, action);
+
+ if (executionId != null) {
+ builder.setParameter(ConnectorParams.EXECID_PARAM,
+ String.valueOf(executionId));
+ }
+
+ if (user != null) {
+ builder.setParameter(ConnectorParams.USER_PARAM, user);
+ }
+
+ if (params != null) {
+ for (Pair<String, String> pair : params) {
+ builder.setParameter(pair.getFirst(), pair.getSecond());
+ }
+ }
+
+ URI uri = null;
+ try {
+ uri = builder.build();
+ } catch (URISyntaxException e) {
+ throw new IOException(e);
+ }
+
+ ResponseHandler<String> responseHandler = new BasicResponseHandler();
+
+ HttpClient httpclient = new DefaultHttpClient();
+ HttpGet httpget = new HttpGet(uri);
+ String response = null;
+ try {
+ response = httpclient.execute(httpget, responseHandler);
+ } catch (IOException e) {
+ throw e;
+ } finally {
+ httpclient.getConnectionManager().shutdown();
+ }
+
+ @SuppressWarnings("unchecked")
+ Map<String, Object> jsonResponse =
+ (Map<String, Object>) JSONUtils.parseJSONFromString(response);
+ String error = (String) jsonResponse.get(ConnectorParams.RESPONSE_ERROR);
+ if (error != null) {
+ throw new IOException(error);
+ }
+
+ return jsonResponse;
+ }
+
+ @Override
+ public Map<String, Object> callExecutorJMX(String hostPort, String action,
+ String mBean) throws IOException {
+ URIBuilder builder = new URIBuilder();
+
+ String[] hostPortSplit = hostPort.split(":");
+ builder.setScheme("http").setHost(hostPortSplit[0])
+ .setPort(Integer.parseInt(hostPortSplit[1])).setPath("/jmx");
+
+ builder.setParameter(action, "");
+ if (mBean != null) {
+ builder.setParameter(ConnectorParams.JMX_MBEAN, mBean);
+ }
+
+ URI uri = null;
+ try {
+ uri = builder.build();
+ } catch (URISyntaxException e) {
+ throw new IOException(e);
+ }
+
+ ResponseHandler<String> responseHandler = new BasicResponseHandler();
+
+ HttpClient httpclient = new DefaultHttpClient();
+ HttpGet httpget = new HttpGet(uri);
+ String response = null;
+ try {
+ response = httpclient.execute(httpget, responseHandler);
+ } catch (IOException e) {
+ throw e;
+ } finally {
+ httpclient.getConnectionManager().shutdown();
+ }
+
+ @SuppressWarnings("unchecked")
+ Map<String, Object> jsonResponse =
+ (Map<String, Object>) JSONUtils.parseJSONFromString(response);
+ String error = (String) jsonResponse.get(ConnectorParams.RESPONSE_ERROR);
+ if (error != null) {
+ throw new IOException(error);
+ }
+ return jsonResponse;
+ }
+
+ @Override
+ public void shutdown() {
+ executingManager.shutdown();
+ }
+
+ private class ExecutingManagerUpdaterThread extends Thread {
+ private boolean shutdown = false;
+
+ public ExecutingManagerUpdaterThread() {
+ this.setName("ExecutorManagerUpdaterThread");
+ }
+
+ // 10 mins recently finished threshold.
+ private long recentlyFinishedLifetimeMs = 600000;
+ private int waitTimeIdleMs = 2000;
+ private int waitTimeMs = 500;
+
+ // When we have an http error, for that flow, we'll check every 10 secs, 6
+    // times (1 min) before we evict.
+ private int numErrors = 6;
+ private long errorThreshold = 10000;
+
+ private void shutdown() {
+ shutdown = true;
+ }
+
+ @SuppressWarnings("unchecked")
+ public void run() {
+ while (!shutdown) {
+ try {
+ lastThreadCheckTime = System.currentTimeMillis();
+ updaterStage = "Starting update all flows.";
+
+ Map<ConnectionInfo, List<ExecutableFlow>> exFlowMap =
+ getFlowToExecutorMap();
+ ArrayList<ExecutableFlow> finishedFlows =
+ new ArrayList<ExecutableFlow>();
+ ArrayList<ExecutableFlow> finalizeFlows =
+ new ArrayList<ExecutableFlow>();
+
+ if (exFlowMap.size() > 0) {
+ for (Map.Entry<ConnectionInfo, List<ExecutableFlow>> entry : exFlowMap
+ .entrySet()) {
+ List<Long> updateTimesList = new ArrayList<Long>();
+ List<Integer> executionIdsList = new ArrayList<Integer>();
+
+ ConnectionInfo connection = entry.getKey();
+
+ updaterStage =
+ "Starting update flows on " + connection.getHost() + ":"
+ + connection.getPort();
+
+ // We pack the parameters of the same host together before we
+ // query.
+ fillUpdateTimeAndExecId(entry.getValue(), executionIdsList,
+ updateTimesList);
+
+ Pair<String, String> updateTimes =
+ new Pair<String, String>(
+ ConnectorParams.UPDATE_TIME_LIST_PARAM,
+ JSONUtils.toJSON(updateTimesList));
+ Pair<String, String> executionIds =
+ new Pair<String, String>(ConnectorParams.EXEC_ID_LIST_PARAM,
+ JSONUtils.toJSON(executionIdsList));
+
+ Map<String, Object> results = null;
+ try {
+ results =
+ callExecutorServer(connection.getHost(),
+ connection.getPort(), ConnectorParams.UPDATE_ACTION,
+ null, null, executionIds, updateTimes);
+ } catch (IOException e) {
+ logger.error(e);
+ for (ExecutableFlow flow : entry.getValue()) {
+ Pair<ExecutionReference, ExecutableFlow> pair =
+ runningFlows.get(flow.getExecutionId());
+
+ updaterStage =
+ "Failed to get update. Doing some clean up for flow "
+                    + flow.getExecutionId();
+
+ if (pair != null) {
+ ExecutionReference ref = pair.getFirst();
+                int errorCount = ref.getNumErrors();
+                if (ref.getNumErrors() < this.numErrors) {
+                  ref.setNextCheckTime(System.currentTimeMillis()
+                      + errorThreshold);
+                  ref.setNumErrors(++errorCount);
+ } else {
+ logger.error("Evicting flow " + flow.getExecutionId()
+ + ". The executor is unresponsive.");
+ // TODO should send out an unresponsive email here.
+ finalizeFlows.add(pair.getSecond());
+ }
+ }
+ }
+ }
+
+          // We got results.
+ if (results != null) {
+ List<Map<String, Object>> executionUpdates =
+ (List<Map<String, Object>>) results
+ .get(ConnectorParams.RESPONSE_UPDATED_FLOWS);
+ for (Map<String, Object> updateMap : executionUpdates) {
+ try {
+ ExecutableFlow flow = updateExecution(updateMap);
+
+ updaterStage = "Updated flow " + flow.getExecutionId();
+
+ if (isFinished(flow)) {
+ finishedFlows.add(flow);
+ finalizeFlows.add(flow);
+ }
+ } catch (ExecutorManagerException e) {
+ ExecutableFlow flow = e.getExecutableFlow();
+ logger.error(e);
+
+ if (flow != null) {
+ logger.error("Finalizing flow " + flow.getExecutionId());
+ finalizeFlows.add(flow);
+ }
+ }
+ }
+ }
+ }
+
+ updaterStage = "Evicting old recently finished flows.";
+
+ evictOldRecentlyFinished(recentlyFinishedLifetimeMs);
+ // Add new finished
+ for (ExecutableFlow flow : finishedFlows) {
+ if (flow.getScheduleId() >= 0
+ && flow.getStatus() == Status.SUCCEEDED) {
+ ScheduleStatisticManager.invalidateCache(flow.getScheduleId(),
+ cacheDir);
+ }
+ fireEventListeners(Event.create(flow, Type.FLOW_FINISHED));
+ recentlyFinished.put(flow.getExecutionId(), flow);
+ }
+
+ updaterStage =
+ "Finalizing " + finalizeFlows.size() + " error flows.";
+
+ // Kill error flows
+ for (ExecutableFlow flow : finalizeFlows) {
+ finalizeFlows(flow);
+ }
+ }
+
+ updaterStage = "Updated all active flows. Waiting for next round.";
+
+ synchronized (this) {
+ try {
+ if (runningFlows.size() > 0) {
+ this.wait(waitTimeMs);
+ } else {
+ this.wait(waitTimeIdleMs);
+ }
+ } catch (InterruptedException e) {
+ }
+ }
+ } catch (Exception e) {
+ logger.error(e);
+ }
+ }
+ }
+ }
+
+ private void finalizeFlows(ExecutableFlow flow) {
+
+ int execId = flow.getExecutionId();
+
+ updaterStage = "finalizing flow " + execId;
+ // First we check if the execution in the datastore is complete
+ try {
+ ExecutableFlow dsFlow;
+ if (isFinished(flow)) {
+ dsFlow = flow;
+ } else {
+ updaterStage = "finalizing flow " + execId + " loading from db";
+ dsFlow = executorLoader.fetchExecutableFlow(execId);
+
+ // If it's marked finished, we're good. If not, we fail everything and
+ // then mark it finished.
+ if (!isFinished(dsFlow)) {
+ updaterStage = "finalizing flow " + execId + " failing the flow";
+ failEverything(dsFlow);
+ executorLoader.updateExecutableFlow(dsFlow);
+ }
+ }
+
+ updaterStage = "finalizing flow " + execId + " deleting active reference";
+
+ // Delete the executing reference.
+      if (dsFlow.getEndTime() == -1) {
+        dsFlow.setEndTime(System.currentTimeMillis());
+ executorLoader.updateExecutableFlow(dsFlow);
+ }
+ executorLoader.removeActiveExecutableReference(execId);
+
+ updaterStage = "finalizing flow " + execId + " cleaning from memory";
+ runningFlows.remove(execId);
+ fireEventListeners(Event.create(dsFlow, Type.FLOW_FINISHED));
+ recentlyFinished.put(execId, dsFlow);
+
+ } catch (ExecutorManagerException e) {
+ logger.error(e);
+ }
+
+ // TODO append to the flow log that we forced killed this flow because the
+ // target no longer had
+ // the reference.
+
+ updaterStage = "finalizing flow " + execId + " alerting and emailing";
+ ExecutionOptions options = flow.getExecutionOptions();
+ // But we can definitely email them.
+ Alerter mailAlerter = alerters.get("email");
+ if (flow.getStatus() == Status.FAILED || flow.getStatus() == Status.KILLED) {
+ if (options.getFailureEmails() != null
+ && !options.getFailureEmails().isEmpty()) {
+ try {
+ mailAlerter
+ .alertOnError(
+ flow,
+ "Executor no longer seems to be running this execution. Most likely due to executor bounce.");
+ } catch (Exception e) {
+ logger.error(e);
+ }
+ }
+ if (options.getFlowParameters().containsKey("alert.type")) {
+ String alertType = options.getFlowParameters().get("alert.type");
+ Alerter alerter = alerters.get(alertType);
+ if (alerter != null) {
+ try {
+ alerter
+ .alertOnError(
+ flow,
+ "Executor no longer seems to be running this execution. Most likely due to executor bounce.");
+ } catch (Exception e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ logger.error("Failed to alert by " + alertType);
+ }
+ } else {
+ logger.error("Alerter type " + alertType
+ + " doesn't exist. Failed to alert.");
+ }
+ }
+ } else {
+ if (options.getSuccessEmails() != null
+ && !options.getSuccessEmails().isEmpty()) {
+ try {
+
+ mailAlerter.alertOnSuccess(flow);
+ } catch (Exception e) {
+ logger.error(e);
+ }
+ }
+ if (options.getFlowParameters().containsKey("alert.type")) {
+ String alertType = options.getFlowParameters().get("alert.type");
+ Alerter alerter = alerters.get(alertType);
+ if (alerter != null) {
+ try {
+ alerter.alertOnSuccess(flow);
+ } catch (Exception e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ logger.error("Failed to alert by " + alertType);
+ }
+ } else {
+ logger.error("Alerter type " + alertType
+ + " doesn't exist. Failed to alert.");
+ }
+ }
+ }
+
+ }
+
+ private void failEverything(ExecutableFlow exFlow) {
+ long time = System.currentTimeMillis();
+ for (ExecutableNode node : exFlow.getExecutableNodes()) {
+ switch (node.getStatus()) {
+ case SUCCEEDED:
+ case FAILED:
+ case KILLED:
+ case SKIPPED:
+ case DISABLED:
+ continue;
+ // case UNKNOWN:
+ case READY:
+ node.setStatus(Status.KILLED);
+ break;
+ default:
+ node.setStatus(Status.FAILED);
+ break;
+ }
+
+ if (node.getStartTime() == -1) {
+ node.setStartTime(time);
+ }
+ if (node.getEndTime() == -1) {
+ node.setEndTime(time);
+ }
+ }
+
+ if (exFlow.getEndTime() == -1) {
+ exFlow.setEndTime(time);
+ }
+
+ exFlow.setStatus(Status.FAILED);
+ }
+
+ private void evictOldRecentlyFinished(long ageMs) {
+ ArrayList<Integer> recentlyFinishedKeys =
+ new ArrayList<Integer>(recentlyFinished.keySet());
+ long oldAgeThreshold = System.currentTimeMillis() - ageMs;
+ for (Integer key : recentlyFinishedKeys) {
+ ExecutableFlow flow = recentlyFinished.get(key);
+
+ if (flow.getEndTime() < oldAgeThreshold) {
+ // Evict
+ recentlyFinished.remove(key);
+ }
+ }
+ }
+
+ private ExecutableFlow updateExecution(Map<String, Object> updateData)
+ throws ExecutorManagerException {
+
+ Integer execId =
+ (Integer) updateData.get(ConnectorParams.UPDATE_MAP_EXEC_ID);
+ if (execId == null) {
+ throw new ExecutorManagerException(
+ "Response is malformed. Need exec id to update.");
+ }
+
+ Pair<ExecutionReference, ExecutableFlow> refPair =
+ this.runningFlows.get(execId);
+ if (refPair == null) {
+ throw new ExecutorManagerException(
+ "No running flow found with the execution id. Removing " + execId);
+ }
+
+ ExecutionReference ref = refPair.getFirst();
+ ExecutableFlow flow = refPair.getSecond();
+ if (updateData.containsKey("error")) {
+ // The flow should be finished here.
+ throw new ExecutorManagerException((String) updateData.get("error"), flow);
+ }
+
+ // Reset errors.
+ ref.setNextCheckTime(0);
+ ref.setNumErrors(0);
+ Status oldStatus = flow.getStatus();
+ flow.applyUpdateObject(updateData);
+ Status newStatus = flow.getStatus();
+
+ ExecutionOptions options = flow.getExecutionOptions();
+ if (oldStatus != newStatus && newStatus.equals(Status.FAILED_FINISHING)) {
+ // We want to see if we should give an email status on first failure.
+ if (options.getNotifyOnFirstFailure()) {
+ Alerter mailAlerter = alerters.get("email");
+ try {
+ mailAlerter.alertOnFirstError(flow);
+ } catch (Exception e) {
+ e.printStackTrace();
+ logger.error("Failed to send first error email." + e.getMessage());
+ }
+ }
+ if (options.getFlowParameters().containsKey("alert.type")) {
+ String alertType = options.getFlowParameters().get("alert.type");
+ Alerter alerter = alerters.get(alertType);
+ if (alerter != null) {
+ try {
+ alerter.alertOnFirstError(flow);
+ } catch (Exception e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ logger.error("Failed to alert by " + alertType);
+ }
+ } else {
+ logger.error("Alerter type " + alertType
+ + " doesn't exist. Failed to alert.");
+ }
+ }
+ }
+
+ return flow;
+ }
+
+ public boolean isFinished(ExecutableFlow flow) {
+ switch (flow.getStatus()) {
+ case SUCCEEDED:
+ case FAILED:
+ case KILLED:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ private void fillUpdateTimeAndExecId(List<ExecutableFlow> flows,
+ List<Integer> executionIds, List<Long> updateTimes) {
+ for (ExecutableFlow flow : flows) {
+ executionIds.add(flow.getExecutionId());
+ updateTimes.add(flow.getUpdateTime());
+ }
+ }
+
+ private Map<ConnectionInfo, List<ExecutableFlow>> getFlowToExecutorMap() {
+ HashMap<ConnectionInfo, List<ExecutableFlow>> exFlowMap =
+ new HashMap<ConnectionInfo, List<ExecutableFlow>>();
+
+ ConnectionInfo lastPort = new ConnectionInfo(executorHost, executorPort);
+ for (Pair<ExecutionReference, ExecutableFlow> runningFlow : runningFlows
+ .values()) {
+ ExecutionReference ref = runningFlow.getFirst();
+ ExecutableFlow flow = runningFlow.getSecond();
+
+ // We can set the next check time to prevent the checking of certain
+ // flows.
+ if (ref.getNextCheckTime() >= System.currentTimeMillis()) {
+ continue;
+ }
+
+ // Just a silly way to reduce object creation construction of objects
+ // since it's most likely that the values will be the same.
+ if (!lastPort.isEqual(ref.getHost(), ref.getPort())) {
+ lastPort = new ConnectionInfo(ref.getHost(), ref.getPort());
+ }
+
+ List<ExecutableFlow> flows = exFlowMap.get(lastPort);
+ if (flows == null) {
+ flows = new ArrayList<ExecutableFlow>();
+ exFlowMap.put(lastPort, flows);
+ }
+
+ flows.add(flow);
+ }
+
+ return exFlowMap;
+ }
+
+ private static class ConnectionInfo {
+ private String host;
+ private int port;
+
+ public ConnectionInfo(String host, int port) {
+ this.host = host;
+ this.port = port;
+ }
+
+ @SuppressWarnings("unused")
+ private ConnectionInfo getOuterType() {
+ return ConnectionInfo.this;
+ }
+
+ public boolean isEqual(String host, int port) {
+ return this.port == port && this.host.equals(host);
+ }
+
+ public String getHost() {
+ return host;
+ }
+
+ public int getPort() {
+ return port;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((host == null) ? 0 : host.hashCode());
+ result = prime * result + port;
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ ConnectionInfo other = (ConnectionInfo) obj;
+ if (host == null) {
+ if (other.host != null)
+ return false;
+ } else if (!host.equals(other.host))
+ return false;
+ if (port != other.port)
+ return false;
+ return true;
+ }
+ }
+
+ @Override
+ public int getExecutableFlows(int projectId, String flowId, int from,
+ int length, List<ExecutableFlow> outputList)
+ throws ExecutorManagerException {
+ List<ExecutableFlow> flows =
+ executorLoader.fetchFlowHistory(projectId, flowId, from, length);
+ outputList.addAll(flows);
+ return executorLoader.fetchNumExecutableFlows(projectId, flowId);
+ }
+
+ @Override
+ public List<ExecutableFlow> getExecutableFlows(int projectId, String flowId,
+ int from, int length, Status status) throws ExecutorManagerException {
+ return executorLoader.fetchFlowHistory(projectId, flowId, from, length,
+ status);
+ }
+
+ /*
+ * cleaner thread to clean up execution_logs, etc in DB. Runs every day.
+ */
+ private class CleanerThread extends Thread {
+ // log file retention is 1 month.
+
+ // check every day
+ private static final long CLEANER_THREAD_WAIT_INTERVAL_MS =
+ 24 * 60 * 60 * 1000;
+
+ private final long executionLogsRetentionMs;
+
+ private boolean shutdown = false;
+ private long lastLogCleanTime = -1;
+
+ public CleanerThread(long executionLogsRetentionMs) {
+ this.executionLogsRetentionMs = executionLogsRetentionMs;
+ this.setName("AzkabanWebServer-Cleaner-Thread");
+ }
+
+ @SuppressWarnings("unused")
+ public void shutdown() {
+ shutdown = true;
+ this.interrupt();
+ }
+
+ public void run() {
+ while (!shutdown) {
+ synchronized (this) {
+ try {
+ lastCleanerThreadCheckTime = System.currentTimeMillis();
+
+ // Cleanup old stuff.
+ long currentTime = System.currentTimeMillis();
+ if (currentTime - CLEANER_THREAD_WAIT_INTERVAL_MS > lastLogCleanTime) {
+ cleanExecutionLogs();
+ lastLogCleanTime = currentTime;
+ }
+
+ wait(CLEANER_THREAD_WAIT_INTERVAL_MS);
+ } catch (InterruptedException e) {
+ logger.info("Interrupted. Probably to shut down.");
+ }
+ }
+ }
+ }
+
+ private void cleanExecutionLogs() {
+ logger.info("Cleaning old logs from execution_logs");
+ long cutoff = DateTime.now().getMillis() - executionLogsRetentionMs;
+ logger.info("Cleaning old log files before "
+ + new DateTime(cutoff).toString());
+ cleanOldExecutionLogs(DateTime.now().getMillis()
+ - executionLogsRetentionMs);
+ }
+ }
}
src/main/java/azkaban/executor/ExecutorManagerAdapter.java 290(+169 -121)
diff --git a/src/main/java/azkaban/executor/ExecutorManagerAdapter.java b/src/main/java/azkaban/executor/ExecutorManagerAdapter.java
index af64ec4..c7e8913 100644
--- a/src/main/java/azkaban/executor/ExecutorManagerAdapter.java
+++ b/src/main/java/azkaban/executor/ExecutorManagerAdapter.java
@@ -1,3 +1,19 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.executor;
import java.io.IOException;
@@ -10,125 +26,157 @@ import azkaban.project.Project;
import azkaban.utils.FileIOUtils.JobMetaData;
import azkaban.utils.FileIOUtils.LogData;
-public interface ExecutorManagerAdapter{
-
- public static final String LOCAL_MODE = "local";
- public static final String REMOTE_MODE = "remote";
-
- public static final String REMOTE_EXECUTOR_MANAGER_HOST = "remote.executor.manager.host";
- public static final String REMOTE_EXECUTOR_MANAGER_PORT = "remote.executor.manager.port";
- public static final String REMOTE_EXECUTOR_MANAGER_URL = "/executormanager";
-
- public static final String ACTION_GET_FLOW_LOG = "getFlowLog";
- public static final String ACTION_GET_JOB_LOG = "getJobLog";
- public static final String ACTION_CANCEL_FLOW = "cancelFlow";
- public static final String ACTION_SUBMIT_FLOW = "submitFlow";
- public static final String ACTION_RESUME_FLOW = "resumeFlow";
- public static final String ACTION_PAUSE_FLOW = "pauseFlow";
- public static final String ACTION_MODIFY_EXECUTION = "modifyExecution";
- public static final String ACTION_UPDATE = "update";
- public static final String ACTION_GET_JMX = "getJMX";
-
- public static final String COMMAND_MODIFY_PAUSE_JOBS = "modifyPauseJobs";
- public static final String COMMAND_MODIFY_RESUME_JOBS = "modifyResumeJobs";
- public static final String COMMAND_MODIFY_RETRY_FAILURES = "modifyRetryFailures";
- public static final String COMMAND_MODIFY_RETRY_JOBS = "modifyRetryJobs";
- public static final String COMMAND_MODIFY_DISABLE_JOBS = "modifyDisableJobs";
- public static final String COMMAND_MODIFY_ENABLE_JOBS = "modifyEnableJobs";
- public static final String COMMAND_MODIFY_CANCEL_JOBS = "modifyCancelJobs";
-
- public static final String INFO_JMX_TYPE = "jmxType";
- public static final String INFO_JMX_DATA = "jmxData";
- public static final String INFO_ACTION = "action";
- public static final String INFO_TYPE = "type";
- public static final String INFO_EXEC_ID = "execId";
- public static final String INFO_EXEC_FLOW_JSON = "execFlowJson";
- public static final String INFO_PROJECT_ID = "projectId";
- public static final String INFO_FLOW_NAME = "flowName";
- public static final String INFO_JOB_NAME = "jobName";
- public static final String INFO_OFFSET = "offset";
- public static final String INFO_LENGTH = "length";
- public static final String INFO_ATTEMPT = "attempt";
- public static final String INFO_MODIFY_JOB_IDS = "modifyJobIds";
- public static final String INFO_MODIFY_COMMAND = "modifyCommand";
- public static final String INFO_MESSAGE = "message";
- public static final String INFO_ERROR = "error";
- public static final String INFO_UPDATE_TIME_LIST = "updateTimeList";
- public static final String INFO_EXEC_ID_LIST = "execIdList";
- public static final String INFO_UPDATES = "updates";
- public static final String INFO_USER_ID = "userId";
- public static final String INFO_LOG = "logData";
-
- public boolean isFlowRunning(int projectId, String flowId);
-
- public ExecutableFlow getExecutableFlow(int execId) throws ExecutorManagerException;
-
- public List<Integer> getRunningFlows(int projectId, String flowId);
-
- public List<ExecutableFlow> getRunningFlows() throws IOException;
-
- public List<ExecutableFlow> getRecentlyFinishedFlows();
-
- public List<ExecutableFlow> getExecutableFlows(Project project, String flowId, int skip, int size) throws ExecutorManagerException;
-
- public List<ExecutableFlow> getExecutableFlows(int skip, int size) throws ExecutorManagerException;
-
- public List<ExecutableFlow> getExecutableFlows(String flowIdContains, int skip, int size) throws ExecutorManagerException;
-
- public List<ExecutableFlow> getExecutableFlows(String projContain, String flowContain, String userContain, int status, long begin, long end, int skip, int size) throws ExecutorManagerException;
-
- public int getExecutableFlows(int projectId, String flowId, int from, int length, List<ExecutableFlow> outputList) throws ExecutorManagerException;
-
- public List<ExecutableFlow> getExecutableFlows(int projectId, String flowId, int from, int length, Status status) throws ExecutorManagerException;
-
- public List<ExecutableJobInfo> getExecutableJobs(Project project, String jobId, int skip, int size) throws ExecutorManagerException;
-
- public int getNumberOfJobExecutions(Project project, String jobId) throws ExecutorManagerException;
-
- public int getNumberOfExecutions(Project project, String flowId) throws ExecutorManagerException;
-
- public LogData getExecutableFlowLog(ExecutableFlow exFlow, int offset, int length) throws ExecutorManagerException;
-
- public LogData getExecutionJobLog(ExecutableFlow exFlow, String jobId, int offset, int length, int attempt) throws ExecutorManagerException;
-
- public List<Object> getExecutionJobStats(ExecutableFlow exflow, String jobId, int attempt) throws ExecutorManagerException;
-
- public JobMetaData getExecutionJobMetaData(ExecutableFlow exFlow, String jobId, int offset, int length, int attempt) throws ExecutorManagerException;
-
- public void cancelFlow(ExecutableFlow exFlow, String userId) throws ExecutorManagerException;
-
- public void resumeFlow(ExecutableFlow exFlow, String userId) throws ExecutorManagerException;
-
- public void pauseFlow(ExecutableFlow exFlow, String userId) throws ExecutorManagerException;
-
- public void pauseExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException;
-
- public void resumeExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException;
-
- public void retryFailures(ExecutableFlow exFlow, String userId) throws ExecutorManagerException;
-
- public void retryExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException;
-
- public void disableExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException;
-
- public void enableExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException;
-
- public void cancelExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException;
-
- public String submitExecutableFlow(ExecutableFlow exflow, String userId) throws ExecutorManagerException;
-
- public Map<String, Object> callExecutorJMX(String hostPort, String action, String mBean) throws IOException;
-
- public void shutdown();
-
- public Set<String> getAllActiveExecutorServerHosts();
-
- public State getExecutorManagerThreadState();
-
- public boolean isExecutorManagerThreadActive();
-
- public long getLastExecutorManagerThreadCheckTime();
-
- public Set<? extends String> getPrimaryServerHosts();
-
+public interface ExecutorManagerAdapter {
+
+ public static final String LOCAL_MODE = "local";
+ public static final String REMOTE_MODE = "remote";
+
+ public static final String REMOTE_EXECUTOR_MANAGER_HOST =
+ "remote.executor.manager.host";
+ public static final String REMOTE_EXECUTOR_MANAGER_PORT =
+ "remote.executor.manager.port";
+ public static final String REMOTE_EXECUTOR_MANAGER_URL = "/executormanager";
+
+ public static final String ACTION_GET_FLOW_LOG = "getFlowLog";
+ public static final String ACTION_GET_JOB_LOG = "getJobLog";
+ public static final String ACTION_CANCEL_FLOW = "cancelFlow";
+ public static final String ACTION_SUBMIT_FLOW = "submitFlow";
+ public static final String ACTION_RESUME_FLOW = "resumeFlow";
+ public static final String ACTION_PAUSE_FLOW = "pauseFlow";
+ public static final String ACTION_MODIFY_EXECUTION = "modifyExecution";
+ public static final String ACTION_UPDATE = "update";
+ public static final String ACTION_GET_JMX = "getJMX";
+
+ public static final String COMMAND_MODIFY_PAUSE_JOBS = "modifyPauseJobs";
+ public static final String COMMAND_MODIFY_RESUME_JOBS = "modifyResumeJobs";
+ public static final String COMMAND_MODIFY_RETRY_FAILURES =
+ "modifyRetryFailures";
+ public static final String COMMAND_MODIFY_RETRY_JOBS = "modifyRetryJobs";
+ public static final String COMMAND_MODIFY_DISABLE_JOBS = "modifyDisableJobs";
+ public static final String COMMAND_MODIFY_ENABLE_JOBS = "modifyEnableJobs";
+ public static final String COMMAND_MODIFY_CANCEL_JOBS = "modifyCancelJobs";
+
+ public static final String INFO_JMX_TYPE = "jmxType";
+ public static final String INFO_JMX_DATA = "jmxData";
+ public static final String INFO_ACTION = "action";
+ public static final String INFO_TYPE = "type";
+ public static final String INFO_EXEC_ID = "execId";
+ public static final String INFO_EXEC_FLOW_JSON = "execFlowJson";
+ public static final String INFO_PROJECT_ID = "projectId";
+ public static final String INFO_FLOW_NAME = "flowName";
+ public static final String INFO_JOB_NAME = "jobName";
+ public static final String INFO_OFFSET = "offset";
+ public static final String INFO_LENGTH = "length";
+ public static final String INFO_ATTEMPT = "attempt";
+ public static final String INFO_MODIFY_JOB_IDS = "modifyJobIds";
+ public static final String INFO_MODIFY_COMMAND = "modifyCommand";
+ public static final String INFO_MESSAGE = "message";
+ public static final String INFO_ERROR = "error";
+ public static final String INFO_UPDATE_TIME_LIST = "updateTimeList";
+ public static final String INFO_EXEC_ID_LIST = "execIdList";
+ public static final String INFO_UPDATES = "updates";
+ public static final String INFO_USER_ID = "userId";
+ public static final String INFO_LOG = "logData";
+
+ public boolean isFlowRunning(int projectId, String flowId);
+
+ public ExecutableFlow getExecutableFlow(int execId)
+ throws ExecutorManagerException;
+
+ public List<Integer> getRunningFlows(int projectId, String flowId);
+
+ public List<ExecutableFlow> getRunningFlows() throws IOException;
+
+ public List<ExecutableFlow> getRecentlyFinishedFlows();
+
+ public List<ExecutableFlow> getExecutableFlows(Project project,
+ String flowId, int skip, int size) throws ExecutorManagerException;
+
+ public List<ExecutableFlow> getExecutableFlows(int skip, int size)
+ throws ExecutorManagerException;
+
+ public List<ExecutableFlow> getExecutableFlows(String flowIdContains,
+ int skip, int size) throws ExecutorManagerException;
+
+ public List<ExecutableFlow> getExecutableFlows(String projContain,
+ String flowContain, String userContain, int status, long begin, long end,
+ int skip, int size) throws ExecutorManagerException;
+
+ public int getExecutableFlows(int projectId, String flowId, int from,
+ int length, List<ExecutableFlow> outputList)
+ throws ExecutorManagerException;
+
+ public List<ExecutableFlow> getExecutableFlows(int projectId, String flowId,
+ int from, int length, Status status) throws ExecutorManagerException;
+
+ public List<ExecutableJobInfo> getExecutableJobs(Project project,
+ String jobId, int skip, int size) throws ExecutorManagerException;
+
+ public int getNumberOfJobExecutions(Project project, String jobId)
+ throws ExecutorManagerException;
+
+ public int getNumberOfExecutions(Project project, String flowId)
+ throws ExecutorManagerException;
+
+ public LogData getExecutableFlowLog(ExecutableFlow exFlow, int offset,
+ int length) throws ExecutorManagerException;
+
+ public LogData getExecutionJobLog(ExecutableFlow exFlow, String jobId,
+ int offset, int length, int attempt) throws ExecutorManagerException;
+
+ public List<Object> getExecutionJobStats(ExecutableFlow exflow, String jobId,
+ int attempt) throws ExecutorManagerException;
+
+ public JobMetaData getExecutionJobMetaData(ExecutableFlow exFlow,
+ String jobId, int offset, int length, int attempt)
+ throws ExecutorManagerException;
+
+ public void cancelFlow(ExecutableFlow exFlow, String userId)
+ throws ExecutorManagerException;
+
+ public void resumeFlow(ExecutableFlow exFlow, String userId)
+ throws ExecutorManagerException;
+
+ public void pauseFlow(ExecutableFlow exFlow, String userId)
+ throws ExecutorManagerException;
+
+ public void pauseExecutingJobs(ExecutableFlow exFlow, String userId,
+ String... jobIds) throws ExecutorManagerException;
+
+ public void resumeExecutingJobs(ExecutableFlow exFlow, String userId,
+ String... jobIds) throws ExecutorManagerException;
+
+ public void retryFailures(ExecutableFlow exFlow, String userId)
+ throws ExecutorManagerException;
+
+ public void retryExecutingJobs(ExecutableFlow exFlow, String userId,
+ String... jobIds) throws ExecutorManagerException;
+
+ public void disableExecutingJobs(ExecutableFlow exFlow, String userId,
+ String... jobIds) throws ExecutorManagerException;
+
+ public void enableExecutingJobs(ExecutableFlow exFlow, String userId,
+ String... jobIds) throws ExecutorManagerException;
+
+ public void cancelExecutingJobs(ExecutableFlow exFlow, String userId,
+ String... jobIds) throws ExecutorManagerException;
+
+ public String submitExecutableFlow(ExecutableFlow exflow, String userId)
+ throws ExecutorManagerException;
+
+ public Map<String, Object> callExecutorJMX(String hostPort, String action,
+ String mBean) throws IOException;
+
+ public void shutdown();
+
+ public Set<String> getAllActiveExecutorServerHosts();
+
+ public State getExecutorManagerThreadState();
+
+ public boolean isExecutorManagerThreadActive();
+
+ public long getLastExecutorManagerThreadCheckTime();
+
+ public Set<? extends String> getPrimaryServerHosts();
+
}
diff --git a/src/main/java/azkaban/executor/ExecutorManagerException.java b/src/main/java/azkaban/executor/ExecutorManagerException.java
index 09b8953..a02073c 100644
--- a/src/main/java/azkaban/executor/ExecutorManagerException.java
+++ b/src/main/java/azkaban/executor/ExecutorManagerException.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -17,41 +17,41 @@
package azkaban.executor;
public class ExecutorManagerException extends Exception {
- public enum Reason {
- SkippedExecution
- }
-
- private static final long serialVersionUID = 1L;
- private ExecutableFlow flow = null;
- private Reason reason = null;
-
- public ExecutorManagerException(Exception e) {
- super(e);
- }
-
- public ExecutorManagerException(String message) {
- super(message);
- }
-
- public ExecutorManagerException(String message, ExecutableFlow flow) {
- super(message);
- this.flow = flow;
- }
-
- public ExecutorManagerException(String message, Reason reason) {
- super(message);
- this.reason = reason;
- }
-
- public ExecutorManagerException(String message, Throwable cause) {
- super(message, cause);
- }
-
- public ExecutableFlow getExecutableFlow() {
- return flow;
- }
-
- public Reason getReason() {
- return reason;
- }
+ public enum Reason {
+ SkippedExecution
+ }
+
+ private static final long serialVersionUID = 1L;
+ private ExecutableFlow flow = null;
+ private Reason reason = null;
+
+ public ExecutorManagerException(Exception e) {
+ super(e);
+ }
+
+ public ExecutorManagerException(String message) {
+ super(message);
+ }
+
+ public ExecutorManagerException(String message, ExecutableFlow flow) {
+ super(message);
+ this.flow = flow;
+ }
+
+ public ExecutorManagerException(String message, Reason reason) {
+ super(message);
+ this.reason = reason;
+ }
+
+ public ExecutorManagerException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+ public ExecutableFlow getExecutableFlow() {
+ return flow;
+ }
+
+ public Reason getReason() {
+ return reason;
+ }
}
src/main/java/azkaban/executor/ExecutorManagerServlet.java 410(+205 -205)
diff --git a/src/main/java/azkaban/executor/ExecutorManagerServlet.java b/src/main/java/azkaban/executor/ExecutorManagerServlet.java
index ab8d5b2..d88856d 100644
--- a/src/main/java/azkaban/executor/ExecutorManagerServlet.java
+++ b/src/main/java/azkaban/executor/ExecutorManagerServlet.java
@@ -17,209 +17,209 @@ import azkaban.webapp.servlet.AbstractServiceServlet;
public class ExecutorManagerServlet extends AbstractServiceServlet {
- private final ExecutorManagerAdapter executorManager;
-
- public static final String URL = "executorManager";
- private static final long serialVersionUID = 1L;
- private static final Logger logger = Logger.getLogger(ExecutorManagerServlet.class);
-
- public ExecutorManagerServlet(ExecutorManagerAdapter executorManager) {
- this.executorManager = executorManager;
- }
-
- @Override
- public void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
- HashMap<String,Object> respMap= new HashMap<String,Object>();
- //logger.info("ExecutorServer called by " + req.getRemoteAddr());
- try {
- if (!hasParam(req, ExecutorManagerAdapter.INFO_ACTION)) {
- logger.error("Parameter action not set");
- respMap.put("error", "Parameter action not set");
- }
- else {
- String action = getParam(req, ExecutorManagerAdapter.INFO_ACTION);
- if (action.equals(ExecutorManagerAdapter.ACTION_UPDATE)) {
- //logger.info("Updated called");
- handleAjaxUpdateRequest(req, respMap);
- }
- else {
- int execid = Integer.parseInt(getParam(req, ExecutorManagerAdapter.INFO_EXEC_ID));
- String user = getParam(req, ExecutorManagerAdapter.INFO_USER_ID, null);
-
- logger.info("User " + user + " has called action " + action + " on " + execid);
- if (action.equals(ExecutorManagerAdapter.ACTION_GET_FLOW_LOG)) {
- handleFetchFlowLogEvent(execid, req, resp, respMap);
- } else if (action.equals(ExecutorManagerAdapter.ACTION_GET_JOB_LOG)) {
- handleFetchJobLogEvent(execid, req, resp, respMap);
- }
- else if (action.equals(ExecutorManagerAdapter.ACTION_SUBMIT_FLOW)) {
- handleAjaxSubmitFlow(req, respMap, execid);
- }
- else if (action.equals(ExecutorManagerAdapter.ACTION_CANCEL_FLOW)) {
- logger.info("Cancel called.");
- handleAjaxCancelFlow(respMap, execid, user);
- }
- else if (action.equals(ExecutorManagerAdapter.ACTION_PAUSE_FLOW)) {
- logger.info("Paused called.");
- handleAjaxPauseFlow(respMap, execid, user);
- }
- else if (action.equals(ExecutorManagerAdapter.ACTION_RESUME_FLOW)) {
- logger.info("Resume called.");
- handleAjaxResumeFlow(respMap, execid, user);
- }
- else if (action.equals(ExecutorManagerAdapter.ACTION_MODIFY_EXECUTION)) {
- logger.info("Modify Execution Action");
- handleModifyExecution(respMap, execid, user, req);
- }
- else {
- logger.error("action: '" + action + "' not supported.");
- respMap.put("error", "action: '" + action + "' not supported.");
- }
- }
- }
- } catch (Exception e) {
- logger.error(e);
- respMap.put(ExecutorManagerAdapter.INFO_ERROR, e.getMessage());
- }
- writeJSON(resp, respMap);
- resp.flushBuffer();
- }
-
- private void handleModifyExecution(HashMap<String, Object> respMap,
- int execid, String user, HttpServletRequest req) {
- if (!hasParam(req, ExecutorManagerAdapter.INFO_MODIFY_COMMAND)) {
- respMap.put(ExecutorManagerAdapter.INFO_ERROR, "Modification command not set.");
- return;
- }
-
- try {
- String modificationType = getParam(req, ExecutorManagerAdapter.INFO_MODIFY_COMMAND);
- ExecutableFlow exflow = executorManager.getExecutableFlow(execid);
- if (ExecutorManagerAdapter.COMMAND_MODIFY_RETRY_FAILURES.equals(modificationType)) {
- executorManager.retryFailures(exflow, user);
- }
- else {
-// String modifiedJobList = getParam(req, MODIFY_JOBS_LIST);
-// String[] jobIds = modifiedJobList.split("\\s*,\\s*");
-//
-// if (MODIFY_RETRY_JOBS.equals(modificationType)) {
-// }
-// else if (MODIFY_CANCEL_JOBS.equals(modificationType)) {
-// }
-// else if (MODIFY_DISABLE_JOBS.equals(modificationType)) {
-// }
-// else if (MODIFY_ENABLE_JOBS.equals(modificationType)) {
-// }
-// else if (MODIFY_PAUSE_JOBS.equals(modificationType)) {
-// }
-// else if (MODIFY_RESUME_JOBS.equals(modificationType)) {
-// }
- }
- } catch (Exception e) {
- respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
- }
- }
-
- private void handleAjaxResumeFlow(HashMap<String, Object> respMap, int execid, String user) {
- try {
- ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
- executorManager.resumeFlow(exFlow, user);
- } catch (Exception e) {
- respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
- }
-
- }
-
- private void handleAjaxPauseFlow(HashMap<String, Object> respMap, int execid, String user) {
- try {
- ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
- executorManager.pauseFlow(exFlow, user);
- } catch (Exception e) {
- respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
- }
- }
-
- private void handleAjaxCancelFlow(HashMap<String, Object> respMap, int execid, String user) {
- try {
- ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
- executorManager.cancelFlow(exFlow, user);
- } catch (Exception e) {
- respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
- }
- }
-
- private void handleAjaxSubmitFlow(HttpServletRequest req, HashMap<String, Object> respMap, int execid) {
- try{
- String execFlowJson = getParam(req, ExecutorManagerAdapter.INFO_EXEC_FLOW_JSON);
- ExecutableFlow exflow = ExecutableFlow.createExecutableFlowFromObject(JSONUtils.parseJSONFromString(execFlowJson));
- String user = getParam(req, ExecutorManagerAdapter.INFO_USER_ID);
- executorManager.submitExecutableFlow(exflow, user);
- respMap.put(ExecutorManagerAdapter.INFO_EXEC_ID, exflow.getExecutionId());
- } catch (Exception e) {
- e.printStackTrace();
- respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
- }
- }
-
- private void handleFetchJobLogEvent(int execid, HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> respMap) {
- try{
- ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
- String jobId = getParam(req, ExecutorManagerAdapter.INFO_JOB_NAME);
- int offset = getIntParam(req, ExecutorManagerAdapter.INFO_OFFSET);
- int length = getIntParam(req, ExecutorManagerAdapter.INFO_LENGTH);
- int attempt = getIntParam(req, ExecutorManagerAdapter.INFO_ATTEMPT);
- LogData log = executorManager.getExecutionJobLog(exFlow, jobId, offset, length, attempt);
- respMap.put(ExecutorManagerAdapter.INFO_LOG, JSONUtils.toJSON(log.toObject()));
- } catch (Exception e) {
- e.printStackTrace();
- respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
- }
- }
-
- private void handleFetchFlowLogEvent(int execid, HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> respMap) {
- try{
- ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
- int offset = getIntParam(req, ExecutorManagerAdapter.INFO_OFFSET);
- int length = getIntParam(req, ExecutorManagerAdapter.INFO_LENGTH);
- LogData log = executorManager.getExecutableFlowLog(exFlow, offset, length);
- respMap.put(ExecutorManagerAdapter.INFO_LOG, JSONUtils.toJSON(log.toObject()));
- } catch (Exception e) {
- e.printStackTrace();
- respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
- }
-
- }
-
- @SuppressWarnings("unchecked")
- private void handleAjaxUpdateRequest(HttpServletRequest req, HashMap<String, Object> respMap) {
- try {
- ArrayList<Object> updateTimesList = (ArrayList<Object>)JSONUtils.parseJSONFromString(getParam(req, ExecutorManagerAdapter.INFO_UPDATE_TIME_LIST));
- ArrayList<Object> execIDList = (ArrayList<Object>)JSONUtils.parseJSONFromString(getParam(req, ExecutorManagerAdapter.INFO_EXEC_ID_LIST));
-
- ArrayList<Object> updateList = new ArrayList<Object>();
- for (int i = 0; i < execIDList.size(); ++i) {
- long updateTime = JSONUtils.getLongFromObject(updateTimesList.get(i));
- int execId = (Integer)execIDList.get(i);
-
- ExecutableFlow flow = executorManager.getExecutableFlow(execId);
- if (flow == null) {
- Map<String, Object> errorResponse = new HashMap<String,Object>();
- errorResponse.put(ExecutorManagerAdapter.INFO_ERROR, "Flow does not exist");
- errorResponse.put(ExecutorManagerAdapter.INFO_EXEC_ID, execId);
- updateList.add(errorResponse);
- continue;
- }
-
- if (flow.getUpdateTime() > updateTime) {
- updateList.add(flow.toUpdateObject(updateTime));
- }
- }
-
- respMap.put(ExecutorManagerAdapter.INFO_UPDATES, updateList);
- } catch (Exception e) {
- e.printStackTrace();
- respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
- }
- }
-
+ private final ExecutorManagerAdapter executorManager;
+
+ public static final String URL = "executorManager";
+ private static final long serialVersionUID = 1L;
+ private static final Logger logger = Logger.getLogger(ExecutorManagerServlet.class);
+
+ public ExecutorManagerServlet(ExecutorManagerAdapter executorManager) {
+ this.executorManager = executorManager;
+ }
+
+ @Override
+ public void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
+ HashMap<String,Object> respMap= new HashMap<String,Object>();
+ //logger.info("ExecutorServer called by " + req.getRemoteAddr());
+ try {
+ if (!hasParam(req, ExecutorManagerAdapter.INFO_ACTION)) {
+ logger.error("Parameter action not set");
+ respMap.put("error", "Parameter action not set");
+ }
+ else {
+ String action = getParam(req, ExecutorManagerAdapter.INFO_ACTION);
+ if (action.equals(ExecutorManagerAdapter.ACTION_UPDATE)) {
+ //logger.info("Updated called");
+ handleAjaxUpdateRequest(req, respMap);
+ }
+ else {
+ int execid = Integer.parseInt(getParam(req, ExecutorManagerAdapter.INFO_EXEC_ID));
+ String user = getParam(req, ExecutorManagerAdapter.INFO_USER_ID, null);
+
+ logger.info("User " + user + " has called action " + action + " on " + execid);
+ if (action.equals(ExecutorManagerAdapter.ACTION_GET_FLOW_LOG)) {
+ handleFetchFlowLogEvent(execid, req, resp, respMap);
+ } else if (action.equals(ExecutorManagerAdapter.ACTION_GET_JOB_LOG)) {
+ handleFetchJobLogEvent(execid, req, resp, respMap);
+ }
+ else if (action.equals(ExecutorManagerAdapter.ACTION_SUBMIT_FLOW)) {
+ handleAjaxSubmitFlow(req, respMap, execid);
+ }
+ else if (action.equals(ExecutorManagerAdapter.ACTION_CANCEL_FLOW)) {
+ logger.info("Cancel called.");
+ handleAjaxCancelFlow(respMap, execid, user);
+ }
+ else if (action.equals(ExecutorManagerAdapter.ACTION_PAUSE_FLOW)) {
+ logger.info("Paused called.");
+ handleAjaxPauseFlow(respMap, execid, user);
+ }
+ else if (action.equals(ExecutorManagerAdapter.ACTION_RESUME_FLOW)) {
+ logger.info("Resume called.");
+ handleAjaxResumeFlow(respMap, execid, user);
+ }
+ else if (action.equals(ExecutorManagerAdapter.ACTION_MODIFY_EXECUTION)) {
+ logger.info("Modify Execution Action");
+ handleModifyExecution(respMap, execid, user, req);
+ }
+ else {
+ logger.error("action: '" + action + "' not supported.");
+ respMap.put("error", "action: '" + action + "' not supported.");
+ }
+ }
+ }
+ } catch (Exception e) {
+ logger.error(e);
+ respMap.put(ExecutorManagerAdapter.INFO_ERROR, e.getMessage());
+ }
+ writeJSON(resp, respMap);
+ resp.flushBuffer();
+ }
+
+ private void handleModifyExecution(HashMap<String, Object> respMap,
+ int execid, String user, HttpServletRequest req) {
+ if (!hasParam(req, ExecutorManagerAdapter.INFO_MODIFY_COMMAND)) {
+ respMap.put(ExecutorManagerAdapter.INFO_ERROR, "Modification command not set.");
+ return;
+ }
+
+ try {
+ String modificationType = getParam(req, ExecutorManagerAdapter.INFO_MODIFY_COMMAND);
+ ExecutableFlow exflow = executorManager.getExecutableFlow(execid);
+ if (ExecutorManagerAdapter.COMMAND_MODIFY_RETRY_FAILURES.equals(modificationType)) {
+ executorManager.retryFailures(exflow, user);
+ }
+ else {
+// String modifiedJobList = getParam(req, MODIFY_JOBS_LIST);
+// String[] jobIds = modifiedJobList.split("\\s*,\\s*");
+//
+// if (MODIFY_RETRY_JOBS.equals(modificationType)) {
+// }
+// else if (MODIFY_CANCEL_JOBS.equals(modificationType)) {
+// }
+// else if (MODIFY_DISABLE_JOBS.equals(modificationType)) {
+// }
+// else if (MODIFY_ENABLE_JOBS.equals(modificationType)) {
+// }
+// else if (MODIFY_PAUSE_JOBS.equals(modificationType)) {
+// }
+// else if (MODIFY_RESUME_JOBS.equals(modificationType)) {
+// }
+ }
+ } catch (Exception e) {
+ respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
+ }
+ }
+
+ private void handleAjaxResumeFlow(HashMap<String, Object> respMap, int execid, String user) {
+ try {
+ ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
+ executorManager.resumeFlow(exFlow, user);
+ } catch (Exception e) {
+ respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
+ }
+
+ }
+
+ private void handleAjaxPauseFlow(HashMap<String, Object> respMap, int execid, String user) {
+ try {
+ ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
+ executorManager.pauseFlow(exFlow, user);
+ } catch (Exception e) {
+ respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
+ }
+ }
+
+ private void handleAjaxCancelFlow(HashMap<String, Object> respMap, int execid, String user) {
+ try {
+ ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
+ executorManager.cancelFlow(exFlow, user);
+ } catch (Exception e) {
+ respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
+ }
+ }
+
+ private void handleAjaxSubmitFlow(HttpServletRequest req, HashMap<String, Object> respMap, int execid) {
+ try{
+ String execFlowJson = getParam(req, ExecutorManagerAdapter.INFO_EXEC_FLOW_JSON);
+ ExecutableFlow exflow = ExecutableFlow.createExecutableFlowFromObject(JSONUtils.parseJSONFromString(execFlowJson));
+ String user = getParam(req, ExecutorManagerAdapter.INFO_USER_ID);
+ executorManager.submitExecutableFlow(exflow, user);
+ respMap.put(ExecutorManagerAdapter.INFO_EXEC_ID, exflow.getExecutionId());
+ } catch (Exception e) {
+ e.printStackTrace();
+ respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
+ }
+ }
+
+ private void handleFetchJobLogEvent(int execid, HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> respMap) {
+ try{
+ ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
+ String jobId = getParam(req, ExecutorManagerAdapter.INFO_JOB_NAME);
+ int offset = getIntParam(req, ExecutorManagerAdapter.INFO_OFFSET);
+ int length = getIntParam(req, ExecutorManagerAdapter.INFO_LENGTH);
+ int attempt = getIntParam(req, ExecutorManagerAdapter.INFO_ATTEMPT);
+ LogData log = executorManager.getExecutionJobLog(exFlow, jobId, offset, length, attempt);
+ respMap.put(ExecutorManagerAdapter.INFO_LOG, JSONUtils.toJSON(log.toObject()));
+ } catch (Exception e) {
+ e.printStackTrace();
+ respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
+ }
+ }
+
+ private void handleFetchFlowLogEvent(int execid, HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> respMap) {
+ try{
+ ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
+ int offset = getIntParam(req, ExecutorManagerAdapter.INFO_OFFSET);
+ int length = getIntParam(req, ExecutorManagerAdapter.INFO_LENGTH);
+ LogData log = executorManager.getExecutableFlowLog(exFlow, offset, length);
+ respMap.put(ExecutorManagerAdapter.INFO_LOG, JSONUtils.toJSON(log.toObject()));
+ } catch (Exception e) {
+ e.printStackTrace();
+ respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
+ }
+
+ }
+
+ @SuppressWarnings("unchecked")
+ private void handleAjaxUpdateRequest(HttpServletRequest req, HashMap<String, Object> respMap) {
+ try {
+ ArrayList<Object> updateTimesList = (ArrayList<Object>)JSONUtils.parseJSONFromString(getParam(req, ExecutorManagerAdapter.INFO_UPDATE_TIME_LIST));
+ ArrayList<Object> execIDList = (ArrayList<Object>)JSONUtils.parseJSONFromString(getParam(req, ExecutorManagerAdapter.INFO_EXEC_ID_LIST));
+
+ ArrayList<Object> updateList = new ArrayList<Object>();
+ for (int i = 0; i < execIDList.size(); ++i) {
+ long updateTime = JSONUtils.getLongFromObject(updateTimesList.get(i));
+ int execId = (Integer)execIDList.get(i);
+
+ ExecutableFlow flow = executorManager.getExecutableFlow(execId);
+ if (flow == null) {
+ Map<String, Object> errorResponse = new HashMap<String,Object>();
+ errorResponse.put(ExecutorManagerAdapter.INFO_ERROR, "Flow does not exist");
+ errorResponse.put(ExecutorManagerAdapter.INFO_EXEC_ID, execId);
+ updateList.add(errorResponse);
+ continue;
+ }
+
+ if (flow.getUpdateTime() > updateTime) {
+ updateList.add(flow.toUpdateObject(updateTime));
+ }
+ }
+
+ respMap.put(ExecutorManagerAdapter.INFO_UPDATES, updateList);
+ } catch (Exception e) {
+ e.printStackTrace();
+ respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
+ }
+ }
+
}
src/main/java/azkaban/executor/JdbcExecutorLoader.java 2337(+1085 -1252)
diff --git a/src/main/java/azkaban/executor/JdbcExecutorLoader.java b/src/main/java/azkaban/executor/JdbcExecutorLoader.java
index cb8e00e..02c01c9 100644
--- a/src/main/java/azkaban/executor/JdbcExecutorLoader.java
+++ b/src/main/java/azkaban/executor/JdbcExecutorLoader.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -48,1253 +48,1086 @@ import azkaban.utils.Pair;
import azkaban.utils.Props;
import azkaban.utils.PropsUtils;
-public class JdbcExecutorLoader extends AbstractJdbcLoader
- implements ExecutorLoader {
- private static final Logger logger =
- Logger.getLogger(JdbcExecutorLoader.class);
-
- private EncodingType defaultEncodingType = EncodingType.GZIP;
-
- public JdbcExecutorLoader(Props props) {
- super(props);
- }
-
- public EncodingType getDefaultEncodingType() {
- return defaultEncodingType;
- }
-
- public void setDefaultEncodingType(EncodingType defaultEncodingType) {
- this.defaultEncodingType = defaultEncodingType;
- }
-
- @Override
- public synchronized void uploadExecutableFlow(ExecutableFlow flow)
- throws ExecutorManagerException {
- Connection connection = getConnection();
- try {
- uploadExecutableFlow(connection, flow, defaultEncodingType);
- }
- catch (IOException e) {
- throw new ExecutorManagerException("Error uploading flow", e);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
- }
-
- private synchronized void uploadExecutableFlow(Connection connection,
- ExecutableFlow flow, EncodingType encType)
- throws ExecutorManagerException, IOException {
- final String INSERT_EXECUTABLE_FLOW =
- "INSERT INTO execution_flows " +
- "(project_id, flow_id, version, status, submit_time, submit_user, update_time) " +
- "values (?,?,?,?,?,?,?)";
- QueryRunner runner = new QueryRunner();
- long submitTime = System.currentTimeMillis();
-
- long id;
- try {
- flow.setStatus(Status.PREPARING);
- runner.update(
- connection,
- INSERT_EXECUTABLE_FLOW,
- flow.getProjectId(),
- flow.getFlowId(),
- flow.getVersion(),
- Status.PREPARING.getNumVal(),
- submitTime,
- flow.getSubmitUser(),
- submitTime);
- connection.commit();
- id = runner.query(
- connection, LastInsertID.LAST_INSERT_ID, new LastInsertID());
-
- if (id == -1l) {
- throw new ExecutorManagerException("Execution id is not properly created.");
- }
- logger.info("Flow given " + flow.getFlowId() + " given id " + id);
- flow.setExecutionId((int)id);
-
- updateExecutableFlow(connection, flow, encType);
- }
- catch (SQLException e) {
- throw new ExecutorManagerException("Error creating execution.", e);
- }
- }
-
- @Override
- public void updateExecutableFlow(ExecutableFlow flow)
- throws ExecutorManagerException {
- Connection connection = this.getConnection();
-
- try {
- updateExecutableFlow(connection, flow, defaultEncodingType);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
- }
-
- private void updateExecutableFlow(
- Connection connection, ExecutableFlow flow, EncodingType encType)
- throws ExecutorManagerException {
- final String UPDATE_EXECUTABLE_FLOW_DATA =
- "UPDATE execution_flows " +
- "SET status=?,update_time=?,start_time=?,end_time=?,enc_type=?,flow_data=? " +
- "WHERE exec_id=?";
- QueryRunner runner = new QueryRunner();
-
- String json = JSONUtils.toJSON(flow.toObject());
- byte[] data = null;
- try {
- byte[] stringData = json.getBytes("UTF-8");
- data = stringData;
-
- if (encType == EncodingType.GZIP) {
- data = GZIPUtils.gzipBytes(stringData);
- }
- }
- catch (IOException e) {
- throw new ExecutorManagerException("Error encoding the execution flow.");
- }
-
- try {
- runner.update(
- connection,
- UPDATE_EXECUTABLE_FLOW_DATA,
- flow.getStatus().getNumVal(),
- flow.getUpdateTime(),
- flow.getStartTime(),
- flow.getEndTime(),
- encType.getNumVal(),
- data,
- flow.getExecutionId());
- connection.commit();
- }
- catch (SQLException e) {
- throw new ExecutorManagerException("Error updating flow.", e);
- }
- }
-
- @Override
- public ExecutableFlow fetchExecutableFlow(int id)
- throws ExecutorManagerException {
- QueryRunner runner = createQueryRunner();
- FetchExecutableFlows flowHandler = new FetchExecutableFlows();
-
- try {
- List<ExecutableFlow> properties = runner.query(
- FetchExecutableFlows.FETCH_EXECUTABLE_FLOW, flowHandler, id);
- return properties.get(0);
- }
- catch (SQLException e) {
- throw new ExecutorManagerException("Error fetching flow id " + id, e);
- }
- }
-
- @Override
- public Map<Integer, Pair<ExecutionReference, ExecutableFlow>> fetchActiveFlows()
- throws ExecutorManagerException {
- QueryRunner runner = createQueryRunner();
- FetchActiveExecutableFlows flowHandler = new FetchActiveExecutableFlows();
-
- try {
- Map<Integer, Pair<ExecutionReference, ExecutableFlow>> properties =
- runner.query(
- FetchActiveExecutableFlows.FETCH_ACTIVE_EXECUTABLE_FLOW,
- flowHandler);
- return properties;
- }
- catch (SQLException e) {
- throw new ExecutorManagerException("Error fetching active flows", e);
- }
- }
-
- @Override
- public int fetchNumExecutableFlows() throws ExecutorManagerException {
- QueryRunner runner = createQueryRunner();
-
- IntHandler intHandler = new IntHandler();
- try {
- int count = runner.query(IntHandler.NUM_EXECUTIONS, intHandler);
- return count;
- }
- catch (SQLException e) {
- throw new ExecutorManagerException("Error fetching num executions", e);
- }
- }
-
- @Override
- public int fetchNumExecutableFlows(int projectId, String flowId)
- throws ExecutorManagerException {
- QueryRunner runner = createQueryRunner();
-
- IntHandler intHandler = new IntHandler();
- try {
- int count = runner.query(
- IntHandler.NUM_FLOW_EXECUTIONS, intHandler, projectId, flowId);
- return count;
- }
- catch (SQLException e) {
- throw new ExecutorManagerException("Error fetching num executions", e);
- }
- }
-
- @Override
- public int fetchNumExecutableNodes(int projectId, String jobId)
- throws ExecutorManagerException {
- QueryRunner runner = createQueryRunner();
-
- IntHandler intHandler = new IntHandler();
- try {
- int count = runner.query(
- IntHandler.NUM_JOB_EXECUTIONS, intHandler, projectId, jobId);
- return count;
- }
- catch (SQLException e) {
- throw new ExecutorManagerException("Error fetching num executions", e);
- }
- }
-
- @Override
- public List<ExecutableFlow> fetchFlowHistory(int projectId, String flowId,
- int skip, int num) throws ExecutorManagerException {
- QueryRunner runner = createQueryRunner();
- FetchExecutableFlows flowHandler = new FetchExecutableFlows();
-
- try {
- List<ExecutableFlow> properties = runner.query(
- FetchExecutableFlows.FETCH_EXECUTABLE_FLOW_HISTORY,
- flowHandler,
- projectId,
- flowId,
- skip,
- num);
- return properties;
- }
- catch (SQLException e) {
- throw new ExecutorManagerException("Error fetching active flows", e);
- }
- }
-
- @Override
- public List<ExecutableFlow> fetchFlowHistory(
- int projectId, String flowId, int skip, int num, Status status)
- throws ExecutorManagerException {
- QueryRunner runner = createQueryRunner();
- FetchExecutableFlows flowHandler = new FetchExecutableFlows();
-
- try {
- List<ExecutableFlow> properties = runner.query(
- FetchExecutableFlows.FETCH_EXECUTABLE_FLOW_BY_STATUS,
- flowHandler,
- projectId,
- flowId,
- status.getNumVal(),
- skip,
- num);
- return properties;
- }
- catch (SQLException e) {
- throw new ExecutorManagerException("Error fetching active flows", e);
- }
- }
-
- @Override
- public List<ExecutableFlow> fetchFlowHistory(int skip, int num)
- throws ExecutorManagerException {
- QueryRunner runner = createQueryRunner();
-
- FetchExecutableFlows flowHandler = new FetchExecutableFlows();
-
- try {
- List<ExecutableFlow> properties = runner.query(
- FetchExecutableFlows.FETCH_ALL_EXECUTABLE_FLOW_HISTORY,
- flowHandler,
- skip,
- num);
- return properties;
- }
- catch (SQLException e) {
- throw new ExecutorManagerException("Error fetching active flows", e);
- }
- }
-
-
- @Override
- public List<ExecutableFlow> fetchFlowHistory(
- String projContain,
- String flowContains,
- String userNameContains,
- int status,
- long startTime,
- long endTime,
- int skip,
- int num) throws ExecutorManagerException {
- String query = FetchExecutableFlows.FETCH_BASE_EXECUTABLE_FLOW_QUERY;
- ArrayList<Object> params = new ArrayList<Object>();
-
- boolean first = true;
- if (projContain != null && !projContain.isEmpty()) {
- query += " ef JOIN projects p ON ef.project_id = p.id WHERE name LIKE ?";
- params.add('%'+projContain+'%');
- first = false;
- }
-
- if (flowContains != null && !flowContains.isEmpty()) {
- if (first) {
- query += " WHERE ";
- first = false;
- }
- else {
- query += " AND ";
- }
-
- query += " flow_id LIKE ?";
- params.add('%'+flowContains+'%');
- }
-
- if (userNameContains != null && !userNameContains.isEmpty()) {
- if (first) {
- query += " WHERE ";
- first = false;
- }
- else {
- query += " AND ";
- }
- query += " submit_user LIKE ?";
- params.add('%'+userNameContains+'%');
- }
-
- if (status != 0) {
- if (first) {
- query += " WHERE ";
- first = false;
- }
- else {
- query += " AND ";
- }
- query += " status = ?";
- params.add(status);
- }
-
- if (startTime > 0) {
- if (first) {
- query += " WHERE ";
- first = false;
- }
- else {
- query += " AND ";
- }
- query += " start_time > ?";
- params.add(startTime);
- }
-
- if (endTime > 0) {
- if (first) {
- query += " WHERE ";
- first = false;
- }
- else {
- query += " AND ";
- }
- query += " end_time < ?";
- params.add(endTime);
- }
-
- if (skip > -1 && num > 0) {
- query += " ORDER BY exec_id DESC LIMIT ?, ?";
- params.add(skip);
- params.add(num);
- }
-
- QueryRunner runner = createQueryRunner();
- FetchExecutableFlows flowHandler = new FetchExecutableFlows();
-
- try {
- List<ExecutableFlow> properties = runner.query(
- query, flowHandler, params.toArray());
- return properties;
- } catch (SQLException e) {
- throw new ExecutorManagerException("Error fetching active flows", e);
- }
- }
-
- @Override
- public void addActiveExecutableReference(ExecutionReference reference)
- throws ExecutorManagerException {
- final String INSERT =
- "INSERT INTO active_executing_flows " +
- "(exec_id, host, port, update_time) values (?,?,?,?)";
- QueryRunner runner = createQueryRunner();
-
- try {
- runner.update(
- INSERT,
- reference.getExecId(),
- reference.getHost(),
- reference.getPort(),
- reference.getUpdateTime());
- }
- catch (SQLException e) {
- throw new ExecutorManagerException(
- "Error updating active flow reference " + reference.getExecId(), e);
- }
- }
-
- @Override
- public void removeActiveExecutableReference(int execid)
- throws ExecutorManagerException {
- final String DELETE = "DELETE FROM active_executing_flows WHERE exec_id=?";
-
- QueryRunner runner = createQueryRunner();
- try {
- runner.update(DELETE, execid);
- }
- catch (SQLException e) {
- throw new ExecutorManagerException(
- "Error deleting active flow reference " + execid, e);
- }
- }
-
- @Override
- public boolean updateExecutableReference(int execId, long updateTime)
- throws ExecutorManagerException {
- final String DELETE =
- "UPDATE active_executing_flows set update_time=? WHERE exec_id=?";
-
- QueryRunner runner = createQueryRunner();
- int updateNum = 0;
- try {
- updateNum = runner.update(DELETE, updateTime, execId);
- }
- catch (SQLException e) {
- throw new ExecutorManagerException(
- "Error deleting active flow reference " + execId, e);
- }
-
- // Should be 1.
- return updateNum > 0;
- }
-
- @Override
- public void uploadExecutableNode(ExecutableNode node, Props inputProps)
- throws ExecutorManagerException {
- final String INSERT_EXECUTION_NODE =
- "INSERT INTO execution_jobs " +
- "(exec_id, project_id, version, flow_id, job_id, start_time, " +
- "end_time, status, input_params, attempt) VALUES (?,?,?,?,?,?,?,?,?,?)";
-
- byte[] inputParam = null;
- if (inputProps != null) {
- try {
- String jsonString =
- JSONUtils.toJSON(PropsUtils.toHierarchicalMap(inputProps));
- inputParam = GZIPUtils.gzipString(jsonString, "UTF-8");
- }
- catch (IOException e) {
- throw new ExecutorManagerException("Error encoding input params");
- }
- }
-
- ExecutableFlow flow = node.getExecutableFlow();
- String flowId = node.getParentFlow().getFlowPath();
- System.out.println("Uploading flowId " + flowId);
- QueryRunner runner = createQueryRunner();
- try {
- runner.update(
- INSERT_EXECUTION_NODE,
- flow.getExecutionId(),
- flow.getProjectId(),
- flow.getVersion(),
- flowId,
- node.getId(),
- node.getStartTime(),
- node.getEndTime(),
- node.getStatus().getNumVal(),
- inputParam,
- node.getAttempt());
- } catch (SQLException e) {
- throw new ExecutorManagerException(
- "Error writing job " + node.getId(), e);
- }
- }
-
- @Override
- public void updateExecutableNode(ExecutableNode node)
- throws ExecutorManagerException {
- final String UPSERT_EXECUTION_NODE =
- "UPDATE execution_jobs " +
- "SET start_time=?, end_time=?, status=?, output_params=? " +
- "WHERE exec_id=? AND flow_id=? AND job_id=? AND attempt=?";
-
- byte[] outputParam = null;
- Props outputProps = node.getOutputProps();
- if (outputProps != null) {
- try {
- String jsonString =
- JSONUtils.toJSON(PropsUtils.toHierarchicalMap(outputProps));
- outputParam = GZIPUtils.gzipString(jsonString, "UTF-8");
- }
- catch (IOException e) {
- throw new ExecutorManagerException("Error encoding input params");
- }
- }
-
- QueryRunner runner = createQueryRunner();
- try {
- runner.update(
- UPSERT_EXECUTION_NODE,
- node.getStartTime(),
- node.getEndTime(),
- node.getStatus().getNumVal(),
- outputParam,
- node.getExecutableFlow().getExecutionId(),
- node.getParentFlow().getFlowPath(),
- node.getId(),
- node.getAttempt());
- } catch (SQLException e) {
- throw new ExecutorManagerException(
- "Error updating job " + node.getId(), e);
- }
- }
-
- @Override
- public List<ExecutableJobInfo> fetchJobInfoAttempts(int execId, String jobId)
- throws ExecutorManagerException {
- QueryRunner runner = createQueryRunner();
-
- try {
- List<ExecutableJobInfo> info = runner.query(
- FetchExecutableJobHandler.FETCH_EXECUTABLE_NODE_ATTEMPTS,
- new FetchExecutableJobHandler(),
- execId,
- jobId);
- if (info == null || info.isEmpty()) {
- return null;
- }
- return info;
- }
- catch (SQLException e) {
- throw new ExecutorManagerException("Error querying job info " + jobId, e);
- }
- }
-
- @Override
- public ExecutableJobInfo fetchJobInfo(int execId, String jobId, int attempts)
- throws ExecutorManagerException {
- QueryRunner runner = createQueryRunner();
-
- try {
- List<ExecutableJobInfo> info = runner.query(
- FetchExecutableJobHandler.FETCH_EXECUTABLE_NODE,
- new FetchExecutableJobHandler(),
- execId,
- jobId,
- attempts);
- if (info == null || info.isEmpty()) {
- return null;
- }
- return info.get(0);
- }
- catch (SQLException e) {
- throw new ExecutorManagerException("Error querying job info " + jobId, e);
- }
- }
-
- @Override
- public Props fetchExecutionJobInputProps(int execId, String jobId)
- throws ExecutorManagerException {
- QueryRunner runner = createQueryRunner();
- try {
- Pair<Props, Props> props = runner.query(
- FetchExecutableJobPropsHandler.FETCH_INPUT_PARAM_EXECUTABLE_NODE,
- new FetchExecutableJobPropsHandler(),
- execId,
- jobId);
- return props.getFirst();
- }
- catch (SQLException e) {
- throw new ExecutorManagerException(
- "Error querying job params " + execId + " " + jobId, e);
- }
- }
-
- @Override
- public Props fetchExecutionJobOutputProps(int execId, String jobId)
- throws ExecutorManagerException {
- QueryRunner runner = createQueryRunner();
- try {
- Pair<Props, Props> props = runner.query(
- FetchExecutableJobPropsHandler.FETCH_OUTPUT_PARAM_EXECUTABLE_NODE,
- new FetchExecutableJobPropsHandler(),
- execId,
- jobId);
- return props.getFirst();
- }
- catch (SQLException e) {
- throw new ExecutorManagerException(
- "Error querying job params " + execId + " " + jobId, e);
- }
- }
-
- @Override
- public Pair<Props, Props> fetchExecutionJobProps(int execId, String jobId)
- throws ExecutorManagerException {
- QueryRunner runner = createQueryRunner();
- try {
- Pair<Props, Props> props = runner.query(
- FetchExecutableJobPropsHandler.FETCH_INPUT_OUTPUT_PARAM_EXECUTABLE_NODE,
- new FetchExecutableJobPropsHandler(),
- execId,
- jobId);
- return props;
- }
- catch (SQLException e) {
- throw new ExecutorManagerException(
- "Error querying job params " + execId + " " + jobId, e);
- }
- }
-
- @Override
- public List<ExecutableJobInfo> fetchJobHistory(
- int projectId, String jobId, int skip, int size)
- throws ExecutorManagerException {
- QueryRunner runner = createQueryRunner();
-
- try {
- List<ExecutableJobInfo> info = runner.query(
- FetchExecutableJobHandler.FETCH_PROJECT_EXECUTABLE_NODE,
- new FetchExecutableJobHandler(),
- projectId,
- jobId,
- skip,
- size);
- if (info == null || info.isEmpty()) {
- return null;
- }
- return info;
- }
- catch (SQLException e) {
- throw new ExecutorManagerException("Error querying job info " + jobId, e);
- }
- }
-
- @Override
- public LogData fetchLogs(
- int execId, String name, int attempt, int startByte, int length)
- throws ExecutorManagerException {
- QueryRunner runner = createQueryRunner();
-
- FetchLogsHandler handler = new FetchLogsHandler(startByte, length + startByte);
- try {
- LogData result = runner.query(
- FetchLogsHandler.FETCH_LOGS,
- handler,
- execId,
- name,
- attempt,
- startByte,
- startByte + length);
- return result;
- }
- catch (SQLException e) {
- throw new ExecutorManagerException(
- "Error fetching logs " + execId + " : " + name, e);
- }
- }
-
- @Override
- public List<Object> fetchAttachments(int execId, String jobId, int attempt)
- throws ExecutorManagerException {
- QueryRunner runner = createQueryRunner();
-
- try {
- String attachments = runner.query(
- FetchExecutableJobAttachmentsHandler.FETCH_ATTACHMENTS_EXECUTABLE_NODE,
- new FetchExecutableJobAttachmentsHandler(),
- execId,
- jobId);
- if (attachments == null) {
- return null;
- }
-
- @SuppressWarnings("unchecked")
- List<Object> attachmentList = (List<Object>) JSONUtils.parseJSONFromString(attachments);
-
- return attachmentList;
- }
- catch (IOException e) {
- throw new ExecutorManagerException(
- "Error converting job attachments to JSON " + jobId, e);
- }
- catch (SQLException e) {
- throw new ExecutorManagerException(
- "Error query job attachments " + jobId, e);
- }
- }
-
- @Override
- public void uploadLogFile(
- int execId, String name, int attempt, File ... files)
- throws ExecutorManagerException {
- Connection connection = getConnection();
- try {
- uploadLogFile(
- connection, execId, name, attempt, files, defaultEncodingType);
- connection.commit();
- }
- catch (SQLException e) {
- throw new ExecutorManagerException("Error committing log", e);
- }
- catch (IOException e) {
- throw new ExecutorManagerException("Error committing log", e);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
- }
-
  /**
   * Reads the given files sequentially into a fixed 50KB buffer and writes
   * each full buffer as one row ("log part") via uploadLogPart. The buffer is
   * deliberately reused across files so a part may span a file boundary.
   * Wraps SQL and chunking IO failures in ExecutorManagerException.
   */
  private void uploadLogFile(
      Connection connection,
      int execId,
      String name,
      int attempt,
      File[] files,
      EncodingType encType) throws ExecutorManagerException, IOException {
    // 50K buffer... if logs are greater than this, we chunk.
    // However, we better prevent large log files from being uploaded somehow
    byte[] buffer = new byte[50*1024];
    int pos = 0;
    int length = buffer.length;
    int startByte = 0;
    try {
      for (int i = 0; i < files.length; ++i) {
        File file = files[i];

        BufferedInputStream bufferedStream = new BufferedInputStream(new FileInputStream(file));
        try {
          int size = bufferedStream.read(buffer, pos, length);
          while (size >= 0) {
            if (pos + size == buffer.length) {
              // Buffer is exactly full: flush it as one part and reset.
              uploadLogPart(
                  connection,
                  execId,
                  name,
                  attempt,
                  startByte,
                  startByte + buffer.length,
                  encType,
                  buffer,
                  buffer.length);

              pos = 0;
              length = buffer.length;
              startByte += buffer.length;
            }
            else {
              // Partial read (usually end of file): advance the write cursor
              // and shrink the remaining capacity.
              pos += size;
              length = buffer.length - pos;
            }
            size = bufferedStream.read(buffer, pos, length);
          }
        } finally {
          IOUtils.closeQuietly(bufferedStream);
        }
      }

      // Final commit of whatever remains in the buffer after the last file.
      if (pos > 0) {
        uploadLogPart(
            connection,
            execId,
            name,
            attempt,
            startByte,
            startByte + pos,
            encType,
            buffer,
            pos);
      }
    }
    catch (SQLException e) {
      throw new ExecutorManagerException("Error writing log part.", e);
    }
    catch (IOException e) {
      throw new ExecutorManagerException("Error chunking", e);
    }
  }
-
- private void uploadLogPart(
- Connection connection,
- int execId,
- String name,
- int attempt,
- int startByte,
- int endByte,
- EncodingType encType,
- byte[] buffer,
- int length) throws SQLException, IOException {
- final String INSERT_EXECUTION_LOGS =
- "INSERT INTO execution_logs " +
- "(exec_id, name, attempt, enc_type, start_byte, end_byte, " +
- "log, upload_time) VALUES (?,?,?,?,?,?,?,?)";
-
- QueryRunner runner = new QueryRunner();
- byte[] buf = buffer;
- if (encType == EncodingType.GZIP) {
- buf = GZIPUtils.gzipBytes(buf, 0, length);
- }
- else if (length < buf.length) {
- buf = Arrays.copyOf(buffer, length);
- }
-
- runner.update(
- connection,
- INSERT_EXECUTION_LOGS,
- execId,
- name,
- attempt,
- encType.getNumVal(),
- startByte,
- startByte + length,
- buf,
- DateTime.now().getMillis());
- }
-
- @Override
- public void uploadAttachmentFile(ExecutableNode node, File file)
- throws ExecutorManagerException {
- Connection connection = getConnection();
- try {
- uploadAttachmentFile(connection, node, file, defaultEncodingType);
- connection.commit();
- }
- catch (SQLException e) {
- throw new ExecutorManagerException("Error committing attachments ", e);
- }
- catch (IOException e) {
- throw new ExecutorManagerException("Error uploading attachments ", e);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
- }
-
- private void uploadAttachmentFile(
- Connection connection,
- ExecutableNode node,
- File file,
- EncodingType encType) throws SQLException, IOException {
-
- String jsonString = FileUtils.readFileToString(file);
- byte[] attachments = GZIPUtils.gzipString(jsonString, "UTF-8");
-
- final String UPDATE_EXECUTION_NODE_ATTACHMENTS =
- "UPDATE execution_jobs " +
- "SET attachments=? " +
- "WHERE exec_id=? AND flow_id=? AND job_id=? AND attempt=?";
-
- QueryRunner runner = new QueryRunner();
- runner.update(
- connection,
- UPDATE_EXECUTION_NODE_ATTACHMENTS,
- attachments,
- node.getExecutableFlow().getExecutionId(),
- node.getParentFlow().getNestedId(),
- node.getId(),
- node.getAttempt());
- }
-
- private Connection getConnection() throws ExecutorManagerException {
- Connection connection = null;
- try {
- connection = super.getDBConnection(false);
- }
- catch (Exception e) {
- DbUtils.closeQuietly(connection);
- throw new ExecutorManagerException("Error getting DB connection.", e);
- }
- return connection;
- }
-
- private static class LastInsertID implements ResultSetHandler<Long> {
- private static String LAST_INSERT_ID = "SELECT LAST_INSERT_ID()";
- @Override
- public Long handle(ResultSet rs) throws SQLException {
- if (!rs.next()) {
- return -1l;
- }
- long id = rs.getLong(1);
- return id;
- }
- }
-
  /**
   * Reassembles a requested byte range of a log from its stored chunks.
   * Rows arrive ordered by start_byte; each chunk is optionally gunzipped,
   * then the overlap with [this.startByte, this.endByte) is appended to an
   * in-memory stream. The final result is trimmed to a valid UTF-8 boundary.
   */
  private static class FetchLogsHandler implements ResultSetHandler<LogData> {
    private static String FETCH_LOGS =
        "SELECT exec_id, name, attempt, enc_type, start_byte, end_byte, log " +
        "FROM execution_logs " +
        "WHERE exec_id=? AND name=? AND attempt=? AND end_byte > ? " +
        "AND start_byte <= ? ORDER BY start_byte";

    // Requested range, absolute offsets within the whole log.
    private int startByte;
    private int endByte;

    public FetchLogsHandler(int startByte, int endByte) {
      this.startByte = startByte;
      this.endByte = endByte;
    }

    @Override
    public LogData handle(ResultSet rs) throws SQLException {
      if (!rs.next()) {
        return null;
      }

      ByteArrayOutputStream byteStream = new ByteArrayOutputStream();

      do {
        //int execId = rs.getInt(1);
        //String name = rs.getString(2);
        @SuppressWarnings("unused")
        int attempt = rs.getInt(3);
        EncodingType encType = EncodingType.fromInteger(rs.getInt(4));
        // Absolute range covered by this chunk; shadows the field names on
        // purpose — the fields are the requested range, these are the chunk's.
        int startByte = rs.getInt(5);
        int endByte = rs.getInt(6);

        byte[] data = rs.getBytes(7);

        // Skip the part of the chunk that precedes the requested start.
        int offset = this.startByte > startByte
            ? this.startByte - startByte
            : 0;
        // Copy only up to the requested end (or the chunk end, if smaller).
        int length = this.endByte < endByte
            ? this.endByte - startByte - offset
            : endByte - startByte - offset;
        try {
          byte[] buffer = data;
          if (encType == EncodingType.GZIP) {
            buffer = GZIPUtils.unGzipBytes(data);
          }

          byteStream.write(buffer, offset, length);
        }
        catch (IOException e) {
          throw new SQLException(e);
        }
      } while (rs.next());

      byte[] buffer = byteStream.toByteArray();
      // Trim to whole UTF-8 characters so the returned String is well-formed.
      Pair<Integer,Integer> result = FileIOUtils.getUtf8Range(
          buffer, 0, buffer.length);

      return new LogData(
          startByte + result.getFirst(),
          result.getSecond(),
          new String(buffer, result.getFirst(), result.getSecond()));
    }
  }
-
- private static class FetchExecutableJobHandler
- implements ResultSetHandler<List<ExecutableJobInfo>> {
- private static String FETCH_EXECUTABLE_NODE =
- "SELECT exec_id, project_id, version, flow_id, job_id, " +
- "start_time, end_time, status, attempt " +
- "FROM execution_jobs WHERE exec_id=? " +
- "AND job_id=? AND attempt_id=?";
- private static String FETCH_EXECUTABLE_NODE_ATTEMPTS =
- "SELECT exec_id, project_id, version, flow_id, job_id, " +
- "start_time, end_time, status, attempt FROM execution_jobs " +
- "WHERE exec_id=? AND job_id=?";
- private static String FETCH_PROJECT_EXECUTABLE_NODE =
- "SELECT exec_id, project_id, version, flow_id, job_id, " +
- "start_time, end_time, status, attempt FROM execution_jobs " +
- "WHERE project_id=? AND job_id=? " +
- "ORDER BY exec_id DESC LIMIT ?, ? ";
-
- @Override
- public List<ExecutableJobInfo> handle(ResultSet rs) throws SQLException {
- if (!rs.next()) {
- return Collections.<ExecutableJobInfo>emptyList();
- }
-
- List<ExecutableJobInfo> execNodes = new ArrayList<ExecutableJobInfo>();
- do {
- int execId = rs.getInt(1);
- int projectId = rs.getInt(2);
- int version = rs.getInt(3);
- String flowId = rs.getString(4);
- String jobId = rs.getString(5);
- long startTime = rs.getLong(6);
- long endTime = rs.getLong(7);
- Status status = Status.fromInteger(rs.getInt(8));
- int attempt = rs.getInt(9);
-
- ExecutableJobInfo info = new ExecutableJobInfo(
- execId,
- projectId,
- version,
- flowId,
- jobId,
- startTime,
- endTime,
- status,
- attempt);
- execNodes.add(info);
- } while (rs.next());
-
- return execNodes;
- }
- }
-
- private static class FetchExecutableJobAttachmentsHandler
- implements ResultSetHandler<String> {
- private static String FETCH_ATTACHMENTS_EXECUTABLE_NODE =
- "SELECT attachments FROM execution_jobs WHERE exec_id=? AND job_id=?";
-
- @Override
- public String handle(ResultSet rs) throws SQLException {
- String attachmentsJson = null;
- if (rs.next()) {
- try {
- byte[] attachments = rs.getBytes(1);
- if (attachments != null) {
- attachmentsJson = GZIPUtils.unGzipString(attachments, "UTF-8");
- }
- }
- catch (IOException e) {
- throw new SQLException("Error decoding job attachments", e);
- }
- }
- return attachmentsJson;
- }
- }
-
  /**
   * Decodes the gzipped-JSON property blobs of a job row. Supports three
   * queries: input-only, output-only, and both. With a two-column result the
   * pair is (input, output); with one column the decoded blob is returned in
   * the first slot and the second is null. A missing row yields (null, null).
   */
  private static class FetchExecutableJobPropsHandler
      implements ResultSetHandler<Pair<Props, Props>> {
    private static String FETCH_OUTPUT_PARAM_EXECUTABLE_NODE =
        "SELECT output_params FROM execution_jobs WHERE exec_id=? AND job_id=?";
    private static String FETCH_INPUT_PARAM_EXECUTABLE_NODE =
        "SELECT input_params FROM execution_jobs WHERE exec_id=? AND job_id=?";
    private static String FETCH_INPUT_OUTPUT_PARAM_EXECUTABLE_NODE =
        "SELECT input_params, output_params " +
        "FROM execution_jobs WHERE exec_id=? AND job_id=?";

    @SuppressWarnings("unchecked")
    @Override
    public Pair<Props, Props> handle(ResultSet rs) throws SQLException {
      if (!rs.next()) {
        return new Pair<Props, Props>(null, null);
      }

      // Two columns => the input+output query; decode both blobs.
      if (rs.getMetaData().getColumnCount() > 1) {
        byte[] input = rs.getBytes(1);
        byte[] output = rs.getBytes(2);

        Props inputProps = null;
        Props outputProps = null;
        try {
          if (input != null) {
            String jsonInputString = GZIPUtils.unGzipString(input, "UTF-8");
            inputProps = PropsUtils.fromHierarchicalMap(
                (Map<String, Object>)JSONUtils.parseJSONFromString(jsonInputString));

          }
          if (output != null) {
            String jsonOutputString = GZIPUtils.unGzipString(output, "UTF-8");
            outputProps = PropsUtils.fromHierarchicalMap(
                (Map<String, Object>) JSONUtils.parseJSONFromString(jsonOutputString));
          }
        }
        catch (IOException e) {
          throw new SQLException("Error decoding param data", e);
        }

        return new Pair<Props, Props>(inputProps, outputProps);
      }
      else {
        // One column => input-only or output-only query; whichever it is,
        // the decoded props go into the first slot of the pair.
        byte[] params = rs.getBytes(1);
        Props props = null;
        try {
          if (params != null) {
            String jsonProps = GZIPUtils.unGzipString(params, "UTF-8");

            props = PropsUtils.fromHierarchicalMap(
                (Map<String, Object>)JSONUtils.parseJSONFromString(jsonProps));
          }
        }
        catch (IOException e) {
          throw new SQLException("Error decoding param data", e);
        }

        return new Pair<Props,Props>(props, null);
      }
    }
  }
-
  /**
   * Builds a map exec_id -> (ExecutionReference, ExecutableFlow) for every
   * execution joined against active_executing_flows. The flow blob is JSON,
   * optionally gzipped (per enc_type); a null blob maps the id to null.
   */
  private static class FetchActiveExecutableFlows
      implements ResultSetHandler<Map<Integer, Pair<ExecutionReference,ExecutableFlow>>> {
    private static String FETCH_ACTIVE_EXECUTABLE_FLOW =
        "SELECT ex.exec_id exec_id, ex.enc_type enc_type, ex.flow_data " +
        "flow_data, ax.host host, ax.port port, ax.update_time " +
        "axUpdateTime " +
        "FROM execution_flows ex " +
        "INNER JOIN active_executing_flows ax ON ex.exec_id = ax.exec_id";

    @Override
    public Map<Integer, Pair<ExecutionReference,ExecutableFlow>> handle(ResultSet rs)
        throws SQLException {
      if (!rs.next()) {
        return Collections.<Integer, Pair<ExecutionReference,ExecutableFlow>>emptyMap();
      }

      Map<Integer, Pair<ExecutionReference,ExecutableFlow>> execFlows =
          new HashMap<Integer, Pair<ExecutionReference,ExecutableFlow>>();
      do {
        int id = rs.getInt(1);
        int encodingType = rs.getInt(2);
        byte[] data = rs.getBytes(3);
        String host = rs.getString(4);
        int port = rs.getInt(5);
        long updateTime = rs.getLong(6);

        if (data == null) {
          // Flow data missing: still record the id so callers see it exists.
          execFlows.put(id, null);
        }
        else {
          EncodingType encType = EncodingType.fromInteger(encodingType);
          Object flowObj;
          try {
            // Convoluted way to inflate strings. Should find common package or
            // helper function.
            if (encType == EncodingType.GZIP) {
              // Decompress the sucker.
              String jsonString = GZIPUtils.unGzipString(data, "UTF-8");
              flowObj = JSONUtils.parseJSONFromString(jsonString);
            }
            else {
              String jsonString = new String(data, "UTF-8");
              flowObj = JSONUtils.parseJSONFromString(jsonString);
            }

            ExecutableFlow exFlow =
                ExecutableFlow.createExecutableFlowFromObject(flowObj);
            // The reference carries the executor host/port plus its last
            // heartbeat time from active_executing_flows.
            ExecutionReference ref = new ExecutionReference(id, host, port);
            ref.setUpdateTime(updateTime);

            execFlows.put(
                id, new Pair<ExecutionReference, ExecutableFlow>(ref, exFlow));
          }
          catch (IOException e) {
            throw new SQLException("Error retrieving flow data " + id, e);
          }
        }
      } while (rs.next());

      return execFlows;
    }
  }
-
  /**
   * Deserializes execution_flows rows into ExecutableFlow objects. The
   * flow_data blob is JSON, optionally gzipped per enc_type; rows with a
   * null blob are silently skipped. Also hosts the SQL used by the various
   * fetch/history methods.
   */
  private static class FetchExecutableFlows
      implements ResultSetHandler<List<ExecutableFlow>> {
    // Base SELECT that fetchFlowHistory(search) extends with WHERE clauses.
    private static String FETCH_BASE_EXECUTABLE_FLOW_QUERY =
        "SELECT exec_id, enc_type, flow_data FROM execution_flows ";
    private static String FETCH_EXECUTABLE_FLOW =
        "SELECT exec_id, enc_type, flow_data FROM execution_flows " +
        "WHERE exec_id=?";
    //private static String FETCH_ACTIVE_EXECUTABLE_FLOW =
    //  "SELECT ex.exec_id exec_id, ex.enc_type enc_type, ex.flow_data flow_data " +
    //  "FROM execution_flows ex " +
    //  "INNER JOIN active_executing_flows ax ON ex.exec_id = ax.exec_id";
    private static String FETCH_ALL_EXECUTABLE_FLOW_HISTORY =
        "SELECT exec_id, enc_type, flow_data FROM execution_flows " +
        "ORDER BY exec_id DESC LIMIT ?, ?";
    private static String FETCH_EXECUTABLE_FLOW_HISTORY =
        "SELECT exec_id, enc_type, flow_data FROM execution_flows " +
        "WHERE project_id=? AND flow_id=? " +
        "ORDER BY exec_id DESC LIMIT ?, ?";
    private static String FETCH_EXECUTABLE_FLOW_BY_STATUS =
        "SELECT exec_id, enc_type, flow_data FROM execution_flows " +
        "WHERE project_id=? AND flow_id=? AND status=? " +
        "ORDER BY exec_id DESC LIMIT ?, ?";

    @Override
    public List<ExecutableFlow> handle(ResultSet rs) throws SQLException {
      if (!rs.next()) {
        return Collections.<ExecutableFlow>emptyList();
      }

      List<ExecutableFlow> execFlows = new ArrayList<ExecutableFlow>();
      do {
        int id = rs.getInt(1);
        int encodingType = rs.getInt(2);
        byte[] data = rs.getBytes(3);

        if (data != null) {
          EncodingType encType = EncodingType.fromInteger(encodingType);
          Object flowObj;
          try {
            // Convoluted way to inflate strings. Should find common package
            // or helper function.
            if (encType == EncodingType.GZIP) {
              // Decompress the sucker.
              String jsonString = GZIPUtils.unGzipString(data, "UTF-8");
              flowObj = JSONUtils.parseJSONFromString(jsonString);
            }
            else {
              String jsonString = new String(data, "UTF-8");
              flowObj = JSONUtils.parseJSONFromString(jsonString);
            }

            ExecutableFlow exFlow =
                ExecutableFlow.createExecutableFlowFromObject(flowObj);
            execFlows.add(exFlow);
          }
          catch (IOException e) {
            throw new SQLException("Error retrieving flow data " + id, e);
          }
        }
      } while (rs.next());

      return execFlows;
    }
  }
-
- private static class IntHandler implements ResultSetHandler<Integer> {
- private static String NUM_EXECUTIONS =
- "SELECT COUNT(1) FROM execution_flows";
- private static String NUM_FLOW_EXECUTIONS =
- "SELECT COUNT(1) FROM execution_flows WHERE project_id=? AND flow_id=?";
- private static String NUM_JOB_EXECUTIONS =
- "SELECT COUNT(1) FROM execution_jobs WHERE project_id=? AND job_id=?";
-
- @Override
- public Integer handle(ResultSet rs) throws SQLException {
- if (!rs.next()) {
- return 0;
- }
- return rs.getInt(1);
- }
- }
-
- @Override
- public int removeExecutionLogsByTime(long millis)
- throws ExecutorManagerException {
- final String DELETE_BY_TIME =
- "DELETE FROM execution_logs WHERE upload_time < ?";
-
- QueryRunner runner = createQueryRunner();
- int updateNum = 0;
- try {
- updateNum = runner.update(DELETE_BY_TIME, millis);
- }
- catch (SQLException e) {
- e.printStackTrace();
- throw new ExecutorManagerException(
- "Error deleting old execution_logs before " + millis, e);
- }
-
- return updateNum;
- }
+public class JdbcExecutorLoader extends AbstractJdbcLoader implements
+ ExecutorLoader {
+ private static final Logger logger = Logger
+ .getLogger(JdbcExecutorLoader.class);
+
+ private EncodingType defaultEncodingType = EncodingType.GZIP;
+
  /** Creates a loader backed by the DB settings carried in {@code props}. */
  public JdbcExecutorLoader(Props props) {
    super(props);
  }

  /** Returns the encoding applied to persisted flow/job blobs (GZIP by default). */
  public EncodingType getDefaultEncodingType() {
    return defaultEncodingType;
  }

  /** Overrides the encoding applied to newly persisted flow/job blobs. */
  public void setDefaultEncodingType(EncodingType defaultEncodingType) {
    this.defaultEncodingType = defaultEncodingType;
  }
+
+ @Override
+ public synchronized void uploadExecutableFlow(ExecutableFlow flow)
+ throws ExecutorManagerException {
+ Connection connection = getConnection();
+ try {
+ uploadExecutableFlow(connection, flow, defaultEncodingType);
+ } catch (IOException e) {
+ throw new ExecutorManagerException("Error uploading flow", e);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+ }
+
+ private synchronized void uploadExecutableFlow(Connection connection,
+ ExecutableFlow flow, EncodingType encType)
+ throws ExecutorManagerException, IOException {
+ final String INSERT_EXECUTABLE_FLOW =
+ "INSERT INTO execution_flows "
+ + "(project_id, flow_id, version, status, submit_time, submit_user, update_time) "
+ + "values (?,?,?,?,?,?,?)";
+ QueryRunner runner = new QueryRunner();
+ long submitTime = System.currentTimeMillis();
+
+ long id;
+ try {
+ flow.setStatus(Status.PREPARING);
+ runner.update(connection, INSERT_EXECUTABLE_FLOW, flow.getProjectId(),
+ flow.getFlowId(), flow.getVersion(), Status.PREPARING.getNumVal(),
+ submitTime, flow.getSubmitUser(), submitTime);
+ connection.commit();
+ id =
+ runner.query(connection, LastInsertID.LAST_INSERT_ID,
+ new LastInsertID());
+
+ if (id == -1l) {
+ throw new ExecutorManagerException(
+ "Execution id is not properly created.");
+ }
+ logger.info("Flow given " + flow.getFlowId() + " given id " + id);
+ flow.setExecutionId((int) id);
+
+ updateExecutableFlow(connection, flow, encType);
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error creating execution.", e);
+ }
+ }
+
+ @Override
+ public void updateExecutableFlow(ExecutableFlow flow)
+ throws ExecutorManagerException {
+ Connection connection = this.getConnection();
+
+ try {
+ updateExecutableFlow(connection, flow, defaultEncodingType);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+ }
+
+ private void updateExecutableFlow(Connection connection, ExecutableFlow flow,
+ EncodingType encType) throws ExecutorManagerException {
+ final String UPDATE_EXECUTABLE_FLOW_DATA =
+ "UPDATE execution_flows "
+ + "SET status=?,update_time=?,start_time=?,end_time=?,enc_type=?,flow_data=? "
+ + "WHERE exec_id=?";
+ QueryRunner runner = new QueryRunner();
+
+ String json = JSONUtils.toJSON(flow.toObject());
+ byte[] data = null;
+ try {
+ byte[] stringData = json.getBytes("UTF-8");
+ data = stringData;
+
+ if (encType == EncodingType.GZIP) {
+ data = GZIPUtils.gzipBytes(stringData);
+ }
+ } catch (IOException e) {
+ throw new ExecutorManagerException("Error encoding the execution flow.");
+ }
+
+ try {
+ runner.update(connection, UPDATE_EXECUTABLE_FLOW_DATA, flow.getStatus()
+ .getNumVal(), flow.getUpdateTime(), flow.getStartTime(), flow
+ .getEndTime(), encType.getNumVal(), data, flow.getExecutionId());
+ connection.commit();
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error updating flow.", e);
+ }
+ }
+
+ @Override
+ public ExecutableFlow fetchExecutableFlow(int id)
+ throws ExecutorManagerException {
+ QueryRunner runner = createQueryRunner();
+ FetchExecutableFlows flowHandler = new FetchExecutableFlows();
+
+ try {
+ List<ExecutableFlow> properties =
+ runner.query(FetchExecutableFlows.FETCH_EXECUTABLE_FLOW, flowHandler,
+ id);
+ return properties.get(0);
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error fetching flow id " + id, e);
+ }
+ }
+
+ @Override
+ public Map<Integer, Pair<ExecutionReference, ExecutableFlow>> fetchActiveFlows()
+ throws ExecutorManagerException {
+ QueryRunner runner = createQueryRunner();
+ FetchActiveExecutableFlows flowHandler = new FetchActiveExecutableFlows();
+
+ try {
+ Map<Integer, Pair<ExecutionReference, ExecutableFlow>> properties =
+ runner.query(FetchActiveExecutableFlows.FETCH_ACTIVE_EXECUTABLE_FLOW,
+ flowHandler);
+ return properties;
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error fetching active flows", e);
+ }
+ }
+
+ @Override
+ public int fetchNumExecutableFlows() throws ExecutorManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ IntHandler intHandler = new IntHandler();
+ try {
+ int count = runner.query(IntHandler.NUM_EXECUTIONS, intHandler);
+ return count;
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error fetching num executions", e);
+ }
+ }
+
+ @Override
+ public int fetchNumExecutableFlows(int projectId, String flowId)
+ throws ExecutorManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ IntHandler intHandler = new IntHandler();
+ try {
+ int count =
+ runner.query(IntHandler.NUM_FLOW_EXECUTIONS, intHandler, projectId,
+ flowId);
+ return count;
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error fetching num executions", e);
+ }
+ }
+
+ @Override
+ public int fetchNumExecutableNodes(int projectId, String jobId)
+ throws ExecutorManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ IntHandler intHandler = new IntHandler();
+ try {
+ int count =
+ runner.query(IntHandler.NUM_JOB_EXECUTIONS, intHandler, projectId,
+ jobId);
+ return count;
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error fetching num executions", e);
+ }
+ }
+
+ @Override
+ public List<ExecutableFlow> fetchFlowHistory(int projectId, String flowId,
+ int skip, int num) throws ExecutorManagerException {
+ QueryRunner runner = createQueryRunner();
+ FetchExecutableFlows flowHandler = new FetchExecutableFlows();
+
+ try {
+ List<ExecutableFlow> properties =
+ runner.query(FetchExecutableFlows.FETCH_EXECUTABLE_FLOW_HISTORY,
+ flowHandler, projectId, flowId, skip, num);
+ return properties;
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error fetching active flows", e);
+ }
+ }
+
+ @Override
+ public List<ExecutableFlow> fetchFlowHistory(int projectId, String flowId,
+ int skip, int num, Status status) throws ExecutorManagerException {
+ QueryRunner runner = createQueryRunner();
+ FetchExecutableFlows flowHandler = new FetchExecutableFlows();
+
+ try {
+ List<ExecutableFlow> properties =
+ runner.query(FetchExecutableFlows.FETCH_EXECUTABLE_FLOW_BY_STATUS,
+ flowHandler, projectId, flowId, status.getNumVal(), skip, num);
+ return properties;
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error fetching active flows", e);
+ }
+ }
+
+ @Override
+ public List<ExecutableFlow> fetchFlowHistory(int skip, int num)
+ throws ExecutorManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ FetchExecutableFlows flowHandler = new FetchExecutableFlows();
+
+ try {
+ List<ExecutableFlow> properties =
+ runner.query(FetchExecutableFlows.FETCH_ALL_EXECUTABLE_FLOW_HISTORY,
+ flowHandler, skip, num);
+ return properties;
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error fetching active flows", e);
+ }
+ }
+
+ @Override
+ public List<ExecutableFlow> fetchFlowHistory(String projContain,
+ String flowContains, String userNameContains, int status, long startTime,
+ long endTime, int skip, int num) throws ExecutorManagerException {
+ String query = FetchExecutableFlows.FETCH_BASE_EXECUTABLE_FLOW_QUERY;
+ ArrayList<Object> params = new ArrayList<Object>();
+
+ boolean first = true;
+ if (projContain != null && !projContain.isEmpty()) {
+ query += " ef JOIN projects p ON ef.project_id = p.id WHERE name LIKE ?";
+ params.add('%' + projContain + '%');
+ first = false;
+ }
+
+ if (flowContains != null && !flowContains.isEmpty()) {
+ if (first) {
+ query += " WHERE ";
+ first = false;
+ } else {
+ query += " AND ";
+ }
+
+ query += " flow_id LIKE ?";
+ params.add('%' + flowContains + '%');
+ }
+
+ if (userNameContains != null && !userNameContains.isEmpty()) {
+ if (first) {
+ query += " WHERE ";
+ first = false;
+ } else {
+ query += " AND ";
+ }
+ query += " submit_user LIKE ?";
+ params.add('%' + userNameContains + '%');
+ }
+
+ if (status != 0) {
+ if (first) {
+ query += " WHERE ";
+ first = false;
+ } else {
+ query += " AND ";
+ }
+ query += " status = ?";
+ params.add(status);
+ }
+
+ if (startTime > 0) {
+ if (first) {
+ query += " WHERE ";
+ first = false;
+ } else {
+ query += " AND ";
+ }
+ query += " start_time > ?";
+ params.add(startTime);
+ }
+
+ if (endTime > 0) {
+ if (first) {
+ query += " WHERE ";
+ first = false;
+ } else {
+ query += " AND ";
+ }
+ query += " end_time < ?";
+ params.add(endTime);
+ }
+
+ if (skip > -1 && num > 0) {
+ query += " ORDER BY exec_id DESC LIMIT ?, ?";
+ params.add(skip);
+ params.add(num);
+ }
+
+ QueryRunner runner = createQueryRunner();
+ FetchExecutableFlows flowHandler = new FetchExecutableFlows();
+
+ try {
+ List<ExecutableFlow> properties =
+ runner.query(query, flowHandler, params.toArray());
+ return properties;
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error fetching active flows", e);
+ }
+ }
+
+ @Override
+ public void addActiveExecutableReference(ExecutionReference reference)
+ throws ExecutorManagerException {
+ final String INSERT =
+ "INSERT INTO active_executing_flows "
+ + "(exec_id, host, port, update_time) values (?,?,?,?)";
+ QueryRunner runner = createQueryRunner();
+
+ try {
+ runner.update(INSERT, reference.getExecId(), reference.getHost(),
+ reference.getPort(), reference.getUpdateTime());
+ } catch (SQLException e) {
+ throw new ExecutorManagerException(
+ "Error updating active flow reference " + reference.getExecId(), e);
+ }
+ }
+
+ @Override
+ public void removeActiveExecutableReference(int execid)
+ throws ExecutorManagerException {
+ final String DELETE = "DELETE FROM active_executing_flows WHERE exec_id=?";
+
+ QueryRunner runner = createQueryRunner();
+ try {
+ runner.update(DELETE, execid);
+ } catch (SQLException e) {
+ throw new ExecutorManagerException(
+ "Error deleting active flow reference " + execid, e);
+ }
+ }
+
+ @Override
+ public boolean updateExecutableReference(int execId, long updateTime)
+ throws ExecutorManagerException {
+ final String DELETE =
+ "UPDATE active_executing_flows set update_time=? WHERE exec_id=?";
+
+ QueryRunner runner = createQueryRunner();
+ int updateNum = 0;
+ try {
+ updateNum = runner.update(DELETE, updateTime, execId);
+ } catch (SQLException e) {
+ throw new ExecutorManagerException(
+ "Error deleting active flow reference " + execId, e);
+ }
+
+ // Should be 1.
+ return updateNum > 0;
+ }
+
+ @Override
+ public void uploadExecutableNode(ExecutableNode node, Props inputProps)
+ throws ExecutorManagerException {
+ final String INSERT_EXECUTION_NODE =
+ "INSERT INTO execution_jobs "
+ + "(exec_id, project_id, version, flow_id, job_id, start_time, "
+ + "end_time, status, input_params, attempt) VALUES (?,?,?,?,?,?,?,?,?,?)";
+
+ byte[] inputParam = null;
+ if (inputProps != null) {
+ try {
+ String jsonString =
+ JSONUtils.toJSON(PropsUtils.toHierarchicalMap(inputProps));
+ inputParam = GZIPUtils.gzipString(jsonString, "UTF-8");
+ } catch (IOException e) {
+ throw new ExecutorManagerException("Error encoding input params");
+ }
+ }
+
+ ExecutableFlow flow = node.getExecutableFlow();
+ String flowId = node.getParentFlow().getFlowPath();
+ System.out.println("Uploading flowId " + flowId);
+ QueryRunner runner = createQueryRunner();
+ try {
+ runner.update(INSERT_EXECUTION_NODE, flow.getExecutionId(),
+ flow.getProjectId(), flow.getVersion(), flowId, node.getId(),
+ node.getStartTime(), node.getEndTime(), node.getStatus().getNumVal(),
+ inputParam, node.getAttempt());
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error writing job " + node.getId(), e);
+ }
+ }
+
+ @Override
+ public void updateExecutableNode(ExecutableNode node)
+ throws ExecutorManagerException {
+ final String UPSERT_EXECUTION_NODE =
+ "UPDATE execution_jobs "
+ + "SET start_time=?, end_time=?, status=?, output_params=? "
+ + "WHERE exec_id=? AND flow_id=? AND job_id=? AND attempt=?";
+
+ byte[] outputParam = null;
+ Props outputProps = node.getOutputProps();
+ if (outputProps != null) {
+ try {
+ String jsonString =
+ JSONUtils.toJSON(PropsUtils.toHierarchicalMap(outputProps));
+ outputParam = GZIPUtils.gzipString(jsonString, "UTF-8");
+ } catch (IOException e) {
+ throw new ExecutorManagerException("Error encoding input params");
+ }
+ }
+
+ QueryRunner runner = createQueryRunner();
+ try {
+ runner.update(UPSERT_EXECUTION_NODE, node.getStartTime(), node
+ .getEndTime(), node.getStatus().getNumVal(), outputParam, node
+ .getExecutableFlow().getExecutionId(), node.getParentFlow()
+ .getFlowPath(), node.getId(), node.getAttempt());
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error updating job " + node.getId(),
+ e);
+ }
+ }
+
+ @Override
+ public List<ExecutableJobInfo> fetchJobInfoAttempts(int execId, String jobId)
+ throws ExecutorManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ try {
+ List<ExecutableJobInfo> info =
+ runner.query(
+ FetchExecutableJobHandler.FETCH_EXECUTABLE_NODE_ATTEMPTS,
+ new FetchExecutableJobHandler(), execId, jobId);
+ if (info == null || info.isEmpty()) {
+ return null;
+ }
+ return info;
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error querying job info " + jobId, e);
+ }
+ }
+
+ @Override
+ public ExecutableJobInfo fetchJobInfo(int execId, String jobId, int attempts)
+ throws ExecutorManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ try {
+ List<ExecutableJobInfo> info =
+ runner.query(FetchExecutableJobHandler.FETCH_EXECUTABLE_NODE,
+ new FetchExecutableJobHandler(), execId, jobId, attempts);
+ if (info == null || info.isEmpty()) {
+ return null;
+ }
+ return info.get(0);
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error querying job info " + jobId, e);
+ }
+ }
+
+ @Override
+ public Props fetchExecutionJobInputProps(int execId, String jobId)
+ throws ExecutorManagerException {
+ QueryRunner runner = createQueryRunner();
+ try {
+ Pair<Props, Props> props =
+ runner.query(
+ FetchExecutableJobPropsHandler.FETCH_INPUT_PARAM_EXECUTABLE_NODE,
+ new FetchExecutableJobPropsHandler(), execId, jobId);
+ return props.getFirst();
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error querying job params " + execId
+ + " " + jobId, e);
+ }
+ }
+
+ @Override
+ public Props fetchExecutionJobOutputProps(int execId, String jobId)
+ throws ExecutorManagerException {
+ QueryRunner runner = createQueryRunner();
+ try {
+ Pair<Props, Props> props =
+ runner
+ .query(
+ FetchExecutableJobPropsHandler.FETCH_OUTPUT_PARAM_EXECUTABLE_NODE,
+ new FetchExecutableJobPropsHandler(), execId, jobId);
+ return props.getFirst();
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error querying job params " + execId
+ + " " + jobId, e);
+ }
+ }
+
+ @Override
+ public Pair<Props, Props> fetchExecutionJobProps(int execId, String jobId)
+ throws ExecutorManagerException {
+ QueryRunner runner = createQueryRunner();
+ try {
+ Pair<Props, Props> props =
+ runner
+ .query(
+ FetchExecutableJobPropsHandler.FETCH_INPUT_OUTPUT_PARAM_EXECUTABLE_NODE,
+ new FetchExecutableJobPropsHandler(), execId, jobId);
+ return props;
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error querying job params " + execId
+ + " " + jobId, e);
+ }
+ }
+
+ @Override
+ public List<ExecutableJobInfo> fetchJobHistory(int projectId, String jobId,
+ int skip, int size) throws ExecutorManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ try {
+ List<ExecutableJobInfo> info =
+ runner.query(FetchExecutableJobHandler.FETCH_PROJECT_EXECUTABLE_NODE,
+ new FetchExecutableJobHandler(), projectId, jobId, skip, size);
+ if (info == null || info.isEmpty()) {
+ return null;
+ }
+ return info;
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error querying job info " + jobId, e);
+ }
+ }
+
+ @Override
+ public LogData fetchLogs(int execId, String name, int attempt, int startByte,
+ int length) throws ExecutorManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ FetchLogsHandler handler =
+ new FetchLogsHandler(startByte, length + startByte);
+ try {
+ LogData result =
+ runner.query(FetchLogsHandler.FETCH_LOGS, handler, execId, name,
+ attempt, startByte, startByte + length);
+ return result;
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error fetching logs " + execId
+ + " : " + name, e);
+ }
+ }
+
+ @Override
+ public List<Object> fetchAttachments(int execId, String jobId, int attempt)
+ throws ExecutorManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ try {
+ String attachments =
+ runner
+ .query(
+ FetchExecutableJobAttachmentsHandler.FETCH_ATTACHMENTS_EXECUTABLE_NODE,
+ new FetchExecutableJobAttachmentsHandler(), execId, jobId);
+ if (attachments == null) {
+ return null;
+ }
+
+ @SuppressWarnings("unchecked")
+ List<Object> attachmentList =
+ (List<Object>) JSONUtils.parseJSONFromString(attachments);
+
+ return attachmentList;
+ } catch (IOException e) {
+ throw new ExecutorManagerException(
+ "Error converting job attachments to JSON " + jobId, e);
+ } catch (SQLException e) {
+ throw new ExecutorManagerException(
+ "Error query job attachments " + jobId, e);
+ }
+ }
+
+ @Override
+ public void uploadLogFile(int execId, String name, int attempt, File... files)
+ throws ExecutorManagerException {
+ Connection connection = getConnection();
+ try {
+ uploadLogFile(connection, execId, name, attempt, files,
+ defaultEncodingType);
+ connection.commit();
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error committing log", e);
+ } catch (IOException e) {
+ throw new ExecutorManagerException("Error committing log", e);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+ }
+
  /**
   * Streams the given files into execution_logs in fixed-size chunks.
   *
   * The 50KB buffer is filled across file boundaries: a chunk is only flushed
   * when the buffer is exactly full, so one DB row may contain bytes from the
   * end of one file and the start of the next. Any remainder is flushed after
   * the loop. The caller owns the connection and commits/closes it.
   *
   * @param connection open connection with auto-commit disabled
   * @param encType    encoding applied per chunk (e.g. GZIP)
   * @throws ExecutorManagerException if a chunk insert or read fails
   */
  private void uploadLogFile(Connection connection, int execId, String name,
      int attempt, File[] files, EncodingType encType)
      throws ExecutorManagerException, IOException {
    // 50K buffer... if logs are greater than this, we chunk.
    // However, we better prevent large log files from being uploaded somehow
    byte[] buffer = new byte[50 * 1024];
    int pos = 0;                  // next write position within buffer
    int length = buffer.length;   // free space remaining in buffer
    int startByte = 0;            // absolute offset of buffer[0] in the log
    try {
      for (int i = 0; i < files.length; ++i) {
        File file = files[i];

        BufferedInputStream bufferedStream =
            new BufferedInputStream(new FileInputStream(file));
        try {
          int size = bufferedStream.read(buffer, pos, length);
          while (size >= 0) {
            if (pos + size == buffer.length) {
              // Buffer exactly full: flush the whole chunk and reset.
              uploadLogPart(connection, execId, name, attempt, startByte,
                  startByte + buffer.length, encType, buffer, buffer.length);

              pos = 0;
              length = buffer.length;
              startByte += buffer.length;
            } else {
              // Partial read (usually end of file): keep accumulating.
              pos += size;
              length = buffer.length - pos;
            }
            size = bufferedStream.read(buffer, pos, length);
          }
        } finally {
          IOUtils.closeQuietly(bufferedStream);
        }
      }

      // Final commit of buffer: flush whatever is left after the last file.
      if (pos > 0) {
        uploadLogPart(connection, execId, name, attempt, startByte, startByte
            + pos, encType, buffer, pos);
      }
    } catch (SQLException e) {
      throw new ExecutorManagerException("Error writing log part.", e);
    } catch (IOException e) {
      throw new ExecutorManagerException("Error chunking", e);
    }
  }
+
  /**
   * Inserts one log chunk row, optionally gzipping the payload first.
   *
   * NOTE(review): the endByte parameter is not used; the stored end_byte is
   * recomputed as startByte + length (callers pass values that agree, but
   * confirm before relying on endByte).
   *
   * @param buffer source bytes; only the first {@code length} bytes are stored
   * @param length number of valid bytes in {@code buffer}
   */
  private void uploadLogPart(Connection connection, int execId, String name,
      int attempt, int startByte, int endByte, EncodingType encType,
      byte[] buffer, int length) throws SQLException, IOException {
    final String INSERT_EXECUTION_LOGS =
        "INSERT INTO execution_logs "
            + "(exec_id, name, attempt, enc_type, start_byte, end_byte, "
            + "log, upload_time) VALUES (?,?,?,?,?,?,?,?)";

    QueryRunner runner = new QueryRunner();
    byte[] buf = buffer;
    if (encType == EncodingType.GZIP) {
      // Gzip compacts the valid prefix; the stored blob may be any size.
      buf = GZIPUtils.gzipBytes(buf, 0, length);
    } else if (length < buf.length) {
      // Trim the unused tail so we never store stale buffer bytes.
      buf = Arrays.copyOf(buffer, length);
    }

    runner.update(connection, INSERT_EXECUTION_LOGS, execId, name, attempt,
        encType.getNumVal(), startByte, startByte + length, buf, DateTime.now()
            .getMillis());
  }
+
+ @Override
+ public void uploadAttachmentFile(ExecutableNode node, File file)
+ throws ExecutorManagerException {
+ Connection connection = getConnection();
+ try {
+ uploadAttachmentFile(connection, node, file, defaultEncodingType);
+ connection.commit();
+ } catch (SQLException e) {
+ throw new ExecutorManagerException("Error committing attachments ", e);
+ } catch (IOException e) {
+ throw new ExecutorManagerException("Error uploading attachments ", e);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+ }
+
  /**
   * Gzips the attachment file's contents (assumed JSON text) and writes them
   * into the node's execution_jobs row.
   *
   * NOTE(review): the WHERE clause keys on getNestedId(), while
   * uploadExecutableNode/updateExecutableNode store flow_id from
   * getFlowPath(). If those differ for nested flows, this UPDATE matches no
   * rows — confirm the two ids agree.
   *
   * @param encType declared for symmetry with uploadLogPart; gzip is applied
   *        unconditionally here
   */
  private void uploadAttachmentFile(Connection connection, ExecutableNode node,
      File file, EncodingType encType) throws SQLException, IOException {

    String jsonString = FileUtils.readFileToString(file);
    byte[] attachments = GZIPUtils.gzipString(jsonString, "UTF-8");

    final String UPDATE_EXECUTION_NODE_ATTACHMENTS =
        "UPDATE execution_jobs " + "SET attachments=? "
            + "WHERE exec_id=? AND flow_id=? AND job_id=? AND attempt=?";

    QueryRunner runner = new QueryRunner();
    runner.update(connection, UPDATE_EXECUTION_NODE_ATTACHMENTS, attachments,
        node.getExecutableFlow().getExecutionId(), node.getParentFlow()
            .getNestedId(), node.getId(), node.getAttempt());
  }
+
+ private Connection getConnection() throws ExecutorManagerException {
+ Connection connection = null;
+ try {
+ connection = super.getDBConnection(false);
+ } catch (Exception e) {
+ DbUtils.closeQuietly(connection);
+ throw new ExecutorManagerException("Error getting DB connection.", e);
+ }
+ return connection;
+ }
+
+ private static class LastInsertID implements ResultSetHandler<Long> {
+ private static String LAST_INSERT_ID = "SELECT LAST_INSERT_ID()";
+
+ @Override
+ public Long handle(ResultSet rs) throws SQLException {
+ if (!rs.next()) {
+ return -1l;
+ }
+ long id = rs.getLong(1);
+ return id;
+ }
+ }
+
  /**
   * Reassembles log chunks into a single LogData covering the requested
   * window [startByte, endByte).
   *
   * Each stored chunk carries its own absolute start/end offsets; this handler
   * trims the first and last chunks so only bytes inside the requested window
   * are emitted, then clips the result to whole UTF-8 characters via
   * getUtf8Range before building the string.
   */
  private static class FetchLogsHandler implements ResultSetHandler<LogData> {
    private static String FETCH_LOGS =
        "SELECT exec_id, name, attempt, enc_type, start_byte, end_byte, log "
            + "FROM execution_logs "
            + "WHERE exec_id=? AND name=? AND attempt=? AND end_byte > ? "
            + "AND start_byte <= ? ORDER BY start_byte";

    // Requested absolute byte window; chunks are trimmed against these.
    private int startByte;
    private int endByte;

    public FetchLogsHandler(int startByte, int endByte) {
      this.startByte = startByte;
      this.endByte = endByte;
    }

    @Override
    public LogData handle(ResultSet rs) throws SQLException {
      if (!rs.next()) {
        return null;
      }

      ByteArrayOutputStream byteStream = new ByteArrayOutputStream();

      do {
        // int execId = rs.getInt(1);
        // String name = rs.getString(2);
        @SuppressWarnings("unused")
        int attempt = rs.getInt(3);
        EncodingType encType = EncodingType.fromInteger(rs.getInt(4));
        // Chunk's own absolute offsets (shadow the requested-window fields).
        int startByte = rs.getInt(5);
        int endByte = rs.getInt(6);

        byte[] data = rs.getBytes(7);

        // Skip the part of this chunk that lies before the requested start.
        int offset =
            this.startByte > startByte ? this.startByte - startByte : 0;
        // Take up to the requested end, or the chunk's end, whichever is first.
        int length =
            this.endByte < endByte ? this.endByte - startByte - offset
                : endByte - startByte - offset;
        try {
          byte[] buffer = data;
          if (encType == EncodingType.GZIP) {
            buffer = GZIPUtils.unGzipBytes(data);
          }

          byteStream.write(buffer, offset, length);
        } catch (IOException e) {
          throw new SQLException(e);
        }
      } while (rs.next());

      byte[] buffer = byteStream.toByteArray();
      // Clip to whole UTF-8 characters so the string never starts or ends
      // mid-codepoint.
      Pair<Integer, Integer> result =
          FileIOUtils.getUtf8Range(buffer, 0, buffer.length);

      return new LogData(startByte + result.getFirst(), result.getSecond(),
          new String(buffer, result.getFirst(), result.getSecond()));
    }
  }
+
+ private static class FetchExecutableJobHandler implements
+ ResultSetHandler<List<ExecutableJobInfo>> {
+ private static String FETCH_EXECUTABLE_NODE =
+ "SELECT exec_id, project_id, version, flow_id, job_id, "
+ + "start_time, end_time, status, attempt "
+ + "FROM execution_jobs WHERE exec_id=? "
+ + "AND job_id=? AND attempt_id=?";
+ private static String FETCH_EXECUTABLE_NODE_ATTEMPTS =
+ "SELECT exec_id, project_id, version, flow_id, job_id, "
+ + "start_time, end_time, status, attempt FROM execution_jobs "
+ + "WHERE exec_id=? AND job_id=?";
+ private static String FETCH_PROJECT_EXECUTABLE_NODE =
+ "SELECT exec_id, project_id, version, flow_id, job_id, "
+ + "start_time, end_time, status, attempt FROM execution_jobs "
+ + "WHERE project_id=? AND job_id=? "
+ + "ORDER BY exec_id DESC LIMIT ?, ? ";
+
+ @Override
+ public List<ExecutableJobInfo> handle(ResultSet rs) throws SQLException {
+ if (!rs.next()) {
+ return Collections.<ExecutableJobInfo> emptyList();
+ }
+
+ List<ExecutableJobInfo> execNodes = new ArrayList<ExecutableJobInfo>();
+ do {
+ int execId = rs.getInt(1);
+ int projectId = rs.getInt(2);
+ int version = rs.getInt(3);
+ String flowId = rs.getString(4);
+ String jobId = rs.getString(5);
+ long startTime = rs.getLong(6);
+ long endTime = rs.getLong(7);
+ Status status = Status.fromInteger(rs.getInt(8));
+ int attempt = rs.getInt(9);
+
+ ExecutableJobInfo info =
+ new ExecutableJobInfo(execId, projectId, version, flowId, jobId,
+ startTime, endTime, status, attempt);
+ execNodes.add(info);
+ } while (rs.next());
+
+ return execNodes;
+ }
+ }
+
+ private static class FetchExecutableJobAttachmentsHandler implements
+ ResultSetHandler<String> {
+ private static String FETCH_ATTACHMENTS_EXECUTABLE_NODE =
+ "SELECT attachments FROM execution_jobs WHERE exec_id=? AND job_id=?";
+
+ @Override
+ public String handle(ResultSet rs) throws SQLException {
+ String attachmentsJson = null;
+ if (rs.next()) {
+ try {
+ byte[] attachments = rs.getBytes(1);
+ if (attachments != null) {
+ attachmentsJson = GZIPUtils.unGzipString(attachments, "UTF-8");
+ }
+ } catch (IOException e) {
+ throw new SQLException("Error decoding job attachments", e);
+ }
+ }
+ return attachmentsJson;
+ }
+ }
+
  /**
   * Decodes gzipped-JSON job properties into a (input, output) Props pair.
   *
   * One handler serves three queries: it dispatches on the result set's
   * column count. Two columns means the input+output query (pair is
   * (input, output)); one column means either single-column query, and the
   * decoded Props are always placed in pair.first with pair.second null —
   * callers of the single-column queries read getFirst().
   */
  private static class FetchExecutableJobPropsHandler implements
      ResultSetHandler<Pair<Props, Props>> {
    private static String FETCH_OUTPUT_PARAM_EXECUTABLE_NODE =
        "SELECT output_params FROM execution_jobs WHERE exec_id=? AND job_id=?";
    private static String FETCH_INPUT_PARAM_EXECUTABLE_NODE =
        "SELECT input_params FROM execution_jobs WHERE exec_id=? AND job_id=?";
    private static String FETCH_INPUT_OUTPUT_PARAM_EXECUTABLE_NODE =
        "SELECT input_params, output_params "
            + "FROM execution_jobs WHERE exec_id=? AND job_id=?";

    @SuppressWarnings("unchecked")
    @Override
    public Pair<Props, Props> handle(ResultSet rs) throws SQLException {
      // No row: both halves null (distinct from a row with NULL columns only
      // by identity, not by content).
      if (!rs.next()) {
        return new Pair<Props, Props>(null, null);
      }

      if (rs.getMetaData().getColumnCount() > 1) {
        // Two-column query: (input_params, output_params).
        byte[] input = rs.getBytes(1);
        byte[] output = rs.getBytes(2);

        Props inputProps = null;
        Props outputProps = null;
        try {
          if (input != null) {
            String jsonInputString = GZIPUtils.unGzipString(input, "UTF-8");
            inputProps =
                PropsUtils.fromHierarchicalMap((Map<String, Object>) JSONUtils
                    .parseJSONFromString(jsonInputString));

          }
          if (output != null) {
            String jsonOutputString = GZIPUtils.unGzipString(output, "UTF-8");
            outputProps =
                PropsUtils.fromHierarchicalMap((Map<String, Object>) JSONUtils
                    .parseJSONFromString(jsonOutputString));
          }
        } catch (IOException e) {
          throw new SQLException("Error decoding param data", e);
        }

        return new Pair<Props, Props>(inputProps, outputProps);
      } else {
        // Single-column query: whichever column was selected lands in
        // pair.first.
        byte[] params = rs.getBytes(1);
        Props props = null;
        try {
          if (params != null) {
            String jsonProps = GZIPUtils.unGzipString(params, "UTF-8");

            props =
                PropsUtils.fromHierarchicalMap((Map<String, Object>) JSONUtils
                    .parseJSONFromString(jsonProps));
          }
        } catch (IOException e) {
          throw new SQLException("Error decoding param data", e);
        }

        return new Pair<Props, Props>(props, null);
      }
    }
  }
+
  /**
   * Joins execution_flows with active_executing_flows and inflates each row
   * into a (ExecutionReference, ExecutableFlow) pair keyed by execution id.
   *
   * A row whose flow_data is NULL is still recorded — with a null value — so
   * callers can tell "active but data missing" apart from "not active".
   */
  private static class FetchActiveExecutableFlows implements
      ResultSetHandler<Map<Integer, Pair<ExecutionReference, ExecutableFlow>>> {
    private static String FETCH_ACTIVE_EXECUTABLE_FLOW =
        "SELECT ex.exec_id exec_id, ex.enc_type enc_type, ex.flow_data "
            + "flow_data, ax.host host, ax.port port, ax.update_time "
            + "axUpdateTime " + "FROM execution_flows ex "
            + "INNER JOIN active_executing_flows ax ON ex.exec_id = ax.exec_id";

    @Override
    public Map<Integer, Pair<ExecutionReference, ExecutableFlow>> handle(
        ResultSet rs) throws SQLException {
      if (!rs.next()) {
        return Collections
            .<Integer, Pair<ExecutionReference, ExecutableFlow>> emptyMap();
      }

      Map<Integer, Pair<ExecutionReference, ExecutableFlow>> execFlows =
          new HashMap<Integer, Pair<ExecutionReference, ExecutableFlow>>();
      do {
        int id = rs.getInt(1);
        int encodingType = rs.getInt(2);
        byte[] data = rs.getBytes(3);
        String host = rs.getString(4);
        int port = rs.getInt(5);
        long updateTime = rs.getLong(6);

        if (data == null) {
          // Keep the key with a null value: the execution is active but its
          // serialized flow is missing.
          execFlows.put(id, null);
        } else {
          EncodingType encType = EncodingType.fromInteger(encodingType);
          Object flowObj;
          try {
            // Convoluted way to inflate strings. Should find common package or
            // helper function.
            if (encType == EncodingType.GZIP) {
              // Decompress the sucker.
              String jsonString = GZIPUtils.unGzipString(data, "UTF-8");
              flowObj = JSONUtils.parseJSONFromString(jsonString);
            } else {
              String jsonString = new String(data, "UTF-8");
              flowObj = JSONUtils.parseJSONFromString(jsonString);
            }

            ExecutableFlow exFlow =
                ExecutableFlow.createExecutableFlowFromObject(flowObj);
            ExecutionReference ref = new ExecutionReference(id, host, port);
            ref.setUpdateTime(updateTime);

            execFlows.put(id, new Pair<ExecutionReference, ExecutableFlow>(ref,
                exFlow));
          } catch (IOException e) {
            throw new SQLException("Error retrieving flow data " + id, e);
          }
        }
      } while (rs.next());

      return execFlows;
    }
  }
+
+ private static class FetchExecutableFlows implements
+ ResultSetHandler<List<ExecutableFlow>> {
+ private static String FETCH_BASE_EXECUTABLE_FLOW_QUERY =
+ "SELECT exec_id, enc_type, flow_data FROM execution_flows ";
+ private static String FETCH_EXECUTABLE_FLOW =
+ "SELECT exec_id, enc_type, flow_data FROM execution_flows "
+ + "WHERE exec_id=?";
+ // private static String FETCH_ACTIVE_EXECUTABLE_FLOW =
+ // "SELECT ex.exec_id exec_id, ex.enc_type enc_type, ex.flow_data flow_data "
+ // +
+ // "FROM execution_flows ex " +
+ // "INNER JOIN active_executing_flows ax ON ex.exec_id = ax.exec_id";
+ private static String FETCH_ALL_EXECUTABLE_FLOW_HISTORY =
+ "SELECT exec_id, enc_type, flow_data FROM execution_flows "
+ + "ORDER BY exec_id DESC LIMIT ?, ?";
+ private static String FETCH_EXECUTABLE_FLOW_HISTORY =
+ "SELECT exec_id, enc_type, flow_data FROM execution_flows "
+ + "WHERE project_id=? AND flow_id=? "
+ + "ORDER BY exec_id DESC LIMIT ?, ?";
+ private static String FETCH_EXECUTABLE_FLOW_BY_STATUS =
+ "SELECT exec_id, enc_type, flow_data FROM execution_flows "
+ + "WHERE project_id=? AND flow_id=? AND status=? "
+ + "ORDER BY exec_id DESC LIMIT ?, ?";
+
+ @Override
+ public List<ExecutableFlow> handle(ResultSet rs) throws SQLException {
+ if (!rs.next()) {
+ return Collections.<ExecutableFlow> emptyList();
+ }
+
+ List<ExecutableFlow> execFlows = new ArrayList<ExecutableFlow>();
+ do {
+ int id = rs.getInt(1);
+ int encodingType = rs.getInt(2);
+ byte[] data = rs.getBytes(3);
+
+ if (data != null) {
+ EncodingType encType = EncodingType.fromInteger(encodingType);
+ Object flowObj;
+ try {
+ // Convoluted way to inflate strings. Should find common package
+ // or helper function.
+ if (encType == EncodingType.GZIP) {
+ // Decompress the sucker.
+ String jsonString = GZIPUtils.unGzipString(data, "UTF-8");
+ flowObj = JSONUtils.parseJSONFromString(jsonString);
+ } else {
+ String jsonString = new String(data, "UTF-8");
+ flowObj = JSONUtils.parseJSONFromString(jsonString);
+ }
+
+ ExecutableFlow exFlow =
+ ExecutableFlow.createExecutableFlowFromObject(flowObj);
+ execFlows.add(exFlow);
+ } catch (IOException e) {
+ throw new SQLException("Error retrieving flow data " + id, e);
+ }
+ }
+ } while (rs.next());
+
+ return execFlows;
+ }
+ }
+
+ private static class IntHandler implements ResultSetHandler<Integer> {
+ private static String NUM_EXECUTIONS =
+ "SELECT COUNT(1) FROM execution_flows";
+ private static String NUM_FLOW_EXECUTIONS =
+ "SELECT COUNT(1) FROM execution_flows WHERE project_id=? AND flow_id=?";
+ private static String NUM_JOB_EXECUTIONS =
+ "SELECT COUNT(1) FROM execution_jobs WHERE project_id=? AND job_id=?";
+
+ @Override
+ public Integer handle(ResultSet rs) throws SQLException {
+ if (!rs.next()) {
+ return 0;
+ }
+ return rs.getInt(1);
+ }
+ }
+
+ @Override
+ public int removeExecutionLogsByTime(long millis)
+ throws ExecutorManagerException {
+ final String DELETE_BY_TIME =
+ "DELETE FROM execution_logs WHERE upload_time < ?";
+
+ QueryRunner runner = createQueryRunner();
+ int updateNum = 0;
+ try {
+ updateNum = runner.update(DELETE_BY_TIME, millis);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ throw new ExecutorManagerException(
+ "Error deleting old execution_logs before " + millis, e);
+ }
+
+ return updateNum;
+ }
}
src/main/java/azkaban/executor/mail/DefaultMailCreator.java 316(+178 -138)
diff --git a/src/main/java/azkaban/executor/mail/DefaultMailCreator.java b/src/main/java/azkaban/executor/mail/DefaultMailCreator.java
index 831c809..62e8eac 100644
--- a/src/main/java/azkaban/executor/mail/DefaultMailCreator.java
+++ b/src/main/java/azkaban/executor/mail/DefaultMailCreator.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -27,139 +27,179 @@ import azkaban.utils.Emailer;
import azkaban.utils.Utils;
public class DefaultMailCreator implements MailCreator {
- public static final String DEFAULT_MAIL_CREATOR = "default";
- private static HashMap<String, MailCreator> registeredCreators = new HashMap<String, MailCreator>();
- private static MailCreator defaultCreator;
-
- public static void registerCreator(String name, MailCreator creator) {
- registeredCreators.put(name, creator);
- }
-
- public static MailCreator getCreator(String name) {
- MailCreator creator = registeredCreators.get(name);
- if (creator == null) {
- creator = defaultCreator;
- }
- return creator;
- }
-
- static {
- defaultCreator = new DefaultMailCreator();
- registerCreator(DEFAULT_MAIL_CREATOR, defaultCreator);
- }
-
- @Override
- public boolean createFirstErrorMessage(ExecutableFlow flow, EmailMessage message, String azkabanName, String clientHostname, String clientPortNumber, String... vars) {
-
- ExecutionOptions option = flow.getExecutionOptions();
- List<String> emailList = option.getFailureEmails();
- int execId = flow.getExecutionId();
-
- if (emailList != null && !emailList.isEmpty()) {
- message.addAllToAddress(emailList);
- message.setMimeType("text/html");
- message.setSubject("Flow '" + flow.getFlowId() + "' has failed on " + azkabanName);
-
- message.println("<h2 style=\"color:#FF0000\"> Execution '" + flow.getExecutionId() + "' of flow '" + flow.getFlowId() + "' has encountered a failure on " + azkabanName + "</h2>");
-
- if (option.getFailureAction() == FailureAction.CANCEL_ALL) {
- message.println("This flow is set to cancel all currently running jobs.");
- }
- else if (option.getFailureAction() == FailureAction.FINISH_ALL_POSSIBLE) {
- message.println("This flow is set to complete all jobs that aren't blocked by the failure.");
- }
- else {
- message.println("This flow is set to complete all currently running jobs before stopping.");
- }
-
- message.println("<table>");
- message.println("<tr><td>Start Time</td><td>" + flow.getStartTime() + "</td></tr>");
- message.println("<tr><td>End Time</td><td>" + flow.getEndTime() + "</td></tr>");
- message.println("<tr><td>Duration</td><td>" + Utils.formatDuration(flow.getStartTime(), flow.getEndTime()) + "</td></tr>");
- message.println("</table>");
- message.println("");
- String executionUrl = "https://" + clientHostname + ":" + clientPortNumber + "/" + "executor?" + "execid=" + execId;
- message.println("<a href='\"" + executionUrl + "\">" + flow.getFlowId() + " Execution Link</a>");
-
- message.println("");
- message.println("<h3>Reason</h3>");
- List<String> failedJobs = Emailer.findFailedJobs(flow);
- message.println("<ul>");
- for (String jobId : failedJobs) {
- message.println("<li><a href=\"" + executionUrl + "&job=" + jobId + "\">Failed job '" + jobId + "' Link</a></li>");
- }
-
- message.println("</ul>");
- return true;
- }
-
- return false;
- }
-
- @Override
- public boolean createErrorEmail(ExecutableFlow flow, EmailMessage message, String azkabanName, String clientHostname, String clientPortNumber, String... vars) {
-
- ExecutionOptions option = flow.getExecutionOptions();
-
- List<String> emailList = option.getFailureEmails();
- int execId = flow.getExecutionId();
-
- if (emailList != null && !emailList.isEmpty()) {
- message.addAllToAddress(emailList);
- message.setMimeType("text/html");
- message.setSubject("Flow '" + flow.getFlowId() + "' has failed on " + azkabanName);
-
- message.println("<h2 style=\"color:#FF0000\"> Execution '" + execId + "' of flow '" + flow.getFlowId() + "' has failed on " + azkabanName + "</h2>");
- message.println("<table>");
- message.println("<tr><td>Start Time</td><td>" + flow.getStartTime() + "</td></tr>");
- message.println("<tr><td>End Time</td><td>" + flow.getEndTime() + "</td></tr>");
- message.println("<tr><td>Duration</td><td>" + Utils.formatDuration(flow.getStartTime(), flow.getEndTime()) + "</td></tr>");
- message.println("</table>");
- message.println("");
- String executionUrl = "https://" + clientHostname + ":" + clientPortNumber + "/" + "executor?" + "execid=" + execId;
- message.println("<a href='\"" + executionUrl + "\">" + flow.getFlowId() + " Execution Link</a>");
-
- message.println("");
- message.println("<h3>Reason</h3>");
- List<String> failedJobs = Emailer.findFailedJobs(flow);
- message.println("<ul>");
- for (String jobId : failedJobs) {
- message.println("<li><a href=\"" + executionUrl + "&job=" + jobId + "\">Failed job '" + jobId + "' Link</a></li>");
- }
- for (String reasons : vars) {
- message.println("<li>" + reasons + "</li>");
- }
-
- message.println("</ul>");
- return true;
- }
- return false;
- }
-
- @Override
- public boolean createSuccessEmail(ExecutableFlow flow, EmailMessage message, String azkabanName, String clientHostname, String clientPortNumber, String... vars) {
-
- ExecutionOptions option = flow.getExecutionOptions();
- List<String> emailList = option.getSuccessEmails();
-
- int execId = flow.getExecutionId();
-
- if (emailList != null && !emailList.isEmpty()) {
- message.addAllToAddress(emailList);
- message.setMimeType("text/html");
- message.setSubject("Flow '" + flow.getFlowId() + "' has succeeded on " + azkabanName);
-
- message.println("<h2> Execution '" + flow.getExecutionId() + "' of flow '" + flow.getFlowId() + "' has succeeded on " + azkabanName + "</h2>");
- message.println("<table>");
- message.println("<tr><td>Start Time</td><td>" + flow.getStartTime() + "</td></tr>");
- message.println("<tr><td>End Time</td><td>" + flow.getEndTime() + "</td></tr>");
- message.println("<tr><td>Duration</td><td>" + Utils.formatDuration(flow.getStartTime(), flow.getEndTime()) + "</td></tr>");
- message.println("</table>");
- message.println("");
- String executionUrl = "https://" + clientHostname + ":" + clientPortNumber + "/" + "executor?" + "execid=" + execId;
- message.println("<a href=\"" + executionUrl + "\">" + flow.getFlowId() + " Execution Link</a>");
- return true;
- }
- return false;
- }
  // Registry key under which the built-in creator registers itself.
  public static final String DEFAULT_MAIL_CREATOR = "default";
  // Global name -> creator registry. NOTE(review): plain HashMap with no
  // synchronization — assumes registration happens at startup only; confirm
  // before registering creators concurrently.
  private static HashMap<String, MailCreator> registeredCreators =
      new HashMap<String, MailCreator>();
  // Fallback returned by getCreator() for unknown names; set in the static
  // initializer below.
  private static MailCreator defaultCreator;

  /** Registers a creator under the given name, replacing any previous one. */
  public static void registerCreator(String name, MailCreator creator) {
    registeredCreators.put(name, creator);
  }

  /**
   * Looks up a creator by name, falling back to the default creator when the
   * name is not registered.
   */
  public static MailCreator getCreator(String name) {
    MailCreator creator = registeredCreators.get(name);
    if (creator == null) {
      creator = defaultCreator;
    }
    return creator;
  }

  // Installs this class as the default creator at class-load time.
  static {
    defaultCreator = new DefaultMailCreator();
    registerCreator(DEFAULT_MAIL_CREATOR, defaultCreator);
  }
+
+ @Override
+ public boolean createFirstErrorMessage(ExecutableFlow flow,
+ EmailMessage message, String azkabanName, String clientHostname,
+ String clientPortNumber, String... vars) {
+
+ ExecutionOptions option = flow.getExecutionOptions();
+ List<String> emailList = option.getFailureEmails();
+ int execId = flow.getExecutionId();
+
+ if (emailList != null && !emailList.isEmpty()) {
+ message.addAllToAddress(emailList);
+ message.setMimeType("text/html");
+ message.setSubject("Flow '" + flow.getFlowId() + "' has failed on "
+ + azkabanName);
+
+ message.println("<h2 style=\"color:#FF0000\"> Execution '"
+ + flow.getExecutionId() + "' of flow '" + flow.getFlowId()
+ + "' has encountered a failure on " + azkabanName + "</h2>");
+
+ if (option.getFailureAction() == FailureAction.CANCEL_ALL) {
+ message
+ .println("This flow is set to cancel all currently running jobs.");
+ } else if (option.getFailureAction() == FailureAction.FINISH_ALL_POSSIBLE) {
+ message
+ .println("This flow is set to complete all jobs that aren't blocked by the failure.");
+ } else {
+ message
+ .println("This flow is set to complete all currently running jobs before stopping.");
+ }
+
+ message.println("<table>");
+ message.println("<tr><td>Start Time</td><td>" + flow.getStartTime()
+ + "</td></tr>");
+ message.println("<tr><td>End Time</td><td>" + flow.getEndTime()
+ + "</td></tr>");
+ message.println("<tr><td>Duration</td><td>"
+ + Utils.formatDuration(flow.getStartTime(), flow.getEndTime())
+ + "</td></tr>");
+ message.println("</table>");
+ message.println("");
+ String executionUrl =
+ "https://" + clientHostname + ":" + clientPortNumber + "/"
+ + "executor?" + "execid=" + execId;
+ message.println("<a href='\"" + executionUrl + "\">" + flow.getFlowId()
+ + " Execution Link</a>");
+
+ message.println("");
+ message.println("<h3>Reason</h3>");
+ List<String> failedJobs = Emailer.findFailedJobs(flow);
+ message.println("<ul>");
+ for (String jobId : failedJobs) {
+ message.println("<li><a href=\"" + executionUrl + "&job=" + jobId
+ + "\">Failed job '" + jobId + "' Link</a></li>");
+ }
+
+ message.println("</ul>");
+ return true;
+ }
+
+ return false;
+ }
+
+ @Override
+ public boolean createErrorEmail(ExecutableFlow flow, EmailMessage message,
+ String azkabanName, String clientHostname, String clientPortNumber,
+ String... vars) {
+
+ ExecutionOptions option = flow.getExecutionOptions();
+
+ List<String> emailList = option.getFailureEmails();
+ int execId = flow.getExecutionId();
+
+ if (emailList != null && !emailList.isEmpty()) {
+ message.addAllToAddress(emailList);
+ message.setMimeType("text/html");
+ message.setSubject("Flow '" + flow.getFlowId() + "' has failed on "
+ + azkabanName);
+
+ message.println("<h2 style=\"color:#FF0000\"> Execution '" + execId
+ + "' of flow '" + flow.getFlowId() + "' has failed on " + azkabanName
+ + "</h2>");
+ message.println("<table>");
+ message.println("<tr><td>Start Time</td><td>" + flow.getStartTime()
+ + "</td></tr>");
+ message.println("<tr><td>End Time</td><td>" + flow.getEndTime()
+ + "</td></tr>");
+ message.println("<tr><td>Duration</td><td>"
+ + Utils.formatDuration(flow.getStartTime(), flow.getEndTime())
+ + "</td></tr>");
+ message.println("</table>");
+ message.println("");
+ String executionUrl =
+ "https://" + clientHostname + ":" + clientPortNumber + "/"
+ + "executor?" + "execid=" + execId;
+ message.println("<a href='\"" + executionUrl + "\">" + flow.getFlowId()
+ + " Execution Link</a>");
+
+ message.println("");
+ message.println("<h3>Reason</h3>");
+ List<String> failedJobs = Emailer.findFailedJobs(flow);
+ message.println("<ul>");
+ for (String jobId : failedJobs) {
+ message.println("<li><a href=\"" + executionUrl + "&job=" + jobId
+ + "\">Failed job '" + jobId + "' Link</a></li>");
+ }
+ for (String reasons : vars) {
+ message.println("<li>" + reasons + "</li>");
+ }
+
+ message.println("</ul>");
+ return true;
+ }
+ return false;
+ }
+
+ @Override
+ public boolean createSuccessEmail(ExecutableFlow flow, EmailMessage message,
+ String azkabanName, String clientHostname, String clientPortNumber,
+ String... vars) {
+
+ ExecutionOptions option = flow.getExecutionOptions();
+ List<String> emailList = option.getSuccessEmails();
+
+ int execId = flow.getExecutionId();
+
+ if (emailList != null && !emailList.isEmpty()) {
+ message.addAllToAddress(emailList);
+ message.setMimeType("text/html");
+ message.setSubject("Flow '" + flow.getFlowId() + "' has succeeded on "
+ + azkabanName);
+
+ message.println("<h2> Execution '" + flow.getExecutionId()
+ + "' of flow '" + flow.getFlowId() + "' has succeeded on "
+ + azkabanName + "</h2>");
+ message.println("<table>");
+ message.println("<tr><td>Start Time</td><td>" + flow.getStartTime()
+ + "</td></tr>");
+ message.println("<tr><td>End Time</td><td>" + flow.getEndTime()
+ + "</td></tr>");
+ message.println("<tr><td>Duration</td><td>"
+ + Utils.formatDuration(flow.getStartTime(), flow.getEndTime())
+ + "</td></tr>");
+ message.println("</table>");
+ message.println("");
+ String executionUrl =
+ "https://" + clientHostname + ":" + clientPortNumber + "/"
+ + "executor?" + "execid=" + execId;
+ message.println("<a href=\"" + executionUrl + "\">" + flow.getFlowId()
+ + " Execution Link</a>");
+ return true;
+ }
+ return false;
+ }
}
diff --git a/src/main/java/azkaban/executor/mail/MailCreator.java b/src/main/java/azkaban/executor/mail/MailCreator.java
index 19bed3b..f0f16a5 100644
--- a/src/main/java/azkaban/executor/mail/MailCreator.java
+++ b/src/main/java/azkaban/executor/mail/MailCreator.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -20,7 +20,15 @@ import azkaban.executor.ExecutableFlow;
import azkaban.utils.EmailMessage;
public interface MailCreator {
- public boolean createFirstErrorMessage(ExecutableFlow flow, EmailMessage message, String azkabanName, String clientHostname, String clientPortNumber, String... vars);
- public boolean createErrorEmail(ExecutableFlow flow, EmailMessage message, String azkabanName, String clientHostname, String clientPortNumber, String... vars);
- public boolean createSuccessEmail(ExecutableFlow flow, EmailMessage message, String azkabanName, String clientHostname, String clientPortNumber, String... vars);
+ public boolean createFirstErrorMessage(ExecutableFlow flow,
+ EmailMessage message, String azkabanName, String clientHostname,
+ String clientPortNumber, String... vars);
+
+ public boolean createErrorEmail(ExecutableFlow flow, EmailMessage message,
+ String azkabanName, String clientHostname, String clientPortNumber,
+ String... vars);
+
+ public boolean createSuccessEmail(ExecutableFlow flow, EmailMessage message,
+ String azkabanName, String clientHostname, String clientPortNumber,
+ String... vars);
}
src/main/java/azkaban/executor/Status.java 166(+83 -83)
diff --git a/src/main/java/azkaban/executor/Status.java b/src/main/java/azkaban/executor/Status.java
index 7643d2f..3e26bee 100644
--- a/src/main/java/azkaban/executor/Status.java
+++ b/src/main/java/azkaban/executor/Status.java
@@ -1,12 +1,12 @@
/*
* Copyright 2014 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -17,85 +17,85 @@
package azkaban.executor;
public enum Status {
- READY(10),
- PREPARING(20),
- RUNNING(30),
- PAUSED(40),
- SUCCEEDED(50),
- KILLED(60),
- FAILED(70),
- FAILED_FINISHING(80),
- SKIPPED(90),
- DISABLED(100),
- QUEUED(110),
- FAILED_SUCCEEDED(120),
- CANCELLED(130);
-
- private int numVal;
+ READY(10),
+ PREPARING(20),
+ RUNNING(30),
+ PAUSED(40),
+ SUCCEEDED(50),
+ KILLED(60),
+ FAILED(70),
+ FAILED_FINISHING(80),
+ SKIPPED(90),
+ DISABLED(100),
+ QUEUED(110),
+ FAILED_SUCCEEDED(120),
+ CANCELLED(130);
- Status(int numVal) {
- this.numVal = numVal;
- }
+ private int numVal;
- public int getNumVal() {
- return numVal;
- }
-
- public static Status fromInteger(int x) {
- switch (x) {
- case 10:
- return READY;
- case 20:
- return PREPARING;
- case 30:
- return RUNNING;
- case 40:
- return PAUSED;
- case 50:
- return SUCCEEDED;
- case 60:
- return KILLED;
- case 70:
- return FAILED;
- case 80:
- return FAILED_FINISHING;
- case 90:
- return SKIPPED;
- case 100:
- return DISABLED;
- case 110:
- return QUEUED;
- case 120:
- return FAILED_SUCCEEDED;
- case 130:
- return CANCELLED;
- default:
- return READY;
- }
- }
-
- public static boolean isStatusFinished(Status status) {
- switch (status) {
- case FAILED:
- case KILLED:
- case SUCCEEDED:
- case SKIPPED:
- case FAILED_SUCCEEDED:
- case CANCELLED:
- return true;
- default:
- return false;
- }
- }
-
- public static boolean isStatusRunning(Status status) {
- switch (status) {
- case RUNNING:
- case FAILED_FINISHING:
- case QUEUED:
- return true;
- default:
- return false;
- }
- }
-}
\ No newline at end of file
+ Status(int numVal) {
+ this.numVal = numVal;
+ }
+
+ public int getNumVal() {
+ return numVal;
+ }
+
+ public static Status fromInteger(int x) {
+ switch (x) {
+ case 10:
+ return READY;
+ case 20:
+ return PREPARING;
+ case 30:
+ return RUNNING;
+ case 40:
+ return PAUSED;
+ case 50:
+ return SUCCEEDED;
+ case 60:
+ return KILLED;
+ case 70:
+ return FAILED;
+ case 80:
+ return FAILED_FINISHING;
+ case 90:
+ return SKIPPED;
+ case 100:
+ return DISABLED;
+ case 110:
+ return QUEUED;
+ case 120:
+ return FAILED_SUCCEEDED;
+ case 130:
+ return CANCELLED;
+ default:
+ return READY;
+ }
+ }
+
+ public static boolean isStatusFinished(Status status) {
+ switch (status) {
+ case FAILED:
+ case KILLED:
+ case SUCCEEDED:
+ case SKIPPED:
+ case FAILED_SUCCEEDED:
+ case CANCELLED:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ public static boolean isStatusRunning(Status status) {
+ switch (status) {
+ case RUNNING:
+ case FAILED_FINISHING:
+ case QUEUED:
+ return true;
+ default:
+ return false;
+ }
+ }
+}
src/main/java/azkaban/flow/CommonJobProperties.java 227(+116 -111)
diff --git a/src/main/java/azkaban/flow/CommonJobProperties.java b/src/main/java/azkaban/flow/CommonJobProperties.java
index 3873a56..5ead549 100644
--- a/src/main/java/azkaban/flow/CommonJobProperties.java
+++ b/src/main/java/azkaban/flow/CommonJobProperties.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -17,112 +17,117 @@
package azkaban.flow;
public class CommonJobProperties {
- /*
- * The following are Common properties that can be set in a job file
- */
-
- /**
- * The type of job that will be executed.
- * Examples: command, java, etc.
- */
- public static final String JOB_TYPE = "type";
-
- /**
- * Force a node to be a root node in a flow, even if there are other jobs dependent on it.
- */
- public static final String ROOT_NODE = "root.node";
-
- /**
- * Comma delimited list of job names which are dependencies
- */
- public static final String DEPENDENCIES = "dependencies";
-
- /**
- * The number of retries when this job has failed.
- */
- public static final String RETRIES = "retries";
-
- /**
- * The time in millisec to back off after every retry
- */
- public static final String RETRY_BACKOFF = "retry.backoff";
-
- /**
- * Comma delimited list of email addresses for both failure and success messages
- */
- public static final String NOTIFY_EMAILS = "notify.emails";
-
- /**
- * Comma delimited list of email addresses for success messages
- */
- public static final String SUCCESS_EMAILS = "success.emails";
-
- /**
- * Comma delimited list of email addresses for failure messages
- */
- public static final String FAILURE_EMAILS = "failure.emails";
-
- /*
- * The following are the common props that will be added to the job by azkaban
- */
-
- /**
- * The attempt number of the executing job.
- */
- public static final String JOB_ATTEMPT = "azkaban.job.attempt";
-
- /**
- * The attempt number of the executing job.
- */
- public static final String JOB_METADATA_FILE = "azkaban.job.metadata.file";
-
- /**
- * The attempt number of the executing job.
- */
- public static final String JOB_ATTACHMENT_FILE = "azkaban.job.attachment.file";
-
- /**
- * The executing flow id
- */
- public static final String FLOW_ID = "azkaban.flow.flowid";
-
- /**
- * The nested flow id path
- */
- public static final String NESTED_FLOW_PATH = "azkaban.flow.nested.path";
-
- /**
- * The execution id. This should be unique per flow, but may not be due to
- * restarts.
- */
- public static final String EXEC_ID = "azkaban.flow.execid";
-
- /**
- * The numerical project id identifier.
- */
- public static final String PROJECT_ID = "azkaban.flow.projectid";
-
- /**
- * The version of the project the flow is running. This may change if a
- * forced hotspot occurs.
- */
- public static final String PROJECT_VERSION = "azkaban.flow.projectversion";
-
- /**
- * A uuid assigned to every execution
- */
- public static final String FLOW_UUID = "azkaban.flow.uuid";
-
- /**
- * Properties for passing the flow start time to the jobs.
- */
- public static final String FLOW_START_TIMESTAMP = "azkaban.flow.start.timestamp";
- public static final String FLOW_START_YEAR = "azkaban.flow.start.year";
- public static final String FLOW_START_MONTH = "azkaban.flow.start.month";
- public static final String FLOW_START_DAY = "azkaban.flow.start.day";
- public static final String FLOW_START_HOUR = "azkaban.flow.start.hour";
- public static final String FLOW_START_MINUTE = "azkaban.flow.start.minute";
- public static final String FLOW_START_SECOND = "azkaban.flow.start.second";
- public static final String FLOW_START_MILLISSECOND = "azkaban.flow.start.milliseconds";
- public static final String FLOW_START_TIMEZONE = "azkaban.flow.start.timezone";
+ /*
+ * The following are Common properties that can be set in a job file
+ */
+
+ /**
+ * The type of job that will be executed. Examples: command, java, etc.
+ */
+ public static final String JOB_TYPE = "type";
+
+ /**
+ * Force a node to be a root node in a flow, even if there are other jobs
+ * dependent on it.
+ */
+ public static final String ROOT_NODE = "root.node";
+
+ /**
+ * Comma delimited list of job names which are dependencies
+ */
+ public static final String DEPENDENCIES = "dependencies";
+
+ /**
+ * The number of retries when this job has failed.
+ */
+ public static final String RETRIES = "retries";
+
+ /**
+ * The time in millisec to back off after every retry
+ */
+ public static final String RETRY_BACKOFF = "retry.backoff";
+
+ /**
+ * Comma delimited list of email addresses for both failure and success
+ * messages
+ */
+ public static final String NOTIFY_EMAILS = "notify.emails";
+
+ /**
+ * Comma delimited list of email addresses for success messages
+ */
+ public static final String SUCCESS_EMAILS = "success.emails";
+
+ /**
+ * Comma delimited list of email addresses for failure messages
+ */
+ public static final String FAILURE_EMAILS = "failure.emails";
+
+ /*
+ * The following are the common props that will be added to the job by azkaban
+ */
+
+ /**
+ * The attempt number of the executing job.
+ */
+ public static final String JOB_ATTEMPT = "azkaban.job.attempt";
+
+ /**
+ * The attempt number of the executing job.
+ */
+ public static final String JOB_METADATA_FILE = "azkaban.job.metadata.file";
+
+ /**
+ * The attempt number of the executing job.
+ */
+ public static final String JOB_ATTACHMENT_FILE =
+ "azkaban.job.attachment.file";
+
+ /**
+ * The executing flow id
+ */
+ public static final String FLOW_ID = "azkaban.flow.flowid";
+
+ /**
+ * The nested flow id path
+ */
+ public static final String NESTED_FLOW_PATH = "azkaban.flow.nested.path";
+
+ /**
+ * The execution id. This should be unique per flow, but may not be due to
+ * restarts.
+ */
+ public static final String EXEC_ID = "azkaban.flow.execid";
+
+ /**
+ * The numerical project id identifier.
+ */
+ public static final String PROJECT_ID = "azkaban.flow.projectid";
+
+ /**
+ * The version of the project the flow is running. This may change if a forced
+ * hotspot occurs.
+ */
+ public static final String PROJECT_VERSION = "azkaban.flow.projectversion";
+
+ /**
+ * A uuid assigned to every execution
+ */
+ public static final String FLOW_UUID = "azkaban.flow.uuid";
+
+ /**
+ * Properties for passing the flow start time to the jobs.
+ */
+ public static final String FLOW_START_TIMESTAMP =
+ "azkaban.flow.start.timestamp";
+ public static final String FLOW_START_YEAR = "azkaban.flow.start.year";
+ public static final String FLOW_START_MONTH = "azkaban.flow.start.month";
+ public static final String FLOW_START_DAY = "azkaban.flow.start.day";
+ public static final String FLOW_START_HOUR = "azkaban.flow.start.hour";
+ public static final String FLOW_START_MINUTE = "azkaban.flow.start.minute";
+ public static final String FLOW_START_SECOND = "azkaban.flow.start.second";
+ public static final String FLOW_START_MILLISSECOND =
+ "azkaban.flow.start.milliseconds";
+ public static final String FLOW_START_TIMEZONE =
+ "azkaban.flow.start.timezone";
}
src/main/java/azkaban/flow/Edge.java 271(+136 -135)
diff --git a/src/main/java/azkaban/flow/Edge.java b/src/main/java/azkaban/flow/Edge.java
index 14fc6a0..52b835f 100644
--- a/src/main/java/azkaban/flow/Edge.java
+++ b/src/main/java/azkaban/flow/Edge.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -23,137 +23,138 @@ import java.util.List;
import java.util.Map;
public class Edge {
- private final String sourceId;
- private final String targetId;
- private Node source;
- private Node target;
- private String error;
-
- // Useful in rendering.
- private String guideType;
- private List<Point2D> guideValues;
-
- public Edge(String fromId, String toId) {
- this.sourceId = fromId;
- this.targetId = toId;
- }
-
- public Edge(Edge clone) {
- this.sourceId = clone.getSourceId();
- this.targetId = clone.getTargetId();
- this.error = clone.getError();
- }
-
- public String getId() {
- return getSourceId() + ">>" + getTargetId();
- }
-
- public String getSourceId() {
- return sourceId;
- }
-
- public String getTargetId() {
- return targetId;
- }
-
- public void setError(String error) {
- this.error = error;
- }
-
- public String getError() {
- return this.error;
- }
-
- public boolean hasError() {
- return this.error != null;
- }
-
- public Node getSource() {
- return source;
- }
-
- public void setSource(Node source) {
- this.source = source;
- }
-
- public Node getTarget() {
- return target;
- }
-
- public void setTarget(Node target) {
- this.target = target;
- }
-
- public String getGuideType() {
- return guideType;
- }
-
- public List<Point2D> getGuideValues() {
- return guideValues;
- }
-
- public void setGuides(String type, List<Point2D> values) {
- this.guideType = type;
- this.guideValues = values;
- }
-
- public Object toObject() {
- HashMap<String, Object> obj = new HashMap<String, Object>();
- obj.put("source", getSourceId());
- obj.put("target", getTargetId());
- if (hasError()) {
- obj.put("error", error);
- }
- if (guideValues != null) {
- HashMap<String, Object> lineGuidesObj = new HashMap<String, Object>();
- lineGuidesObj.put("type", guideType);
-
- ArrayList<Object> guides = new ArrayList<Object>();
- for (Point2D point: this.guideValues) {
- HashMap<String, Double> pointObj = new HashMap<String, Double>();
- pointObj.put("x", point.getX());
- pointObj.put("y", point.getY());
- guides.add(pointObj);
- }
- lineGuidesObj.put("values", guides);
-
- obj.put("guides",lineGuidesObj);
- }
-
- return obj;
- }
-
- @SuppressWarnings("unchecked")
- public static Edge fromObject(Object obj) {
- HashMap<String, Object> edgeObj = (HashMap<String,Object>)obj;
-
- String source = (String)edgeObj.get("source");
- String target = (String)edgeObj.get("target");
-
- String error = (String)edgeObj.get("error");
-
- Edge edge = new Edge(source, target);
- edge.setError(error);
-
- if (edgeObj.containsKey("guides")) {
- Map<String, Object> guideMap = (Map<String,Object>)edgeObj.get("guides");
- List<Object> values = (List<Object>)guideMap.get("values");
- String type = (String)guideMap.get("type");
-
- ArrayList<Point2D> valuePoints = new ArrayList<Point2D>();
- for (Object pointObj: values) {
- Map<String, Double> point = (Map<String,Double>)pointObj;
-
- Double x = point.get("x");
- Double y = point.get("y");
-
- valuePoints.add(new Point2D.Double(x, y));
- }
-
- edge.setGuides(type, valuePoints);
- }
-
- return edge;
- }
+ private final String sourceId;
+ private final String targetId;
+ private Node source;
+ private Node target;
+ private String error;
+
+ // Useful in rendering.
+ private String guideType;
+ private List<Point2D> guideValues;
+
+ public Edge(String fromId, String toId) {
+ this.sourceId = fromId;
+ this.targetId = toId;
+ }
+
+ public Edge(Edge clone) {
+ this.sourceId = clone.getSourceId();
+ this.targetId = clone.getTargetId();
+ this.error = clone.getError();
+ }
+
+ public String getId() {
+ return getSourceId() + ">>" + getTargetId();
+ }
+
+ public String getSourceId() {
+ return sourceId;
+ }
+
+ public String getTargetId() {
+ return targetId;
+ }
+
+ public void setError(String error) {
+ this.error = error;
+ }
+
+ public String getError() {
+ return this.error;
+ }
+
+ public boolean hasError() {
+ return this.error != null;
+ }
+
+ public Node getSource() {
+ return source;
+ }
+
+ public void setSource(Node source) {
+ this.source = source;
+ }
+
+ public Node getTarget() {
+ return target;
+ }
+
+ public void setTarget(Node target) {
+ this.target = target;
+ }
+
+ public String getGuideType() {
+ return guideType;
+ }
+
+ public List<Point2D> getGuideValues() {
+ return guideValues;
+ }
+
+ public void setGuides(String type, List<Point2D> values) {
+ this.guideType = type;
+ this.guideValues = values;
+ }
+
+ public Object toObject() {
+ HashMap<String, Object> obj = new HashMap<String, Object>();
+ obj.put("source", getSourceId());
+ obj.put("target", getTargetId());
+ if (hasError()) {
+ obj.put("error", error);
+ }
+ if (guideValues != null) {
+ HashMap<String, Object> lineGuidesObj = new HashMap<String, Object>();
+ lineGuidesObj.put("type", guideType);
+
+ ArrayList<Object> guides = new ArrayList<Object>();
+ for (Point2D point : this.guideValues) {
+ HashMap<String, Double> pointObj = new HashMap<String, Double>();
+ pointObj.put("x", point.getX());
+ pointObj.put("y", point.getY());
+ guides.add(pointObj);
+ }
+ lineGuidesObj.put("values", guides);
+
+ obj.put("guides", lineGuidesObj);
+ }
+
+ return obj;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static Edge fromObject(Object obj) {
+ HashMap<String, Object> edgeObj = (HashMap<String, Object>) obj;
+
+ String source = (String) edgeObj.get("source");
+ String target = (String) edgeObj.get("target");
+
+ String error = (String) edgeObj.get("error");
+
+ Edge edge = new Edge(source, target);
+ edge.setError(error);
+
+ if (edgeObj.containsKey("guides")) {
+ Map<String, Object> guideMap =
+ (Map<String, Object>) edgeObj.get("guides");
+ List<Object> values = (List<Object>) guideMap.get("values");
+ String type = (String) guideMap.get("type");
+
+ ArrayList<Point2D> valuePoints = new ArrayList<Point2D>();
+ for (Object pointObj : values) {
+ Map<String, Double> point = (Map<String, Double>) pointObj;
+
+ Double x = point.get("x");
+ Double y = point.get("y");
+
+ valuePoints.add(new Point2D.Double(x, y));
+ }
+
+ edge.setGuides(type, valuePoints);
+ }
+
+ return edge;
+ }
}
src/main/java/azkaban/flow/Flow.java 768(+387 -381)
diff --git a/src/main/java/azkaban/flow/Flow.java b/src/main/java/azkaban/flow/Flow.java
index d28a159..3e09476 100644
--- a/src/main/java/azkaban/flow/Flow.java
+++ b/src/main/java/azkaban/flow/Flow.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -27,382 +27,388 @@ import java.util.Set;
import azkaban.executor.mail.DefaultMailCreator;
public class Flow {
- private final String id;
- private int projectId;
- private ArrayList<Node> startNodes = null;
- private ArrayList<Node> endNodes = null;
- private int numLevels = -1;
-
- private HashMap<String, Node> nodes = new HashMap<String, Node>();
-
- private HashMap<String, Edge> edges = new HashMap<String, Edge>();
- private HashMap<String, Set<Edge>> outEdges = new HashMap<String, Set<Edge>>();
- private HashMap<String, Set<Edge>> inEdges = new HashMap<String, Set<Edge>>();
- private HashMap<String, FlowProps> flowProps = new HashMap<String, FlowProps>();
-
- private List<String> failureEmail = new ArrayList<String>();
- private List<String> successEmail = new ArrayList<String>();
- private String mailCreator = DefaultMailCreator.DEFAULT_MAIL_CREATOR;
- private ArrayList<String> errors;
- private int version = -1;
- private Map<String, Object> metadata = new HashMap<String, Object>();
-
- private boolean isLayedOut = false;
-
- public Flow(String id) {
- this.id = id;
- }
-
- public void setVersion(int version) {
- this.version = version;
- }
-
- public int getVersion() {
- return version;
- }
-
- public void initialize() {
- if (startNodes == null) {
- startNodes = new ArrayList<Node>();
- endNodes = new ArrayList<Node>();
- for (Node node : nodes.values()) {
- // If it doesn't have any incoming edges, its a start node
- if (!inEdges.containsKey(node.getId())) {
- startNodes.add(node);
- }
-
- // If it doesn't contain any outgoing edges, its an end node.
- if (!outEdges.containsKey(node.getId())) {
- endNodes.add(node);
- }
- }
-
- for (Node node: startNodes) {
- node.setLevel(0);
- numLevels = 0;
- recursiveSetLevels(node);
- }
- }
- }
-
- private void recursiveSetLevels(Node node) {
- Set<Edge> edges = outEdges.get(node.getId());
- if (edges != null) {
- for (Edge edge : edges) {
- Node nextNode = nodes.get(edge.getTargetId());
- edge.setSource(node);
- edge.setTarget(nextNode);
-
- // We pick whichever is higher to get the max distance from root.
- int level = Math.max(node.getLevel() + 1, nextNode.getLevel());
- nextNode.setLevel(level);
- numLevels = Math.max(level, numLevels);
- recursiveSetLevels(nextNode);
- }
- }
- }
-
- public Node getNode(String nodeId) {
- return nodes.get(nodeId);
- }
-
- public List<String> getSuccessEmails() {
- return successEmail;
- }
-
- public String getMailCreator() {
- return mailCreator;
- }
-
- public List<String> getFailureEmails() {
- return failureEmail;
- }
-
- public void setMailCreator(String mailCreator) {
- this.mailCreator = mailCreator;
- }
-
- public void addSuccessEmails(Collection<String> emails) {
- successEmail.addAll(emails);
- }
-
- public void addFailureEmails(Collection<String> emails) {
- failureEmail.addAll(emails);
- }
-
- public int getNumLevels() {
- return numLevels;
- }
-
- public List<Node> getStartNodes() {
- return startNodes;
- }
-
- public List<Node> getEndNodes() {
- return endNodes;
- }
-
- public Set<Edge> getInEdges(String id) {
- return inEdges.get(id);
- }
-
- public Set<Edge> getOutEdges(String id) {
- return outEdges.get(id);
- }
-
- public void addAllNodes(Collection<Node> nodes) {
- for (Node node: nodes) {
- addNode(node);
- }
- }
-
- public void addNode(Node node) {
- nodes.put(node.getId(), node);
- }
-
- public void addAllFlowProperties(Collection<FlowProps> props) {
- for (FlowProps prop : props) {
- flowProps.put(prop.getSource(), prop);
- }
- }
-
- public String getId() {
- return id;
- }
-
- public void addError(String error) {
- if (errors == null) {
- errors = new ArrayList<String>();
- }
-
- errors.add(error);
- }
-
- public List<String> getErrors() {
- return errors;
- }
-
- public boolean hasErrors() {
- return errors != null && !errors.isEmpty();
- }
-
- public Collection<Node> getNodes() {
- return nodes.values();
- }
-
- public Collection<Edge> getEdges() {
- return edges.values();
- }
-
- public void addAllEdges(Collection<Edge> edges) {
- for (Edge edge: edges) {
- addEdge(edge);
- }
- }
-
- public void addEdge(Edge edge) {
- String source = edge.getSourceId();
- String target = edge.getTargetId();
-
- if (edge.hasError()) {
- addError("Error on " + edge.getId() + ". " + edge.getError());
- }
-
- Set<Edge> sourceSet = getEdgeSet(outEdges, source);
- sourceSet.add(edge);
-
- Set<Edge> targetSet = getEdgeSet(inEdges, target);
- targetSet.add(edge);
-
- edges.put(edge.getId(), edge);
- }
-
- private Set<Edge> getEdgeSet(HashMap<String, Set<Edge>> map, String id) {
- Set<Edge> edges = map.get(id);
- if (edges == null) {
- edges = new HashSet<Edge>();
- map.put(id, edges);
- }
-
- return edges;
- }
-
- public Map<String,Object> toObject() {
- HashMap<String, Object> flowObj = new HashMap<String, Object>();
- flowObj.put("type", "flow");
- flowObj.put("id", getId());
- flowObj.put("project.id", projectId);
- flowObj.put("version", version);
- flowObj.put("props", objectizeProperties());
- flowObj.put("nodes", objectizeNodes());
- flowObj.put("edges", objectizeEdges());
- flowObj.put("failure.email", failureEmail);
- flowObj.put("success.email", successEmail);
- flowObj.put("mailCreator", mailCreator);
- flowObj.put("layedout", isLayedOut);
- if (errors != null) {
- flowObj.put("errors", errors);
- }
-
- if (metadata != null) {
- flowObj.put("metadata", metadata);
- }
-
- return flowObj;
- }
-
- private List<Object> objectizeProperties() {
- ArrayList<Object> result = new ArrayList<Object>();
- for (FlowProps props: flowProps.values()) {
- Object objProps = props.toObject();
- result.add(objProps);
- }
-
- return result;
- }
-
- private List<Object> objectizeNodes() {
- ArrayList<Object> result = new ArrayList<Object>();
- for (Node node : getNodes()) {
- Object nodeObj = node.toObject();
- result.add(nodeObj);
- }
-
- return result;
- }
-
- private List<Object> objectizeEdges() {
- ArrayList<Object> result = new ArrayList<Object>();
- for (Edge edge: getEdges()) {
- Object edgeObj = edge.toObject();
- result.add(edgeObj);
- }
-
- return result;
- }
-
- @SuppressWarnings("unchecked")
- public static Flow flowFromObject(Object object) {
- Map<String, Object> flowObject = (Map<String,Object>)object;
-
- String id = (String)flowObject.get("id");
- Boolean layedout = (Boolean)flowObject.get("layedout");
- Flow flow = new Flow(id);
- if (layedout != null) {
- flow.setLayedOut(layedout);
- }
- int projId = (Integer)flowObject.get("project.id");
- flow.setProjectId(projId);
-
- int version = (Integer)flowObject.get("version");
- flow.setVersion(version);
-
- // Loading projects
- List<Object> propertiesList = (List<Object>)flowObject.get("props");
- Map<String, FlowProps> properties = loadPropertiesFromObject(propertiesList);
- flow.addAllFlowProperties(properties.values());
-
- // Loading nodes
- List<Object> nodeList = (List<Object>)flowObject.get("nodes");
- Map<String, Node> nodes = loadNodesFromObjects(nodeList);
- flow.addAllNodes(nodes.values());
-
- // Loading edges
- List<Object> edgeList = (List<Object>)flowObject.get("edges");
- List<Edge> edges = loadEdgeFromObjects(edgeList, nodes);
- flow.addAllEdges(edges);
-
- Map<String, Object> metadata = (Map<String, Object>)flowObject.get("metadata");
-
- if (metadata != null) {
- flow.setMetadata(metadata);
- }
-
- flow.failureEmail = (List<String>)flowObject.get("failure.email");
- flow.successEmail = (List<String>)flowObject.get("success.email");
- if (flowObject.containsKey("mailCreator")) {
- flow.mailCreator = flowObject.get("mailCreator").toString();
- }
- return flow;
- }
-
- private static Map<String, Node> loadNodesFromObjects(List<Object> nodeList) {
- Map<String, Node> nodeMap = new HashMap<String, Node>();
-
- for (Object obj: nodeList) {
- Node node = Node.fromObject(obj);
- nodeMap.put(node.getId(), node);
- }
-
- return nodeMap;
- }
-
- private static List<Edge> loadEdgeFromObjects(List<Object> edgeList, Map<String, Node> nodes) {
- List<Edge> edgeResult = new ArrayList<Edge>();
-
- for (Object obj: edgeList) {
- Edge edge = Edge.fromObject(obj);
- edgeResult.add(edge);
- }
-
- return edgeResult;
- }
-
- private static Map<String, FlowProps> loadPropertiesFromObject(List<Object> propertyObjectList) {
- Map<String, FlowProps> properties = new HashMap<String, FlowProps>();
-
- for (Object propObj: propertyObjectList) {
- FlowProps prop = FlowProps.fromObject(propObj);
- properties.put(prop.getSource(), prop);
- }
-
- return properties;
- }
-
- public boolean isLayedOut() {
- return isLayedOut;
- }
-
- public Map<String, Object> getMetadata() {
- if(metadata == null){
- metadata = new HashMap<String, Object>();
- }
- return metadata;
- }
-
- public void setMetadata(Map<String, Object> metadata) {
- this.metadata = metadata;
- }
-
- public void setLayedOut(boolean layedOut) {
- this.isLayedOut = layedOut;
- }
-
- public Map<String, Node> getNodeMap() {
- return nodes;
- }
-
- public Map<String, Set<Edge>> getOutEdgeMap() {
- return outEdges;
- }
-
- public Map<String, Set<Edge>> getInEdgeMap() {
- return inEdges;
- }
-
- public FlowProps getFlowProps(String propSource) {
- return flowProps.get(propSource);
- }
-
- public Map<String, FlowProps> getAllFlowProps() {
- return flowProps;
- }
-
- public int getProjectId() {
- return projectId;
- }
-
- public void setProjectId(int projectId) {
- this.projectId = projectId;
- }
+ private final String id;
+ private int projectId;
+ private ArrayList<Node> startNodes = null;
+ private ArrayList<Node> endNodes = null;
+ private int numLevels = -1;
+
+ private HashMap<String, Node> nodes = new HashMap<String, Node>();
+
+ private HashMap<String, Edge> edges = new HashMap<String, Edge>();
+ private HashMap<String, Set<Edge>> outEdges =
+ new HashMap<String, Set<Edge>>();
+ private HashMap<String, Set<Edge>> inEdges = new HashMap<String, Set<Edge>>();
+ private HashMap<String, FlowProps> flowProps =
+ new HashMap<String, FlowProps>();
+
+ private List<String> failureEmail = new ArrayList<String>();
+ private List<String> successEmail = new ArrayList<String>();
+ private String mailCreator = DefaultMailCreator.DEFAULT_MAIL_CREATOR;
+ private ArrayList<String> errors;
+ private int version = -1;
+ private Map<String, Object> metadata = new HashMap<String, Object>();
+
+ private boolean isLayedOut = false;
+
+ public Flow(String id) {
+ this.id = id;
+ }
+
+ public void setVersion(int version) {
+ this.version = version;
+ }
+
+ public int getVersion() {
+ return version;
+ }
+
+ public void initialize() {
+ if (startNodes == null) {
+ startNodes = new ArrayList<Node>();
+ endNodes = new ArrayList<Node>();
+ for (Node node : nodes.values()) {
+ // If it doesn't have any incoming edges, its a start node
+ if (!inEdges.containsKey(node.getId())) {
+ startNodes.add(node);
+ }
+
+ // If it doesn't contain any outgoing edges, its an end node.
+ if (!outEdges.containsKey(node.getId())) {
+ endNodes.add(node);
+ }
+ }
+
+ for (Node node : startNodes) {
+ node.setLevel(0);
+ numLevels = 0;
+ recursiveSetLevels(node);
+ }
+ }
+ }
+
+ private void recursiveSetLevels(Node node) {
+ Set<Edge> edges = outEdges.get(node.getId());
+ if (edges != null) {
+ for (Edge edge : edges) {
+ Node nextNode = nodes.get(edge.getTargetId());
+ edge.setSource(node);
+ edge.setTarget(nextNode);
+
+ // We pick whichever is higher to get the max distance from root.
+ int level = Math.max(node.getLevel() + 1, nextNode.getLevel());
+ nextNode.setLevel(level);
+ numLevels = Math.max(level, numLevels);
+ recursiveSetLevels(nextNode);
+ }
+ }
+ }
+
+ public Node getNode(String nodeId) {
+ return nodes.get(nodeId);
+ }
+
+ public List<String> getSuccessEmails() {
+ return successEmail;
+ }
+
+ public String getMailCreator() {
+ return mailCreator;
+ }
+
+ public List<String> getFailureEmails() {
+ return failureEmail;
+ }
+
+ public void setMailCreator(String mailCreator) {
+ this.mailCreator = mailCreator;
+ }
+
+ public void addSuccessEmails(Collection<String> emails) {
+ successEmail.addAll(emails);
+ }
+
+ public void addFailureEmails(Collection<String> emails) {
+ failureEmail.addAll(emails);
+ }
+
+ public int getNumLevels() {
+ return numLevels;
+ }
+
+ public List<Node> getStartNodes() {
+ return startNodes;
+ }
+
+ public List<Node> getEndNodes() {
+ return endNodes;
+ }
+
+ public Set<Edge> getInEdges(String id) {
+ return inEdges.get(id);
+ }
+
+ public Set<Edge> getOutEdges(String id) {
+ return outEdges.get(id);
+ }
+
+ public void addAllNodes(Collection<Node> nodes) {
+ for (Node node : nodes) {
+ addNode(node);
+ }
+ }
+
+ public void addNode(Node node) {
+ nodes.put(node.getId(), node);
+ }
+
+ public void addAllFlowProperties(Collection<FlowProps> props) {
+ for (FlowProps prop : props) {
+ flowProps.put(prop.getSource(), prop);
+ }
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public void addError(String error) {
+ if (errors == null) {
+ errors = new ArrayList<String>();
+ }
+
+ errors.add(error);
+ }
+
+ public List<String> getErrors() {
+ return errors;
+ }
+
+ public boolean hasErrors() {
+ return errors != null && !errors.isEmpty();
+ }
+
+ public Collection<Node> getNodes() {
+ return nodes.values();
+ }
+
+ public Collection<Edge> getEdges() {
+ return edges.values();
+ }
+
+ public void addAllEdges(Collection<Edge> edges) {
+ for (Edge edge : edges) {
+ addEdge(edge);
+ }
+ }
+
+ public void addEdge(Edge edge) {
+ String source = edge.getSourceId();
+ String target = edge.getTargetId();
+
+ if (edge.hasError()) {
+ addError("Error on " + edge.getId() + ". " + edge.getError());
+ }
+
+ Set<Edge> sourceSet = getEdgeSet(outEdges, source);
+ sourceSet.add(edge);
+
+ Set<Edge> targetSet = getEdgeSet(inEdges, target);
+ targetSet.add(edge);
+
+ edges.put(edge.getId(), edge);
+ }
+
+ private Set<Edge> getEdgeSet(HashMap<String, Set<Edge>> map, String id) {
+ Set<Edge> edges = map.get(id);
+ if (edges == null) {
+ edges = new HashSet<Edge>();
+ map.put(id, edges);
+ }
+
+ return edges;
+ }
+
+ public Map<String, Object> toObject() {
+ HashMap<String, Object> flowObj = new HashMap<String, Object>();
+ flowObj.put("type", "flow");
+ flowObj.put("id", getId());
+ flowObj.put("project.id", projectId);
+ flowObj.put("version", version);
+ flowObj.put("props", objectizeProperties());
+ flowObj.put("nodes", objectizeNodes());
+ flowObj.put("edges", objectizeEdges());
+ flowObj.put("failure.email", failureEmail);
+ flowObj.put("success.email", successEmail);
+ flowObj.put("mailCreator", mailCreator);
+ flowObj.put("layedout", isLayedOut);
+ if (errors != null) {
+ flowObj.put("errors", errors);
+ }
+
+ if (metadata != null) {
+ flowObj.put("metadata", metadata);
+ }
+
+ return flowObj;
+ }
+
+ private List<Object> objectizeProperties() {
+ ArrayList<Object> result = new ArrayList<Object>();
+ for (FlowProps props : flowProps.values()) {
+ Object objProps = props.toObject();
+ result.add(objProps);
+ }
+
+ return result;
+ }
+
+ private List<Object> objectizeNodes() {
+ ArrayList<Object> result = new ArrayList<Object>();
+ for (Node node : getNodes()) {
+ Object nodeObj = node.toObject();
+ result.add(nodeObj);
+ }
+
+ return result;
+ }
+
+ private List<Object> objectizeEdges() {
+ ArrayList<Object> result = new ArrayList<Object>();
+ for (Edge edge : getEdges()) {
+ Object edgeObj = edge.toObject();
+ result.add(edgeObj);
+ }
+
+ return result;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static Flow flowFromObject(Object object) {
+ Map<String, Object> flowObject = (Map<String, Object>) object;
+
+ String id = (String) flowObject.get("id");
+ Boolean layedout = (Boolean) flowObject.get("layedout");
+ Flow flow = new Flow(id);
+ if (layedout != null) {
+ flow.setLayedOut(layedout);
+ }
+ int projId = (Integer) flowObject.get("project.id");
+ flow.setProjectId(projId);
+
+ int version = (Integer) flowObject.get("version");
+ flow.setVersion(version);
+
+ // Loading projects
+ List<Object> propertiesList = (List<Object>) flowObject.get("props");
+ Map<String, FlowProps> properties =
+ loadPropertiesFromObject(propertiesList);
+ flow.addAllFlowProperties(properties.values());
+
+ // Loading nodes
+ List<Object> nodeList = (List<Object>) flowObject.get("nodes");
+ Map<String, Node> nodes = loadNodesFromObjects(nodeList);
+ flow.addAllNodes(nodes.values());
+
+ // Loading edges
+ List<Object> edgeList = (List<Object>) flowObject.get("edges");
+ List<Edge> edges = loadEdgeFromObjects(edgeList, nodes);
+ flow.addAllEdges(edges);
+
+ Map<String, Object> metadata =
+ (Map<String, Object>) flowObject.get("metadata");
+
+ if (metadata != null) {
+ flow.setMetadata(metadata);
+ }
+
+ flow.failureEmail = (List<String>) flowObject.get("failure.email");
+ flow.successEmail = (List<String>) flowObject.get("success.email");
+ if (flowObject.containsKey("mailCreator")) {
+ flow.mailCreator = flowObject.get("mailCreator").toString();
+ }
+ return flow;
+ }
+
+ private static Map<String, Node> loadNodesFromObjects(List<Object> nodeList) {
+ Map<String, Node> nodeMap = new HashMap<String, Node>();
+
+ for (Object obj : nodeList) {
+ Node node = Node.fromObject(obj);
+ nodeMap.put(node.getId(), node);
+ }
+
+ return nodeMap;
+ }
+
+ private static List<Edge> loadEdgeFromObjects(List<Object> edgeList,
+ Map<String, Node> nodes) {
+ List<Edge> edgeResult = new ArrayList<Edge>();
+
+ for (Object obj : edgeList) {
+ Edge edge = Edge.fromObject(obj);
+ edgeResult.add(edge);
+ }
+
+ return edgeResult;
+ }
+
+ private static Map<String, FlowProps> loadPropertiesFromObject(
+ List<Object> propertyObjectList) {
+ Map<String, FlowProps> properties = new HashMap<String, FlowProps>();
+
+ for (Object propObj : propertyObjectList) {
+ FlowProps prop = FlowProps.fromObject(propObj);
+ properties.put(prop.getSource(), prop);
+ }
+
+ return properties;
+ }
+
+ public boolean isLayedOut() {
+ return isLayedOut;
+ }
+
+ public Map<String, Object> getMetadata() {
+ if (metadata == null) {
+ metadata = new HashMap<String, Object>();
+ }
+ return metadata;
+ }
+
+ public void setMetadata(Map<String, Object> metadata) {
+ this.metadata = metadata;
+ }
+
+ public void setLayedOut(boolean layedOut) {
+ this.isLayedOut = layedOut;
+ }
+
+ public Map<String, Node> getNodeMap() {
+ return nodes;
+ }
+
+ public Map<String, Set<Edge>> getOutEdgeMap() {
+ return outEdges;
+ }
+
+ public Map<String, Set<Edge>> getInEdgeMap() {
+ return inEdges;
+ }
+
+ public FlowProps getFlowProps(String propSource) {
+ return flowProps.get(propSource);
+ }
+
+ public Map<String, FlowProps> getAllFlowProps() {
+ return flowProps;
+ }
+
+ public int getProjectId() {
+ return projectId;
+ }
+
+ public void setProjectId(int projectId) {
+ this.projectId = projectId;
+ }
}
src/main/java/azkaban/flow/FlowProps.java 97(+49 -48)
diff --git a/src/main/java/azkaban/flow/FlowProps.java b/src/main/java/azkaban/flow/FlowProps.java
index f4cf7fe..437aff0 100644
--- a/src/main/java/azkaban/flow/FlowProps.java
+++ b/src/main/java/azkaban/flow/FlowProps.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -22,53 +22,54 @@ import java.util.Map;
import azkaban.utils.Props;
public class FlowProps {
- private String parentSource;
- private String propSource;
- private Props props = null;
+ private String parentSource;
+ private String propSource;
+ private Props props = null;
- public FlowProps(String parentSource, String propSource) {
- this.parentSource = parentSource;
- this.propSource = propSource;
- }
+ public FlowProps(String parentSource, String propSource) {
+ this.parentSource = parentSource;
+ this.propSource = propSource;
+ }
- public FlowProps(Props props) {
- this.setProps(props);
- }
+ public FlowProps(Props props) {
+ this.setProps(props);
+ }
- public Props getProps() {
- return props;
- }
+ public Props getProps() {
+ return props;
+ }
- public void setProps(Props props) {
- this.props = props;
- this.parentSource = props.getParent() == null ? null : props.getParent().getSource();
- this.propSource = props.getSource();
- }
-
- public String getSource() {
- return propSource;
- }
-
- public String getInheritedSource() {
- return parentSource;
- }
-
- public Object toObject() {
- HashMap<String, Object> obj = new HashMap<String, Object>();
- obj.put("source", propSource);
- if (parentSource != null) {
- obj.put("inherits", parentSource);
- }
- return obj;
- }
-
- @SuppressWarnings("unchecked")
- public static FlowProps fromObject(Object obj) {
- Map<String, Object> flowMap = (Map<String, Object>)obj;
- String source = (String)flowMap.get("source");
- String parentSource = (String)flowMap.get("inherits");
-
- FlowProps flowProps = new FlowProps(parentSource, source);
- return flowProps;
- }
+ public void setProps(Props props) {
+ this.props = props;
+ this.parentSource =
+ props.getParent() == null ? null : props.getParent().getSource();
+ this.propSource = props.getSource();
+ }
+
+ public String getSource() {
+ return propSource;
+ }
+
+ public String getInheritedSource() {
+ return parentSource;
+ }
+
+ public Object toObject() {
+ HashMap<String, Object> obj = new HashMap<String, Object>();
+ obj.put("source", propSource);
+ if (parentSource != null) {
+ obj.put("inherits", parentSource);
+ }
+ return obj;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static FlowProps fromObject(Object obj) {
+ Map<String, Object> flowMap = (Map<String, Object>) obj;
+ String source = (String) flowMap.get("source");
+ String parentSource = (String) flowMap.get("inherits");
+
+ FlowProps flowProps = new FlowProps(parentSource, source);
+ return flowProps;
+ }
}
src/main/java/azkaban/flow/Node.java 322(+161 -161)
diff --git a/src/main/java/azkaban/flow/Node.java b/src/main/java/azkaban/flow/Node.java
index 1a62b03..515420b 100644
--- a/src/main/java/azkaban/flow/Node.java
+++ b/src/main/java/azkaban/flow/Node.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -23,162 +23,162 @@ import java.util.Map;
import azkaban.utils.Utils;
public class Node {
- private final String id;
- private String jobSource;
- private String propsSource;
-
- private Point2D position = null;
- private int level;
- private int expectedRunTimeSec = 1;
- private String type;
-
- private String embeddedFlowId;
-
- public Node(String id) {
- this.id = id;
- }
-
- /**
- * Clones nodes
- * @param node
- */
- public Node(Node clone) {
- this.id = clone.id;
- this.propsSource = clone.propsSource;
- this.jobSource = clone.jobSource;
- }
-
- public String getId() {
- return id;
- }
-
- public String getType() {
- return type;
- }
-
- public void setType(String type) {
- this.type = type;
- }
-
- public Point2D getPosition() {
- return position;
- }
-
- public void setPosition(Point2D position) {
- this.position = position;
- }
-
- public void setPosition(double x, double y) {
- this.position = new Point2D.Double(x,y);
- }
-
- public int getLevel() {
- return level;
- }
-
- public void setLevel(int level) {
- this.level = level;
- }
-
- public String getJobSource() {
- return jobSource;
- }
-
- public void setJobSource(String jobSource) {
- this.jobSource = jobSource;
- }
-
- public String getPropsSource() {
- return propsSource;
- }
-
- public void setPropsSource(String propsSource) {
- this.propsSource = propsSource;
- }
-
- public void setExpectedRuntimeSec(int runtimeSec) {
- expectedRunTimeSec = runtimeSec;
- }
-
- public int getExpectedRuntimeSec() {
- return expectedRunTimeSec;
- }
-
- public void setEmbeddedFlowId(String flowId) {
- embeddedFlowId = flowId;
- }
-
- public String getEmbeddedFlowId() {
- return embeddedFlowId;
- }
-
- @SuppressWarnings("unchecked")
- public static Node fromObject(Object obj) {
- Map<String,Object> mapObj = (Map<String,Object>)obj;
- String id = (String)mapObj.get("id");
-
- Node node = new Node(id);
- String jobSource = (String)mapObj.get("jobSource");
- String propSource = (String)mapObj.get("propSource");
- String jobType = (String)mapObj.get("jobType");
-
- String embeddedFlowId = (String)mapObj.get("embeddedFlowId");
-
- node.setJobSource(jobSource);
- node.setPropsSource(propSource);
- node.setType(jobType);
- node.setEmbeddedFlowId(embeddedFlowId);
-
- Integer expectedRuntime = (Integer)mapObj.get("expectedRuntime");
- if (expectedRuntime != null) {
- node.setExpectedRuntimeSec(expectedRuntime);
- }
-
- Map<String,Object> layoutInfo = (Map<String,Object>)mapObj.get("layout");
- if (layoutInfo != null) {
- Double x = null;
- Double y = null;
- Integer level = null;
-
- try {
- x = Utils.convertToDouble(layoutInfo.get("x"));
- y = Utils.convertToDouble(layoutInfo.get("y"));
- level = (Integer)layoutInfo.get("level");
- }
- catch (ClassCastException e) {
- throw new RuntimeException("Error creating node " + id, e);
- }
-
- if (x != null && y != null) {
- node.setPosition(new Point2D.Double(x, y));
- }
- if (level != null) {
- node.setLevel(level);
- }
- }
-
- return node;
- }
-
- public Object toObject() {
- HashMap<String, Object> objMap = new HashMap<String, Object>();
- objMap.put("id", id);
- objMap.put("jobSource", jobSource);
- objMap.put("propSource", propsSource);
- objMap.put("jobType", type);
- if (embeddedFlowId != null) {
- objMap.put("embeddedFlowId", embeddedFlowId);
- }
- objMap.put("expectedRuntime", expectedRunTimeSec);
-
- HashMap<String, Object> layoutInfo = new HashMap<String, Object>();
- if (position != null) {
- layoutInfo.put("x", position.getX());
- layoutInfo.put("y", position.getY());
- }
- layoutInfo.put("level", level);
- objMap.put("layout", layoutInfo);
-
- return objMap;
- }
+ private final String id;
+ private String jobSource;
+ private String propsSource;
+
+ private Point2D position = null;
+ private int level;
+ private int expectedRunTimeSec = 1;
+ private String type;
+
+ private String embeddedFlowId;
+
+ public Node(String id) {
+ this.id = id;
+ }
+
+ /**
+ * Clones nodes
+ *
+ * @param node
+ */
+ public Node(Node clone) {
+ this.id = clone.id;
+ this.propsSource = clone.propsSource;
+ this.jobSource = clone.jobSource;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public Point2D getPosition() {
+ return position;
+ }
+
+ public void setPosition(Point2D position) {
+ this.position = position;
+ }
+
+ public void setPosition(double x, double y) {
+ this.position = new Point2D.Double(x, y);
+ }
+
+ public int getLevel() {
+ return level;
+ }
+
+ public void setLevel(int level) {
+ this.level = level;
+ }
+
+ public String getJobSource() {
+ return jobSource;
+ }
+
+ public void setJobSource(String jobSource) {
+ this.jobSource = jobSource;
+ }
+
+ public String getPropsSource() {
+ return propsSource;
+ }
+
+ public void setPropsSource(String propsSource) {
+ this.propsSource = propsSource;
+ }
+
+ public void setExpectedRuntimeSec(int runtimeSec) {
+ expectedRunTimeSec = runtimeSec;
+ }
+
+ public int getExpectedRuntimeSec() {
+ return expectedRunTimeSec;
+ }
+
+ public void setEmbeddedFlowId(String flowId) {
+ embeddedFlowId = flowId;
+ }
+
+ public String getEmbeddedFlowId() {
+ return embeddedFlowId;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static Node fromObject(Object obj) {
+ Map<String, Object> mapObj = (Map<String, Object>) obj;
+ String id = (String) mapObj.get("id");
+
+ Node node = new Node(id);
+ String jobSource = (String) mapObj.get("jobSource");
+ String propSource = (String) mapObj.get("propSource");
+ String jobType = (String) mapObj.get("jobType");
+
+ String embeddedFlowId = (String) mapObj.get("embeddedFlowId");
+
+ node.setJobSource(jobSource);
+ node.setPropsSource(propSource);
+ node.setType(jobType);
+ node.setEmbeddedFlowId(embeddedFlowId);
+
+ Integer expectedRuntime = (Integer) mapObj.get("expectedRuntime");
+ if (expectedRuntime != null) {
+ node.setExpectedRuntimeSec(expectedRuntime);
+ }
+
+ Map<String, Object> layoutInfo = (Map<String, Object>) mapObj.get("layout");
+ if (layoutInfo != null) {
+ Double x = null;
+ Double y = null;
+ Integer level = null;
+
+ try {
+ x = Utils.convertToDouble(layoutInfo.get("x"));
+ y = Utils.convertToDouble(layoutInfo.get("y"));
+ level = (Integer) layoutInfo.get("level");
+ } catch (ClassCastException e) {
+ throw new RuntimeException("Error creating node " + id, e);
+ }
+
+ if (x != null && y != null) {
+ node.setPosition(new Point2D.Double(x, y));
+ }
+ if (level != null) {
+ node.setLevel(level);
+ }
+ }
+
+ return node;
+ }
+
+ public Object toObject() {
+ HashMap<String, Object> objMap = new HashMap<String, Object>();
+ objMap.put("id", id);
+ objMap.put("jobSource", jobSource);
+ objMap.put("propSource", propsSource);
+ objMap.put("jobType", type);
+ if (embeddedFlowId != null) {
+ objMap.put("embeddedFlowId", embeddedFlowId);
+ }
+ objMap.put("expectedRuntime", expectedRunTimeSec);
+
+ HashMap<String, Object> layoutInfo = new HashMap<String, Object>();
+ if (position != null) {
+ layoutInfo.put("x", position.getX());
+ layoutInfo.put("y", position.getY());
+ }
+ layoutInfo.put("level", level);
+ objMap.put("layout", layoutInfo);
+
+ return objMap;
+ }
}
diff --git a/src/main/java/azkaban/flow/SpecialJobTypes.java b/src/main/java/azkaban/flow/SpecialJobTypes.java
index dfbe03a..083c2f2 100644
--- a/src/main/java/azkaban/flow/SpecialJobTypes.java
+++ b/src/main/java/azkaban/flow/SpecialJobTypes.java
@@ -1,9 +1,25 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.flow;
/**
 * Job type names with special meaning to the flow executor (branch start/end
 * markers and embedded-flow jobs), plus the property key naming an embedded
 * flow.
 */
public class SpecialJobTypes {
  public static final String BRANCH_START_TYPE = "branch.start";
  public static final String BRANCH_END_TYPE = "branch.end";

  public static final String EMBEDDED_FLOW_TYPE = "flow";
  public static final String FLOW_NAME = "flow.name";

  // Constants-only holder; never instantiated.
  private SpecialJobTypes() {
  }
}
src/main/java/azkaban/jmx/DisplayName.java 20(+18 -2)
diff --git a/src/main/java/azkaban/jmx/DisplayName.java b/src/main/java/azkaban/jmx/DisplayName.java
index 7a55f94..9498555 100644
--- a/src/main/java/azkaban/jmx/DisplayName.java
+++ b/src/main/java/azkaban/jmx/DisplayName.java
@@ -1,3 +1,19 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.jmx;
import java.lang.annotation.Documented;
@@ -16,6 +32,6 @@ import javax.management.DescriptorKey;
/**
 * Method-level annotation that sets the JMX "displayName" descriptor field,
 * so MBean operations and attributes show a friendly name in JMX consoles.
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface DisplayName {
  // DescriptorKey maps the annotation value into the MBean descriptor.
  @DescriptorKey("displayName")
  String value();
}
diff --git a/src/main/java/azkaban/jmx/JmxExecutorManager.java b/src/main/java/azkaban/jmx/JmxExecutorManager.java
index e3acefb..08e5534 100644
--- a/src/main/java/azkaban/jmx/JmxExecutorManager.java
+++ b/src/main/java/azkaban/jmx/JmxExecutorManager.java
@@ -1,3 +1,19 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.jmx;
import java.util.ArrayList;
@@ -6,46 +22,44 @@ import java.util.List;
import azkaban.executor.ExecutorManager;
public class JmxExecutorManager implements JmxExecutorManagerMBean {
- private ExecutorManager manager;
-
- public JmxExecutorManager(ExecutorManager manager) {
- this.manager = manager;
- }
-
- @Override
- public int getNumRunningFlows() {
- return this.manager.getRunningFlows().size();
- }
-
- @Override
- public String getExecutorThreadState() {
- return manager.getExecutorManagerThreadState().toString();
- }
-
- @Override
- public String getExecutorThreadStage() {
- return manager.getExecutorThreadStage();
- }
-
- @Override
- public boolean isThreadActive() {
- return manager.isExecutorManagerThreadActive();
- }
-
- @Override
- public Long getLastThreadCheckTime() {
- return manager.getLastExecutorManagerThreadCheckTime();
- }
-
- @Override
- public List<String> getPrimaryExecutorHostPorts() {
- return new ArrayList<String>(manager.getPrimaryServerHosts());
- }
-
- @Override
- public String getRunningFlows() {
- return manager.getRunningFlowIds();
- }
-
-
+ private ExecutorManager manager;
+
+ public JmxExecutorManager(ExecutorManager manager) {
+ this.manager = manager;
+ }
+
+ @Override
+ public int getNumRunningFlows() {
+ return this.manager.getRunningFlows().size();
+ }
+
+ @Override
+ public String getExecutorThreadState() {
+ return manager.getExecutorManagerThreadState().toString();
+ }
+
+ @Override
+ public String getExecutorThreadStage() {
+ return manager.getExecutorThreadStage();
+ }
+
+ @Override
+ public boolean isThreadActive() {
+ return manager.isExecutorManagerThreadActive();
+ }
+
+ @Override
+ public Long getLastThreadCheckTime() {
+ return manager.getLastExecutorManagerThreadCheckTime();
+ }
+
+ @Override
+ public List<String> getPrimaryExecutorHostPorts() {
+ return new ArrayList<String>(manager.getPrimaryServerHosts());
+ }
+
+ @Override
+ public String getRunningFlows() {
+ return manager.getRunningFlowIds();
+ }
}
diff --git a/src/main/java/azkaban/jmx/JmxExecutorManagerAdapter.java b/src/main/java/azkaban/jmx/JmxExecutorManagerAdapter.java
index fc0f8cf..4637970 100644
--- a/src/main/java/azkaban/jmx/JmxExecutorManagerAdapter.java
+++ b/src/main/java/azkaban/jmx/JmxExecutorManagerAdapter.java
@@ -1,3 +1,19 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.jmx;
import java.io.IOException;
@@ -6,42 +22,43 @@ import java.util.List;
import azkaban.executor.ExecutorManagerAdapter;
-public class JmxExecutorManagerAdapter implements JmxExecutorManagerAdapterMBean {
- private ExecutorManagerAdapter manager;
-
- public JmxExecutorManagerAdapter(ExecutorManagerAdapter manager) {
- this.manager = manager;
- }
-
- @Override
- public int getNumRunningFlows() {
- try {
- return this.manager.getRunningFlows().size();
- } catch (IOException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- return 0;
- }
- }
-
- @Override
- public String getExecutorManagerThreadState() {
- return manager.getExecutorManagerThreadState().toString();
- }
-
- @Override
- public boolean isExecutorManagerThreadActive() {
- return manager.isExecutorManagerThreadActive();
- }
-
- @Override
- public Long getLastExecutorManagerThreadCheckTime() {
- return manager.getLastExecutorManagerThreadCheckTime();
- }
-
- @Override
- public List<String> getPrimaryExecutorHostPorts() {
- return new ArrayList<String>(manager.getPrimaryServerHosts());
- }
+public class JmxExecutorManagerAdapter implements
+ JmxExecutorManagerAdapterMBean {
+ private ExecutorManagerAdapter manager;
+
+ public JmxExecutorManagerAdapter(ExecutorManagerAdapter manager) {
+ this.manager = manager;
+ }
+
+ @Override
+ public int getNumRunningFlows() {
+ try {
+ return this.manager.getRunningFlows().size();
+ } catch (IOException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ return 0;
+ }
+ }
+
+ @Override
+ public String getExecutorManagerThreadState() {
+ return manager.getExecutorManagerThreadState().toString();
+ }
+
+ @Override
+ public boolean isExecutorManagerThreadActive() {
+ return manager.isExecutorManagerThreadActive();
+ }
+
+ @Override
+ public Long getLastExecutorManagerThreadCheckTime() {
+ return manager.getLastExecutorManagerThreadCheckTime();
+ }
+
+ @Override
+ public List<String> getPrimaryExecutorHostPorts() {
+ return new ArrayList<String>(manager.getPrimaryServerHosts());
+ }
}
diff --git a/src/main/java/azkaban/jmx/JmxExecutorManagerAdapterMBean.java b/src/main/java/azkaban/jmx/JmxExecutorManagerAdapterMBean.java
index 197e721..686f604 100644
--- a/src/main/java/azkaban/jmx/JmxExecutorManagerAdapterMBean.java
+++ b/src/main/java/azkaban/jmx/JmxExecutorManagerAdapterMBean.java
@@ -1,26 +1,42 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.jmx;
import java.util.List;
/**
 * JMX management interface for servers backed by an ExecutorManagerAdapter.
 * Each @DisplayName value is surfaced as the operation's friendly name in
 * JMX consoles.
 */
public interface JmxExecutorManagerAdapterMBean {
  @DisplayName("OPERATION: getNumRunningFlows")
  public int getNumRunningFlows();

  @DisplayName("OPERATION: getExecutorThreadState")
  public String getExecutorManagerThreadState();

  @DisplayName("OPERATION: isThreadActive")
  public boolean isExecutorManagerThreadActive();

  @DisplayName("OPERATION: getLastThreadCheckTime")
  public Long getLastExecutorManagerThreadCheckTime();

  @DisplayName("OPERATION: getPrimaryExecutorHostPorts")
  public List<String> getPrimaryExecutorHostPorts();

  // Operations below are intentionally disabled; kept for reference.
  // @DisplayName("OPERATION: getExecutorThreadStage")
  // public String getExecutorThreadStage();
  //
  // @DisplayName("OPERATION: getRunningFlows")
  // public String getRunningFlows();
}
diff --git a/src/main/java/azkaban/jmx/JmxExecutorManagerMBean.java b/src/main/java/azkaban/jmx/JmxExecutorManagerMBean.java
index b29d00a..9bc1175 100644
--- a/src/main/java/azkaban/jmx/JmxExecutorManagerMBean.java
+++ b/src/main/java/azkaban/jmx/JmxExecutorManagerMBean.java
@@ -1,26 +1,42 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.jmx;
import java.util.List;
/**
 * JMX management interface for the web server's ExecutorManager. Each
 * @DisplayName value is surfaced as the operation's friendly name in JMX
 * consoles.
 */
public interface JmxExecutorManagerMBean {
  @DisplayName("OPERATION: getNumRunningFlows")
  public int getNumRunningFlows();

  @DisplayName("OPERATION: getRunningFlows")
  public String getRunningFlows();

  @DisplayName("OPERATION: getExecutorThreadState")
  public String getExecutorThreadState();

  @DisplayName("OPERATION: getExecutorThreadStage")
  public String getExecutorThreadStage();

  @DisplayName("OPERATION: isThreadActive")
  public boolean isThreadActive();

  @DisplayName("OPERATION: getLastThreadCheckTime")
  public Long getLastThreadCheckTime();

  @DisplayName("OPERATION: getPrimaryExecutorHostPorts")
  public List<String> getPrimaryExecutorHostPorts();
}
src/main/java/azkaban/jmx/JmxFlowRunnerManager.java 126(+71 -55)
diff --git a/src/main/java/azkaban/jmx/JmxFlowRunnerManager.java b/src/main/java/azkaban/jmx/JmxFlowRunnerManager.java
index 3541140..f453988 100644
--- a/src/main/java/azkaban/jmx/JmxFlowRunnerManager.java
+++ b/src/main/java/azkaban/jmx/JmxFlowRunnerManager.java
@@ -1,62 +1,78 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.jmx;
import azkaban.execapp.FlowRunnerManager;
public class JmxFlowRunnerManager implements JmxFlowRunnerManagerMBean {
- private FlowRunnerManager manager;
-
- public JmxFlowRunnerManager(FlowRunnerManager manager) {
- this.manager = manager;
- }
-
- @Override
- public long getLastCleanerThreadCheckTime() {
- return manager.getLastCleanerThreadCheckTime();
- }
-
- @Override
- public long getLastSubmitterThreadCheckTime() {
- return manager.getLastSubmitterThreadCheckTime();
- }
-
- @Override
- public boolean isSubmitterThreadActive() {
- return manager.isSubmitterThreadActive();
- }
-
- @Override
- public boolean isCleanerThreadActive() {
- return manager.isCleanerThreadActive();
- }
-
- @Override
- public String getSubmitterThreadState() {
- return manager.getSubmitterThreadState().toString();
- }
-
- @Override
- public String getCleanerThreadState() {
- return manager.getCleanerThreadState().toString();
- }
-
- @Override
- public boolean isExecutorThreadPoolShutdown() {
- return manager.isExecutorThreadPoolShutdown();
- }
-
- @Override
- public int getNumExecutingFlows() {
- return manager.getNumExecutingFlows();
- }
-
- @Override
- public int countTotalNumRunningJobs() {
- return manager.getNumExecutingJobs();
- }
-
- @Override
- public String getRunningFlows() {
- return manager.getRunningFlowIds();
- }
+ private FlowRunnerManager manager;
+
+ public JmxFlowRunnerManager(FlowRunnerManager manager) {
+ this.manager = manager;
+ }
+
+ @Override
+ public long getLastCleanerThreadCheckTime() {
+ return manager.getLastCleanerThreadCheckTime();
+ }
+
+ @Override
+ public long getLastSubmitterThreadCheckTime() {
+ return manager.getLastSubmitterThreadCheckTime();
+ }
+
+ @Override
+ public boolean isSubmitterThreadActive() {
+ return manager.isSubmitterThreadActive();
+ }
+
+ @Override
+ public boolean isCleanerThreadActive() {
+ return manager.isCleanerThreadActive();
+ }
+
+ @Override
+ public String getSubmitterThreadState() {
+ return manager.getSubmitterThreadState().toString();
+ }
+
+ @Override
+ public String getCleanerThreadState() {
+ return manager.getCleanerThreadState().toString();
+ }
+
+ @Override
+ public boolean isExecutorThreadPoolShutdown() {
+ return manager.isExecutorThreadPoolShutdown();
+ }
+
+ @Override
+ public int getNumExecutingFlows() {
+ return manager.getNumExecutingFlows();
+ }
+
+ @Override
+ public int countTotalNumRunningJobs() {
+ return manager.getNumExecutingJobs();
+ }
+
+ @Override
+ public String getRunningFlows() {
+ return manager.getRunningFlowIds();
+ }
}
diff --git a/src/main/java/azkaban/jmx/JmxFlowRunnerManagerMBean.java b/src/main/java/azkaban/jmx/JmxFlowRunnerManagerMBean.java
index ed509ef..32edcc3 100644
--- a/src/main/java/azkaban/jmx/JmxFlowRunnerManagerMBean.java
+++ b/src/main/java/azkaban/jmx/JmxFlowRunnerManagerMBean.java
@@ -1,33 +1,49 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.jmx;
public interface JmxFlowRunnerManagerMBean {
- @DisplayName("OPERATION: getLastCleanerThreadCheckTime")
- public long getLastCleanerThreadCheckTime();
+ @DisplayName("OPERATION: getLastCleanerThreadCheckTime")
+ public long getLastCleanerThreadCheckTime();
+
+ @DisplayName("OPERATION: getLastSubmitterThreadCheckTime")
+ public long getLastSubmitterThreadCheckTime();
+
+ @DisplayName("OPERATION: isSubmitterThreadActive")
+ public boolean isSubmitterThreadActive();
- @DisplayName("OPERATION: getLastSubmitterThreadCheckTime")
- public long getLastSubmitterThreadCheckTime();
+ @DisplayName("OPERATION: isCleanerThreadActive")
+ public boolean isCleanerThreadActive();
- @DisplayName("OPERATION: isSubmitterThreadActive")
- public boolean isSubmitterThreadActive();
+ @DisplayName("OPERATION: getSubmitterThreadState")
+ public String getSubmitterThreadState();
- @DisplayName("OPERATION: isCleanerThreadActive")
- public boolean isCleanerThreadActive();
+ @DisplayName("OPERATION: getCleanerThreadState")
+ public String getCleanerThreadState();
- @DisplayName("OPERATION: getSubmitterThreadState")
- public String getSubmitterThreadState();
+ @DisplayName("OPERATION: isExecutorThreadPoolShutdown")
+ public boolean isExecutorThreadPoolShutdown();
- @DisplayName("OPERATION: getCleanerThreadState")
- public String getCleanerThreadState();
+ @DisplayName("OPERATION: getNumExecutingFlows")
+ public int getNumExecutingFlows();
- @DisplayName("OPERATION: isExecutorThreadPoolShutdown")
- public boolean isExecutorThreadPoolShutdown();
+ @DisplayName("OPERATION: getRunningFlows")
+ public String getRunningFlows();
- @DisplayName("OPERATION: getNumExecutingFlows")
- public int getNumExecutingFlows();
-
- @DisplayName("OPERATION: getRunningFlows")
- public String getRunningFlows();
-
- @DisplayName("OPERATION: getTotalNumRunningJobs")
- public int countTotalNumRunningJobs();
+ @DisplayName("OPERATION: getTotalNumRunningJobs")
+ public int countTotalNumRunningJobs();
}
src/main/java/azkaban/jmx/JmxJettyServer.java 260(+138 -122)
diff --git a/src/main/java/azkaban/jmx/JmxJettyServer.java b/src/main/java/azkaban/jmx/JmxJettyServer.java
index 6a74376..fd520f2 100644
--- a/src/main/java/azkaban/jmx/JmxJettyServer.java
+++ b/src/main/java/azkaban/jmx/JmxJettyServer.java
@@ -1,129 +1,145 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.jmx;
import org.mortbay.jetty.Connector;
import org.mortbay.jetty.Server;
public class JmxJettyServer implements JmxJettyServerMBean {
- private Server server;
- private Connector connector;
-
- public JmxJettyServer(Server server) {
- this.server = server;
- this.connector = server.getConnectors()[0];
- }
-
- @Override
- public boolean isRunning() {
- return this.server.isRunning();
- }
-
- @Override
- public boolean isFailed() {
- return this.server.isFailed();
- }
-
- @Override
- public boolean isStopped() {
- return this.server.isStopped();
- }
-
- @Override
- public int getNumThreads() {
- return this.server.getThreadPool().getThreads();
- }
-
- @Override
- public int getNumIdleThreads() {
- return this.server.getThreadPool().getIdleThreads();
- }
-
- @Override
- public String getHost() {
- return connector.getHost();
- }
-
- @Override
- public int getPort() {
- return connector.getPort();
- }
-
- @Override
- public int getConfidentialPort() {
- return connector.getConfidentialPort();
- }
-
- @Override
- public int getConnections() {
- return connector.getConnections();
- }
-
- @Override
- public int getConnectionsOpen() {
- return connector.getConnectionsOpen();
- }
-
- @Override
- public int getConnectionsOpenMax() {
- return connector.getConnectionsOpenMax();
- }
-
- @Override
- public int getConnectionsOpenMin() {
- return connector.getConnectionsOpenMin();
- }
-
- @Override
- public long getConnectionsDurationAve() {
- return connector.getConnectionsDurationAve();
- }
-
- @Override
- public long getConnectionsDurationMax() {
- return connector.getConnectionsDurationMax();
- }
-
- @Override
- public long getConnectionsDurationMin() {
- return connector.getConnectionsDurationMin();
- }
-
- @Override
- public long getConnectionsDurationTotal() {
- return connector.getConnectionsDurationTotal();
- }
-
- @Override
- public long getConnectionsRequestAve() {
- return connector.getConnectionsRequestsAve();
- }
-
- @Override
- public long getConnectionsRequestMax() {
- return connector.getConnectionsRequestsMax();
- }
-
- @Override
- public long getConnectionsRequestMin() {
- return connector.getConnectionsRequestsMin();
- }
-
- @Override
- public void turnStatsOn() {
- connector.setStatsOn(true);
- }
-
- @Override
- public void turnStatsOff() {
- connector.setStatsOn(false);
- }
-
- @Override
- public void resetStats() {
- connector.statsReset();
- }
-
- @Override
- public boolean isStatsOn() {
- return connector.getStatsOn();
- }
+ private Server server;
+ private Connector connector;
+
+ public JmxJettyServer(Server server) {
+ this.server = server;
+ this.connector = server.getConnectors()[0];
+ }
+
+ @Override
+ public boolean isRunning() {
+ return this.server.isRunning();
+ }
+
+ @Override
+ public boolean isFailed() {
+ return this.server.isFailed();
+ }
+
+ @Override
+ public boolean isStopped() {
+ return this.server.isStopped();
+ }
+
+ @Override
+ public int getNumThreads() {
+ return this.server.getThreadPool().getThreads();
+ }
+
+ @Override
+ public int getNumIdleThreads() {
+ return this.server.getThreadPool().getIdleThreads();
+ }
+
+ @Override
+ public String getHost() {
+ return connector.getHost();
+ }
+
+ @Override
+ public int getPort() {
+ return connector.getPort();
+ }
+
+ @Override
+ public int getConfidentialPort() {
+ return connector.getConfidentialPort();
+ }
+
+ @Override
+ public int getConnections() {
+ return connector.getConnections();
+ }
+
+ @Override
+ public int getConnectionsOpen() {
+ return connector.getConnectionsOpen();
+ }
+
+ @Override
+ public int getConnectionsOpenMax() {
+ return connector.getConnectionsOpenMax();
+ }
+
+ @Override
+ public int getConnectionsOpenMin() {
+ return connector.getConnectionsOpenMin();
+ }
+
+ @Override
+ public long getConnectionsDurationAve() {
+ return connector.getConnectionsDurationAve();
+ }
+
+ @Override
+ public long getConnectionsDurationMax() {
+ return connector.getConnectionsDurationMax();
+ }
+
+ @Override
+ public long getConnectionsDurationMin() {
+ return connector.getConnectionsDurationMin();
+ }
+
+ @Override
+ public long getConnectionsDurationTotal() {
+ return connector.getConnectionsDurationTotal();
+ }
+
+ @Override
+ public long getConnectionsRequestAve() {
+ return connector.getConnectionsRequestsAve();
+ }
+
+ @Override
+ public long getConnectionsRequestMax() {
+ return connector.getConnectionsRequestsMax();
+ }
+
+ @Override
+ public long getConnectionsRequestMin() {
+ return connector.getConnectionsRequestsMin();
+ }
+
+ @Override
+ public void turnStatsOn() {
+ connector.setStatsOn(true);
+ }
+
+ @Override
+ public void turnStatsOff() {
+ connector.setStatsOn(false);
+ }
+
+ @Override
+ public void resetStats() {
+ connector.statsReset();
+ }
+
+ @Override
+ public boolean isStatsOn() {
+ return connector.getStatsOn();
+ }
}
src/main/java/azkaban/jmx/JmxJettyServerMBean.java 152(+84 -68)
diff --git a/src/main/java/azkaban/jmx/JmxJettyServerMBean.java b/src/main/java/azkaban/jmx/JmxJettyServerMBean.java
index 7b1f046..7862c6c 100644
--- a/src/main/java/azkaban/jmx/JmxJettyServerMBean.java
+++ b/src/main/java/azkaban/jmx/JmxJettyServerMBean.java
@@ -1,72 +1,88 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.jmx;
public interface JmxJettyServerMBean {
- @DisplayName("OPERATION: isRunning")
- public boolean isRunning();
-
- @DisplayName("OPERATION: isFailed")
- public boolean isFailed();
-
- @DisplayName("OPERATION: isStopped")
- public boolean isStopped();
-
- @DisplayName("OPERATION: getNumThreads")
- public int getNumThreads();
-
- @DisplayName("OPERATION: getNumIdleThreads")
- public int getNumIdleThreads();
-
- @DisplayName("OPERATION: getHost")
- public String getHost();
-
- @DisplayName("OPERATION: getPort")
- public int getPort();
-
- @DisplayName("OPERATION: getConfidentialPort")
- public int getConfidentialPort();
-
- @DisplayName("OPERATION: getConnections")
- public int getConnections();
-
- @DisplayName("OPERATION: getConnectionsOpen")
- public int getConnectionsOpen();
-
- @DisplayName("OPERATION: getConnectionsOpenMax")
- public int getConnectionsOpenMax();
-
- @DisplayName("OPERATION: getConnectionsOpenMin")
- public int getConnectionsOpenMin();
-
- @DisplayName("OPERATION: getConnectionsDurationAve")
- public long getConnectionsDurationAve();
-
- @DisplayName("OPERATION: getConnectionsDurationMax")
- public long getConnectionsDurationMax();
-
- @DisplayName("OPERATION: getConnectionsDurationMin")
- public long getConnectionsDurationMin();
-
- @DisplayName("OPERATION: getConnectionsDurationTotal")
- public long getConnectionsDurationTotal();
-
- @DisplayName("OPERATION: getConnectionsRequestAve")
- public long getConnectionsRequestAve();
-
- @DisplayName("OPERATION: getConnectionsRequestMax")
- public long getConnectionsRequestMax();
-
- @DisplayName("OPERATION: getConnectionsRequestMin")
- public long getConnectionsRequestMin();
-
- @DisplayName("OPERATION: turnStatsOn")
- public void turnStatsOn();
-
- @DisplayName("OPERATION: turnStatsOff")
- public void turnStatsOff();
-
- @DisplayName("OPERATION: resetStats")
- public void resetStats();
-
- @DisplayName("OPERATION: isStatsOn")
- public boolean isStatsOn();
+ @DisplayName("OPERATION: isRunning")
+ public boolean isRunning();
+
+ @DisplayName("OPERATION: isFailed")
+ public boolean isFailed();
+
+ @DisplayName("OPERATION: isStopped")
+ public boolean isStopped();
+
+ @DisplayName("OPERATION: getNumThreads")
+ public int getNumThreads();
+
+ @DisplayName("OPERATION: getNumIdleThreads")
+ public int getNumIdleThreads();
+
+ @DisplayName("OPERATION: getHost")
+ public String getHost();
+
+ @DisplayName("OPERATION: getPort")
+ public int getPort();
+
+ @DisplayName("OPERATION: getConfidentialPort")
+ public int getConfidentialPort();
+
+ @DisplayName("OPERATION: getConnections")
+ public int getConnections();
+
+ @DisplayName("OPERATION: getConnectionsOpen")
+ public int getConnectionsOpen();
+
+ @DisplayName("OPERATION: getConnectionsOpenMax")
+ public int getConnectionsOpenMax();
+
+ @DisplayName("OPERATION: getConnectionsOpenMin")
+ public int getConnectionsOpenMin();
+
+ @DisplayName("OPERATION: getConnectionsDurationAve")
+ public long getConnectionsDurationAve();
+
+ @DisplayName("OPERATION: getConnectionsDurationMax")
+ public long getConnectionsDurationMax();
+
+ @DisplayName("OPERATION: getConnectionsDurationMin")
+ public long getConnectionsDurationMin();
+
+ @DisplayName("OPERATION: getConnectionsDurationTotal")
+ public long getConnectionsDurationTotal();
+
+ @DisplayName("OPERATION: getConnectionsRequestAve")
+ public long getConnectionsRequestAve();
+
+ @DisplayName("OPERATION: getConnectionsRequestMax")
+ public long getConnectionsRequestMax();
+
+ @DisplayName("OPERATION: getConnectionsRequestMin")
+ public long getConnectionsRequestMin();
+
+ @DisplayName("OPERATION: turnStatsOn")
+ public void turnStatsOn();
+
+ @DisplayName("OPERATION: turnStatsOff")
+ public void turnStatsOff();
+
+ @DisplayName("OPERATION: resetStats")
+ public void resetStats();
+
+ @DisplayName("OPERATION: isStatsOn")
+ public boolean isStatsOn();
}
src/main/java/azkaban/jmx/JmxTriggerManager.java 118(+67 -51)
diff --git a/src/main/java/azkaban/jmx/JmxTriggerManager.java b/src/main/java/azkaban/jmx/JmxTriggerManager.java
index 2c16aaa..713211c 100644
--- a/src/main/java/azkaban/jmx/JmxTriggerManager.java
+++ b/src/main/java/azkaban/jmx/JmxTriggerManager.java
@@ -1,58 +1,74 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.jmx;
import azkaban.trigger.TriggerManagerAdapter;
import azkaban.trigger.TriggerManagerAdapter.TriggerJMX;
public class JmxTriggerManager implements JmxTriggerManagerMBean {
- private TriggerJMX jmxStats;
-
- public JmxTriggerManager(TriggerManagerAdapter manager) {
- this.jmxStats = manager.getJMX();
- }
-
- @Override
- public long getLastRunnerThreadCheckTime() {
- return jmxStats.getLastRunnerThreadCheckTime();
- }
-
- @Override
- public boolean isRunnerThreadActive() {
- return jmxStats.isRunnerThreadActive();
- }
-
- @Override
- public String getPrimaryTriggerHostPort() {
- return jmxStats.getPrimaryServerHost();
- }
-
-// @Override
-// public List<String> getAllTriggerHostPorts() {
-// return new ArrayList<String>(manager.getAllActiveTriggerServerHosts());
-// }
-
- @Override
- public int getNumTriggers() {
- return jmxStats.getNumTriggers();
- }
-
- @Override
- public String getTriggerSources() {
- return jmxStats.getTriggerSources();
- }
-
- @Override
- public String getTriggerIds() {
- return jmxStats.getTriggerIds();
- }
-
- @Override
- public long getScannerIdleTime() {
- return jmxStats.getScannerIdleTime();
- }
-
- @Override
- public String getScannerThreadStage() {
- // TODO Auto-generated method stub
- return jmxStats.getScannerThreadStage();
- }
+ private TriggerJMX jmxStats;
+
+ public JmxTriggerManager(TriggerManagerAdapter manager) {
+ this.jmxStats = manager.getJMX();
+ }
+
+ @Override
+ public long getLastRunnerThreadCheckTime() {
+ return jmxStats.getLastRunnerThreadCheckTime();
+ }
+
+ @Override
+ public boolean isRunnerThreadActive() {
+ return jmxStats.isRunnerThreadActive();
+ }
+
+ @Override
+ public String getPrimaryTriggerHostPort() {
+ return jmxStats.getPrimaryServerHost();
+ }
+
+ // @Override
+ // public List<String> getAllTriggerHostPorts() {
+ // return new ArrayList<String>(manager.getAllActiveTriggerServerHosts());
+ // }
+
+ @Override
+ public int getNumTriggers() {
+ return jmxStats.getNumTriggers();
+ }
+
+ @Override
+ public String getTriggerSources() {
+ return jmxStats.getTriggerSources();
+ }
+
+ @Override
+ public String getTriggerIds() {
+ return jmxStats.getTriggerIds();
+ }
+
+ @Override
+ public long getScannerIdleTime() {
+ return jmxStats.getScannerIdleTime();
+ }
+
+ @Override
+ public String getScannerThreadStage() {
+ // TODO Auto-generated method stub
+ return jmxStats.getScannerThreadStage();
+ }
}
diff --git a/src/main/java/azkaban/jmx/JmxTriggerManagerMBean.java b/src/main/java/azkaban/jmx/JmxTriggerManagerMBean.java
index f7d3169..641af43 100644
--- a/src/main/java/azkaban/jmx/JmxTriggerManagerMBean.java
+++ b/src/main/java/azkaban/jmx/JmxTriggerManagerMBean.java
@@ -1,31 +1,47 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.jmx;
-public interface JmxTriggerManagerMBean {
-
- @DisplayName("OPERATION: getLastThreadCheckTime")
- public long getLastRunnerThreadCheckTime();
-
- @DisplayName("OPERATION: isThreadActive")
- public boolean isRunnerThreadActive();
-
- @DisplayName("OPERATION: getPrimaryTriggerHostPort")
- public String getPrimaryTriggerHostPort();
-
-// @DisplayName("OPERATION: getAllTriggerHostPorts")
-// public List<String> getAllTriggerHostPorts();
-
- @DisplayName("OPERATION: getNumTriggers")
- public int getNumTriggers();
-
- @DisplayName("OPERATION: getTriggerSources")
- public String getTriggerSources();
-
- @DisplayName("OPERATION: getTriggerIds")
- public String getTriggerIds();
-
- @DisplayName("OPERATION: getScannerIdleTime")
- public long getScannerIdleTime();
-
- @DisplayName("OPERATION: getScannerThreadStage")
- public String getScannerThreadStage();
+public interface JmxTriggerManagerMBean {
+
+ @DisplayName("OPERATION: getLastThreadCheckTime")
+ public long getLastRunnerThreadCheckTime();
+
+ @DisplayName("OPERATION: isThreadActive")
+ public boolean isRunnerThreadActive();
+
+ @DisplayName("OPERATION: getPrimaryTriggerHostPort")
+ public String getPrimaryTriggerHostPort();
+
+ // @DisplayName("OPERATION: getAllTriggerHostPorts")
+ // public List<String> getAllTriggerHostPorts();
+
+ @DisplayName("OPERATION: getNumTriggers")
+ public int getNumTriggers();
+
+ @DisplayName("OPERATION: getTriggerSources")
+ public String getTriggerSources();
+
+ @DisplayName("OPERATION: getTriggerIds")
+ public String getTriggerIds();
+
+ @DisplayName("OPERATION: getScannerIdleTime")
+ public long getScannerIdleTime();
+
+ @DisplayName("OPERATION: getScannerThreadStage")
+ public String getScannerThreadStage();
}
diff --git a/src/main/java/azkaban/jmx/JmxTriggerRunnerManagerMBean.java b/src/main/java/azkaban/jmx/JmxTriggerRunnerManagerMBean.java
index ca0f45b..2da91d1 100644
--- a/src/main/java/azkaban/jmx/JmxTriggerRunnerManagerMBean.java
+++ b/src/main/java/azkaban/jmx/JmxTriggerRunnerManagerMBean.java
@@ -1,23 +1,39 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.jmx;
public interface JmxTriggerRunnerManagerMBean {
- @DisplayName("OPERATION: getLastRunnerThreadCheckTime")
- public long getLastRunnerThreadCheckTime();
-
- @DisplayName("OPERATION: getNumTriggers")
- public int getNumTriggers();
-
- @DisplayName("OPERATION: isRunnerThreadActive")
- public boolean isRunnerThreadActive();
-
- @DisplayName("OPERATION: getTriggerSources")
- public String getTriggerSources();
-
- @DisplayName("OPERATION: getTriggerIds")
- public String getTriggerIds();
-
- @DisplayName("OPERATION: getScannerIdleTime")
- public long getScannerIdleTime();
-
+ @DisplayName("OPERATION: getLastRunnerThreadCheckTime")
+ public long getLastRunnerThreadCheckTime();
+
+ @DisplayName("OPERATION: getNumTriggers")
+ public int getNumTriggers();
+
+ @DisplayName("OPERATION: isRunnerThreadActive")
+ public boolean isRunnerThreadActive();
+
+ @DisplayName("OPERATION: getTriggerSources")
+ public String getTriggerSources();
+
+ @DisplayName("OPERATION: getTriggerIds")
+ public String getTriggerIds();
+
+ @DisplayName("OPERATION: getScannerIdleTime")
+ public long getScannerIdleTime();
+
}
diff --git a/src/main/java/azkaban/jmx/ParameterName.java b/src/main/java/azkaban/jmx/ParameterName.java
index 3ee0b97..8a4a077 100644
--- a/src/main/java/azkaban/jmx/ParameterName.java
+++ b/src/main/java/azkaban/jmx/ParameterName.java
@@ -1,12 +1,12 @@
/*
* Copyright 2011 Adconion, Inc.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -31,6 +31,6 @@ import javax.management.DescriptorKey;
@Target(ElementType.PARAMETER)
@Retention(RetentionPolicy.RUNTIME)
public @interface ParameterName {
- @DescriptorKey("parameterName")
- String value();
+ @DescriptorKey("parameterName")
+ String value();
}
src/main/java/azkaban/jobExecutor/AbstractJob.java 160(+80 -80)
diff --git a/src/main/java/azkaban/jobExecutor/AbstractJob.java b/src/main/java/azkaban/jobExecutor/AbstractJob.java
index 75a6042..917c82b 100644
--- a/src/main/java/azkaban/jobExecutor/AbstractJob.java
+++ b/src/main/java/azkaban/jobExecutor/AbstractJob.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -22,82 +22,82 @@ import azkaban.utils.Props;
public abstract class AbstractJob implements Job {
- public static final String JOB_TYPE = "type";
- public static final String JOB_CLASS = "job.class";
- public static final String JOB_PATH = "job.path";
- public static final String JOB_FULLPATH = "job.fullpath";
- public static final String JOB_ID = "job.id";
-
- private final String _id;
- private final Logger _log;
- private volatile double _progress;
-
- protected AbstractJob(String id, Logger log) {
- _id = id;
- _log = log;
- _progress = 0.0;
- }
-
- public String getId() {
- return _id;
- }
-
- public double getProgress() throws Exception {
- return _progress;
- }
-
- public void setProgress(double progress) {
- this._progress = progress;
- }
-
- public void cancel() throws Exception {
- throw new RuntimeException("Job " + _id + " does not support cancellation!");
- }
-
- public Logger getLog() {
- return this._log;
- }
-
- public void debug(String message) {
- this._log.debug(message);
- }
-
- public void debug(String message, Throwable t) {
- this._log.debug(message, t);
- }
-
- public void info(String message) {
- this._log.info(message);
- }
-
- public void info(String message, Throwable t) {
- this._log.info(message, t);
- }
-
- public void warn(String message) {
- this._log.warn(message);
- }
-
- public void warn(String message, Throwable t) {
- this._log.warn(message, t);
- }
-
- public void error(String message) {
- this._log.error(message);
- }
-
- public void error(String message, Throwable t) {
- this._log.error(message, t);
- }
-
- public Props getJobGeneratedProperties() {
- return new Props();
- }
-
- public abstract void run() throws Exception;
-
- public boolean isCanceled() {
- return false;
- }
+ public static final String JOB_TYPE = "type";
+ public static final String JOB_CLASS = "job.class";
+ public static final String JOB_PATH = "job.path";
+ public static final String JOB_FULLPATH = "job.fullpath";
+ public static final String JOB_ID = "job.id";
+
+ private final String _id;
+ private final Logger _log;
+ private volatile double _progress;
+
+ protected AbstractJob(String id, Logger log) {
+ _id = id;
+ _log = log;
+ _progress = 0.0;
+ }
+
+ public String getId() {
+ return _id;
+ }
+
+ public double getProgress() throws Exception {
+ return _progress;
+ }
+
+ public void setProgress(double progress) {
+ this._progress = progress;
+ }
+
+ public void cancel() throws Exception {
+ throw new RuntimeException("Job " + _id + " does not support cancellation!");
+ }
+
+ public Logger getLog() {
+ return this._log;
+ }
+
+ public void debug(String message) {
+ this._log.debug(message);
+ }
+
+ public void debug(String message, Throwable t) {
+ this._log.debug(message, t);
+ }
+
+ public void info(String message) {
+ this._log.info(message);
+ }
+
+ public void info(String message, Throwable t) {
+ this._log.info(message, t);
+ }
+
+ public void warn(String message) {
+ this._log.warn(message);
+ }
+
+ public void warn(String message, Throwable t) {
+ this._log.warn(message, t);
+ }
+
+ public void error(String message) {
+ this._log.error(message);
+ }
+
+ public void error(String message, Throwable t) {
+ this._log.error(message, t);
+ }
+
+ public Props getJobGeneratedProperties() {
+ return new Props();
+ }
+
+ public abstract void run() throws Exception;
+
+ public boolean isCanceled() {
+ return false;
+ }
}
src/main/java/azkaban/jobExecutor/AbstractProcessJob.java 319(+162 -157)
diff --git a/src/main/java/azkaban/jobExecutor/AbstractProcessJob.java b/src/main/java/azkaban/jobExecutor/AbstractProcessJob.java
index 25f825b..400a25c 100644
--- a/src/main/java/azkaban/jobExecutor/AbstractProcessJob.java
+++ b/src/main/java/azkaban/jobExecutor/AbstractProcessJob.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -34,161 +34,166 @@ import azkaban.utils.PropsUtils;
/*
* A revised process-based job
- *
+ *
*/
public abstract class AbstractProcessJob extends AbstractJob {
- private final Logger log;
- public static final String ENV_PREFIX = "env.";
- public static final String ENV_PREFIX_UCASE = "ENV.";
- public static final String WORKING_DIR = "working.dir";
- public static final String JOB_PROP_ENV = "JOB_PROP_FILE";
- public static final String JOB_NAME_ENV = "JOB_NAME";
- public static final String JOB_OUTPUT_PROP_FILE = "JOB_OUTPUT_PROP_FILE";
-
- protected final String _jobPath;
-
- protected volatile Props jobProps;
- protected volatile Props sysProps;
-
- protected String _cwd;
-
- private volatile Props generatedProperties;
-
- protected AbstractProcessJob(String jobid, final Props sysProps, final Props jobProps, final Logger log) {
- super(jobid, log);
-
- this.jobProps = jobProps;
- this.sysProps = sysProps;
- _cwd = getWorkingDirectory();
- _jobPath = _cwd;
-
- this.log = log;
- }
-
- public Props getJobProps() {
- return jobProps;
- }
-
- public Props getSysProps() {
- return sysProps;
- }
-
- public String getJobPath() {
- return _jobPath;
- }
-
- protected void resolveProps() {
- jobProps = PropsUtils.resolveProps(jobProps);
- }
-
- @Override
- public Props getJobGeneratedProperties() {
- return generatedProperties;
- }
-
- /**
- * initialize temporary and final property file
- *
- * @return {tmpPropFile, outputPropFile}
- */
- public File[] initPropsFiles() {
- // Create properties file with additionally all input generated properties.
- File[] files = new File[2];
- files[0] = createFlattenedPropsFile(_cwd);
-
- jobProps.put(ENV_PREFIX + JOB_PROP_ENV, files[0].getAbsolutePath());
- jobProps.put(ENV_PREFIX + JOB_NAME_ENV, getId());
-
- files[1] = createOutputPropsFile(getId(), _cwd);
- jobProps.put(ENV_PREFIX + JOB_OUTPUT_PROP_FILE, files[1].getAbsolutePath());
-
- return files;
- }
-
- public String getCwd() {
- return _cwd;
- }
-
- public Map<String, String> getEnvironmentVariables() {
- Props props = getJobProps();
- Map<String, String> envMap = props.getMapByPrefix(ENV_PREFIX);
- envMap.putAll(props.getMapByPrefix(ENV_PREFIX_UCASE));
- return envMap;
- }
-
- public String getWorkingDirectory() {
- String workingDir = getJobProps().getString(WORKING_DIR, _jobPath);
- if (workingDir == null) {
- return "";
- }
-
- return workingDir;
- }
-
- public Props loadOutputFileProps(final File outputPropertiesFile) {
- InputStream reader = null;
- try {
- System.err.println("output properties file=" + outputPropertiesFile.getAbsolutePath());
- reader = new BufferedInputStream(new FileInputStream(outputPropertiesFile));
-
- Props outputProps = new Props();
- final String content = Streams.asString(reader).trim();
-
- if (!content.isEmpty()) {
- @SuppressWarnings("unchecked")
- Map<String, Object> propMap = (Map<String, Object>)JSONUtils.parseJSONFromString(content);
-
- for (Map.Entry<String, Object> entry : propMap.entrySet()) {
- outputProps.put(entry.getKey(), entry.getValue().toString());
- }
- }
- return outputProps;
- } catch (FileNotFoundException e) {
- log.info(String.format(
- "File[%s] wasn't found, returning empty props.",
- outputPropertiesFile));
- return new Props();
- } catch (Exception e) {
- log.error(
- "Exception thrown when trying to load output file props. Returning empty Props instead of failing. Is this really the best thing to do?",
- e);
- return new Props();
- } finally {
- IOUtils.closeQuietly(reader);
- }
- }
-
- public File createFlattenedPropsFile(final String workingDir) {
- File directory = new File(workingDir);
- File tempFile = null;
- try {
- // The temp file prefix must be at least 3 characters.
- tempFile = File.createTempFile(getId() + "_props_", "_tmp", directory);
- jobProps.storeFlattened(tempFile);
- } catch (IOException e) {
- throw new RuntimeException("Failed to create temp property file ", e);
- }
-
- return tempFile;
- }
-
- public static File createOutputPropsFile(final String id, final String workingDir) {
- System.err.println("cwd=" + workingDir);
-
- File directory = new File(workingDir);
- File tempFile = null;
- try {
- tempFile = File.createTempFile(id + "_output_", "_tmp", directory);
- } catch (IOException e) {
- System.err.println("Failed to create temp output property file :\n");
- e.printStackTrace(System.err);
- throw new RuntimeException("Failed to create temp output property file ", e);
- }
- return tempFile;
- }
-
- public void generateProperties(final File outputFile) {
- generatedProperties = loadOutputFileProps(outputFile);
- }
+ private final Logger log;
+ public static final String ENV_PREFIX = "env.";
+ public static final String ENV_PREFIX_UCASE = "ENV.";
+ public static final String WORKING_DIR = "working.dir";
+ public static final String JOB_PROP_ENV = "JOB_PROP_FILE";
+ public static final String JOB_NAME_ENV = "JOB_NAME";
+ public static final String JOB_OUTPUT_PROP_FILE = "JOB_OUTPUT_PROP_FILE";
+
+ protected final String _jobPath;
+
+ protected volatile Props jobProps;
+ protected volatile Props sysProps;
+
+ protected String _cwd;
+
+ private volatile Props generatedProperties;
+
+ protected AbstractProcessJob(String jobid, final Props sysProps,
+ final Props jobProps, final Logger log) {
+ super(jobid, log);
+
+ this.jobProps = jobProps;
+ this.sysProps = sysProps;
+ _cwd = getWorkingDirectory();
+ _jobPath = _cwd;
+
+ this.log = log;
+ }
+
+ public Props getJobProps() {
+ return jobProps;
+ }
+
+ public Props getSysProps() {
+ return sysProps;
+ }
+
+ public String getJobPath() {
+ return _jobPath;
+ }
+
+ protected void resolveProps() {
+ jobProps = PropsUtils.resolveProps(jobProps);
+ }
+
+ @Override
+ public Props getJobGeneratedProperties() {
+ return generatedProperties;
+ }
+
+ /**
+ * initialize temporary and final property file
+ *
+ * @return {tmpPropFile, outputPropFile}
+ */
+ public File[] initPropsFiles() {
+ // Create properties file with additionally all input generated properties.
+ File[] files = new File[2];
+ files[0] = createFlattenedPropsFile(_cwd);
+
+ jobProps.put(ENV_PREFIX + JOB_PROP_ENV, files[0].getAbsolutePath());
+ jobProps.put(ENV_PREFIX + JOB_NAME_ENV, getId());
+
+ files[1] = createOutputPropsFile(getId(), _cwd);
+ jobProps.put(ENV_PREFIX + JOB_OUTPUT_PROP_FILE, files[1].getAbsolutePath());
+
+ return files;
+ }
+
+ public String getCwd() {
+ return _cwd;
+ }
+
+ public Map<String, String> getEnvironmentVariables() {
+ Props props = getJobProps();
+ Map<String, String> envMap = props.getMapByPrefix(ENV_PREFIX);
+ envMap.putAll(props.getMapByPrefix(ENV_PREFIX_UCASE));
+ return envMap;
+ }
+
+ public String getWorkingDirectory() {
+ String workingDir = getJobProps().getString(WORKING_DIR, _jobPath);
+ if (workingDir == null) {
+ return "";
+ }
+
+ return workingDir;
+ }
+
+ public Props loadOutputFileProps(final File outputPropertiesFile) {
+ InputStream reader = null;
+ try {
+ System.err.println("output properties file="
+ + outputPropertiesFile.getAbsolutePath());
+ reader =
+ new BufferedInputStream(new FileInputStream(outputPropertiesFile));
+
+ Props outputProps = new Props();
+ final String content = Streams.asString(reader).trim();
+
+ if (!content.isEmpty()) {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> propMap =
+ (Map<String, Object>) JSONUtils.parseJSONFromString(content);
+
+ for (Map.Entry<String, Object> entry : propMap.entrySet()) {
+ outputProps.put(entry.getKey(), entry.getValue().toString());
+ }
+ }
+ return outputProps;
+ } catch (FileNotFoundException e) {
+ log.info(String.format("File[%s] wasn't found, returning empty props.",
+ outputPropertiesFile));
+ return new Props();
+ } catch (Exception e) {
+ log.error(
+ "Exception thrown when trying to load output file props. Returning empty Props instead of failing. Is this really the best thing to do?",
+ e);
+ return new Props();
+ } finally {
+ IOUtils.closeQuietly(reader);
+ }
+ }
+
+ public File createFlattenedPropsFile(final String workingDir) {
+ File directory = new File(workingDir);
+ File tempFile = null;
+ try {
+ // The temp file prefix must be at least 3 characters.
+ tempFile = File.createTempFile(getId() + "_props_", "_tmp", directory);
+ jobProps.storeFlattened(tempFile);
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to create temp property file ", e);
+ }
+
+ return tempFile;
+ }
+
+ public static File createOutputPropsFile(final String id,
+ final String workingDir) {
+ System.err.println("cwd=" + workingDir);
+
+ File directory = new File(workingDir);
+ File tempFile = null;
+ try {
+ tempFile = File.createTempFile(id + "_output_", "_tmp", directory);
+ } catch (IOException e) {
+ System.err.println("Failed to create temp output property file :\n");
+ e.printStackTrace(System.err);
+ throw new RuntimeException("Failed to create temp output property file ",
+ e);
+ }
+ return tempFile;
+ }
+
+ public void generateProperties(final File outputFile) {
+ generatedProperties = loadOutputFileProps(outputFile);
+ }
}
src/main/java/azkaban/jobExecutor/JavaProcessJob.java 247(+125 -122)
diff --git a/src/main/java/azkaban/jobExecutor/JavaProcessJob.java b/src/main/java/azkaban/jobExecutor/JavaProcessJob.java
index c6be913..645401e 100644
--- a/src/main/java/azkaban/jobExecutor/JavaProcessJob.java
+++ b/src/main/java/azkaban/jobExecutor/JavaProcessJob.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -25,123 +25,126 @@ import org.apache.log4j.Logger;
import azkaban.utils.Props;
public class JavaProcessJob extends ProcessJob {
- public static final String CLASSPATH = "classpath";
- public static final String GLOBAL_CLASSPATH = "global.classpaths";
- public static final String JAVA_CLASS = "java.class";
- public static final String INITIAL_MEMORY_SIZE = "Xms";
- public static final String MAX_MEMORY_SIZE = "Xmx";
- public static final String MAIN_ARGS = "main.args";
- public static final String JVM_PARAMS = "jvm.args";
- public static final String GLOBAL_JVM_PARAMS = "global.jvm.args";
-
- public static final String DEFAULT_INITIAL_MEMORY_SIZE = "64M";
- public static final String DEFAULT_MAX_MEMORY_SIZE = "256M";
-
- public static String JAVA_COMMAND = "java";
-
- public JavaProcessJob(String jobid, Props sysProps, Props jobProps, Logger logger) {
- super(jobid, sysProps, jobProps, logger);
- }
-
- @Override
- protected List<String> getCommandList() {
- ArrayList<String> list = new ArrayList<String>();
- list.add(createCommandLine());
- return list;
- }
-
- protected String createCommandLine() {
- String command = JAVA_COMMAND + " ";
- command += getJVMArguments() + " ";
- command += "-Xms" + getInitialMemorySize() + " ";
- command += "-Xmx" + getMaxMemorySize() + " ";
- command += "-cp " + createArguments(getClassPaths(), ":") + " ";
- command += getJavaClass() + " ";
- command += getMainArguments();
-
- return command;
- }
-
- protected String getJavaClass() {
- return getJobProps().getString(JAVA_CLASS);
- }
-
- protected String getClassPathParam() {
- List<String> classPath = getClassPaths();
- if (classPath == null || classPath.size() == 0) {
- return "";
- }
-
- return "-cp " + createArguments(classPath, ":") + " ";
- }
-
- protected List<String> getClassPaths() {
-
- List<String> classPaths = getJobProps().getStringList(CLASSPATH, null, ",");
-
- ArrayList<String> classpathList = new ArrayList<String>();
- // Adding global properties used system wide.
- if (getJobProps().containsKey(GLOBAL_CLASSPATH)) {
- List<String> globalClasspath = getJobProps().getStringList(GLOBAL_CLASSPATH);
- for (String global: globalClasspath) {
- getLog().info("Adding to global classpath:" + global);
- classpathList.add(global);
- }
- }
-
- if (classPaths == null) {
- File path = new File(getPath());
- //File parent = path.getParentFile();
- getLog().info("No classpath specified. Trying to load classes from " + path);
-
- if (path != null) {
- for (File file : path.listFiles()) {
- if (file.getName().endsWith(".jar")) {
- // log.info("Adding to classpath:" + file.getName());
- classpathList.add(file.getName());
- }
- }
- }
- }
- else {
- classpathList.addAll(classPaths);
- }
-
- return classpathList;
- }
-
- protected String getInitialMemorySize() {
- return getJobProps().getString(INITIAL_MEMORY_SIZE, DEFAULT_INITIAL_MEMORY_SIZE);
- }
-
- protected String getMaxMemorySize() {
- return getJobProps().getString(MAX_MEMORY_SIZE, DEFAULT_MAX_MEMORY_SIZE);
- }
-
- protected String getMainArguments() {
- return getJobProps().getString(MAIN_ARGS, "");
- }
-
- protected String getJVMArguments() {
- String globalJVMArgs = getJobProps().getString(GLOBAL_JVM_PARAMS, null);
-
- if (globalJVMArgs == null) {
- return getJobProps().getString(JVM_PARAMS, "");
- }
-
- return globalJVMArgs + " " + getJobProps().getString(JVM_PARAMS, "");
- }
-
- protected String createArguments(List<String> arguments, String separator) {
- if (arguments != null && arguments.size() > 0) {
- String param = "";
- for (String arg : arguments) {
- param += arg + separator;
- }
-
- return param.substring(0, param.length() - 1);
- }
-
- return "";
- }
+ public static final String CLASSPATH = "classpath";
+ public static final String GLOBAL_CLASSPATH = "global.classpaths";
+ public static final String JAVA_CLASS = "java.class";
+ public static final String INITIAL_MEMORY_SIZE = "Xms";
+ public static final String MAX_MEMORY_SIZE = "Xmx";
+ public static final String MAIN_ARGS = "main.args";
+ public static final String JVM_PARAMS = "jvm.args";
+ public static final String GLOBAL_JVM_PARAMS = "global.jvm.args";
+
+ public static final String DEFAULT_INITIAL_MEMORY_SIZE = "64M";
+ public static final String DEFAULT_MAX_MEMORY_SIZE = "256M";
+
+ public static String JAVA_COMMAND = "java";
+
+ public JavaProcessJob(String jobid, Props sysProps, Props jobProps,
+ Logger logger) {
+ super(jobid, sysProps, jobProps, logger);
+ }
+
+ @Override
+ protected List<String> getCommandList() {
+ ArrayList<String> list = new ArrayList<String>();
+ list.add(createCommandLine());
+ return list;
+ }
+
+ protected String createCommandLine() {
+ String command = JAVA_COMMAND + " ";
+ command += getJVMArguments() + " ";
+ command += "-Xms" + getInitialMemorySize() + " ";
+ command += "-Xmx" + getMaxMemorySize() + " ";
+ command += "-cp " + createArguments(getClassPaths(), ":") + " ";
+ command += getJavaClass() + " ";
+ command += getMainArguments();
+
+ return command;
+ }
+
+ protected String getJavaClass() {
+ return getJobProps().getString(JAVA_CLASS);
+ }
+
+ protected String getClassPathParam() {
+ List<String> classPath = getClassPaths();
+ if (classPath == null || classPath.size() == 0) {
+ return "";
+ }
+
+ return "-cp " + createArguments(classPath, ":") + " ";
+ }
+
+ protected List<String> getClassPaths() {
+
+ List<String> classPaths = getJobProps().getStringList(CLASSPATH, null, ",");
+
+ ArrayList<String> classpathList = new ArrayList<String>();
+ // Adding global properties used system wide.
+ if (getJobProps().containsKey(GLOBAL_CLASSPATH)) {
+ List<String> globalClasspath =
+ getJobProps().getStringList(GLOBAL_CLASSPATH);
+ for (String global : globalClasspath) {
+ getLog().info("Adding to global classpath:" + global);
+ classpathList.add(global);
+ }
+ }
+
+ if (classPaths == null) {
+ File path = new File(getPath());
+ // File parent = path.getParentFile();
+ getLog().info(
+ "No classpath specified. Trying to load classes from " + path);
+
+ if (path != null) {
+ for (File file : path.listFiles()) {
+ if (file.getName().endsWith(".jar")) {
+ // log.info("Adding to classpath:" + file.getName());
+ classpathList.add(file.getName());
+ }
+ }
+ }
+ } else {
+ classpathList.addAll(classPaths);
+ }
+
+ return classpathList;
+ }
+
+ protected String getInitialMemorySize() {
+ return getJobProps().getString(INITIAL_MEMORY_SIZE,
+ DEFAULT_INITIAL_MEMORY_SIZE);
+ }
+
+ protected String getMaxMemorySize() {
+ return getJobProps().getString(MAX_MEMORY_SIZE, DEFAULT_MAX_MEMORY_SIZE);
+ }
+
+ protected String getMainArguments() {
+ return getJobProps().getString(MAIN_ARGS, "");
+ }
+
+ protected String getJVMArguments() {
+ String globalJVMArgs = getJobProps().getString(GLOBAL_JVM_PARAMS, null);
+
+ if (globalJVMArgs == null) {
+ return getJobProps().getString(JVM_PARAMS, "");
+ }
+
+ return globalJVMArgs + " " + getJobProps().getString(JVM_PARAMS, "");
+ }
+
+ protected String createArguments(List<String> arguments, String separator) {
+ if (arguments != null && arguments.size() > 0) {
+ String param = "";
+ for (String arg : arguments) {
+ param += arg + separator;
+ }
+
+ return param.substring(0, param.length() - 1);
+ }
+
+ return "";
+ }
}
src/main/java/azkaban/jobExecutor/Job.java 80(+39 -41)
diff --git a/src/main/java/azkaban/jobExecutor/Job.java b/src/main/java/azkaban/jobExecutor/Job.java
index 8f51d2e..1bb4c80 100644
--- a/src/main/java/azkaban/jobExecutor/Job.java
+++ b/src/main/java/azkaban/jobExecutor/Job.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -31,47 +31,45 @@ import azkaban.utils.Props;
public interface Job {
- /**
- * Returns a unique(should be checked in xml) string name/id for the Job.
- *
- * @return
- */
- public String getId();
+ /**
+ * Returns a unique(should be checked in xml) string name/id for the Job.
+ *
+ * @return
+ */
+ public String getId();
- /**
- * Run the job. In general this method can only be run once. Must either
- * succeed or throw an exception.
- */
- public void run() throws Exception;
+ /**
+ * Run the job. In general this method can only be run once. Must either
+ * succeed or throw an exception.
+ */
+ public void run() throws Exception;
- /**
- * Best effort attempt to cancel the job.
- *
- * @throws Exception
- * If cancel fails
- */
- public void cancel() throws Exception;
+ /**
+ * Best effort attempt to cancel the job.
+ *
+ * @throws Exception If cancel fails
+ */
+ public void cancel() throws Exception;
- /**
- * Returns a progress report between [0 - 1.0] to indicate the percentage
- * complete
- *
- * @throws Exception
- * If getting progress fails
- */
- public double getProgress() throws Exception;
+ /**
+ * Returns a progress report between [0 - 1.0] to indicate the percentage
+ * complete
+ *
+ * @throws Exception If getting progress fails
+ */
+ public double getProgress() throws Exception;
- /**
- * Get the generated properties from this job.
- *
- * @return
- */
- public Props getJobGeneratedProperties();
+ /**
+ * Get the generated properties from this job.
+ *
+ * @return
+ */
+ public Props getJobGeneratedProperties();
- /**
- * Determine if the job was cancelled.
- *
- * @return
- */
- public boolean isCanceled();
+ /**
+ * Determine if the job was cancelled.
+ *
+ * @return
+ */
+ public boolean isCanceled();
}
src/main/java/azkaban/jobExecutor/LongArgJob.java 212(+107 -105)
diff --git a/src/main/java/azkaban/jobExecutor/LongArgJob.java b/src/main/java/azkaban/jobExecutor/LongArgJob.java
index adeea74..a5aa0b6 100644
--- a/src/main/java/azkaban/jobExecutor/LongArgJob.java
+++ b/src/main/java/azkaban/jobExecutor/LongArgJob.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -33,106 +33,108 @@ import azkaban.jobExecutor.utils.process.AzkabanProcessBuilder;
*/
public abstract class LongArgJob extends AbstractProcessJob {
- private static final long KILL_TIME_MS = 5000;
- private final AzkabanProcessBuilder builder;
- private volatile AzkabanProcess process;
-
- public LongArgJob(String jobid, String[] command, Props sysProps, Props jobProps, Logger log) {
- this(jobid, command, sysProps, jobProps, log, new HashSet<String>(0));
- }
-
- public LongArgJob(String jobid, String[] command, Props sysProps, Props jobProp, Logger log, Set<String> suppressedKeys) {
- // super(command, desc);
- super(jobid, sysProps, jobProp, log);
- // String cwd = descriptor.getProps().getString(WORKING_DIR, new
- // File(descriptor.getFullPath()).getParent());
-
- this.builder = new AzkabanProcessBuilder(command)
- .setEnv(getJobProps()
- .getMapByPrefix(ENV_PREFIX))
- .setWorkingDir(getCwd())
- .setLogger(getLog());
- appendProps(suppressedKeys);
- }
-
- public void run() throws Exception {
- try {
- resolveProps();
- } catch (Exception e) {
- error("Bad property definition! " + e.getMessage());
- }
-
- long startMs = System.currentTimeMillis();
- info("Command: " + builder.getCommandString());
- if (builder.getEnv().size() > 0) {
- info("Environment variables: " + builder.getEnv());
- }
- info("Working directory: " + builder.getWorkingDir());
-
- File[] propFiles = initPropsFiles();
- // System.err.println("outputfile=" + propFiles[1]);
-
- boolean success = false;
- this.process = builder.build();
- try {
- this.process.run();
- success = true;
- } catch (Exception e) {
- for (File file : propFiles) {
- if (file != null && file.exists()) {
- file.delete();
- }
- }
- throw new RuntimeException(e);
- } finally {
- this.process = null;
- info("Process completed " + (success ? "successfully" : "unsuccessfully") + " in "
- + ((System.currentTimeMillis() - startMs) / 1000) + " seconds.");
- }
-
- // Get the output properties from this job.
- generateProperties(propFiles[1]);
-
- for (File file : propFiles) {
- if (file != null && file.exists()) {
- file.delete();
- }
- }
- }
-
- /**
- * This gives access to the process builder used to construct the process.
- * An overriding class can use this to add to the command being executed.
- */
- protected AzkabanProcessBuilder getBuilder() {
- return this.builder;
- }
-
- @Override
- public void cancel() throws InterruptedException {
- if (process == null) {
- throw new IllegalStateException("Not started.");
- }
-
- boolean killed = process.softKill(KILL_TIME_MS, TimeUnit.MILLISECONDS);
- if (!killed) {
- warn("Kill with signal TERM failed. Killing with KILL signal.");
- process.hardKill();
- }
- }
-
- @Override
- public double getProgress() {
- return process != null && process.isComplete() ? 1.0 : 0.0;
- }
-
- private void appendProps(Set<String> suppressed) {
- AzkabanProcessBuilder builder = this.getBuilder();
- Props props = getJobProps();
- for (String key : props.getKeySet()) {
- if (!suppressed.contains(key)) {
- builder.addArg("--" + key, props.get(key));
- }
- }
- }
+ private static final long KILL_TIME_MS = 5000;
+ private final AzkabanProcessBuilder builder;
+ private volatile AzkabanProcess process;
+
+ public LongArgJob(String jobid, String[] command, Props sysProps,
+ Props jobProps, Logger log) {
+ this(jobid, command, sysProps, jobProps, log, new HashSet<String>(0));
+ }
+
+ public LongArgJob(String jobid, String[] command, Props sysProps,
+ Props jobProp, Logger log, Set<String> suppressedKeys) {
+ // super(command, desc);
+ super(jobid, sysProps, jobProp, log);
+ // String cwd = descriptor.getProps().getString(WORKING_DIR, new
+ // File(descriptor.getFullPath()).getParent());
+
+ this.builder =
+ new AzkabanProcessBuilder(command)
+ .setEnv(getJobProps().getMapByPrefix(ENV_PREFIX))
+ .setWorkingDir(getCwd()).setLogger(getLog());
+ appendProps(suppressedKeys);
+ }
+
+ public void run() throws Exception {
+ try {
+ resolveProps();
+ } catch (Exception e) {
+ error("Bad property definition! " + e.getMessage());
+ }
+
+ long startMs = System.currentTimeMillis();
+ info("Command: " + builder.getCommandString());
+ if (builder.getEnv().size() > 0) {
+ info("Environment variables: " + builder.getEnv());
+ }
+ info("Working directory: " + builder.getWorkingDir());
+
+ File[] propFiles = initPropsFiles();
+ // System.err.println("outputfile=" + propFiles[1]);
+
+ boolean success = false;
+ this.process = builder.build();
+ try {
+ this.process.run();
+ success = true;
+ } catch (Exception e) {
+ for (File file : propFiles) {
+ if (file != null && file.exists()) {
+ file.delete();
+ }
+ }
+ throw new RuntimeException(e);
+ } finally {
+ this.process = null;
+ info("Process completed " + (success ? "successfully" : "unsuccessfully")
+ + " in " + ((System.currentTimeMillis() - startMs) / 1000)
+ + " seconds.");
+ }
+
+ // Get the output properties from this job.
+ generateProperties(propFiles[1]);
+
+ for (File file : propFiles) {
+ if (file != null && file.exists()) {
+ file.delete();
+ }
+ }
+ }
+
+ /**
+ * This gives access to the process builder used to construct the process. An
+ * overriding class can use this to add to the command being executed.
+ */
+ protected AzkabanProcessBuilder getBuilder() {
+ return this.builder;
+ }
+
+ @Override
+ public void cancel() throws InterruptedException {
+ if (process == null) {
+ throw new IllegalStateException("Not started.");
+ }
+
+ boolean killed = process.softKill(KILL_TIME_MS, TimeUnit.MILLISECONDS);
+ if (!killed) {
+ warn("Kill with signal TERM failed. Killing with KILL signal.");
+ process.hardKill();
+ }
+ }
+
+ @Override
+ public double getProgress() {
+ return process != null && process.isComplete() ? 1.0 : 0.0;
+ }
+
+ private void appendProps(Set<String> suppressed) {
+ AzkabanProcessBuilder builder = this.getBuilder();
+ Props props = getJobProps();
+ for (String key : props.getKeySet()) {
+ if (!suppressed.contains(key)) {
+ builder.addArg("--" + key, props.get(key));
+ }
+ }
+ }
}
src/main/java/azkaban/jobExecutor/NoopJob.java 72(+36 -36)
diff --git a/src/main/java/azkaban/jobExecutor/NoopJob.java b/src/main/java/azkaban/jobExecutor/NoopJob.java
index c16844d..f20c554 100644
--- a/src/main/java/azkaban/jobExecutor/NoopJob.java
+++ b/src/main/java/azkaban/jobExecutor/NoopJob.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -24,37 +24,37 @@ import azkaban.utils.Props;
*
*/
public class NoopJob implements Job {
- private String jobId;
-
- public NoopJob(String jobid, Props props, Props jobProps, Logger log) {
- this.jobId = jobid;
- }
-
- @Override
- public String getId() {
- return this.jobId;
- }
-
- @Override
- public void run() throws Exception {
- }
-
- @Override
- public void cancel() throws Exception {
- }
-
- @Override
- public double getProgress() throws Exception {
- return 0;
- }
-
- @Override
- public Props getJobGeneratedProperties() {
- return new Props();
- }
-
- @Override
- public boolean isCanceled() {
- return false;
- }
+ private String jobId;
+
+ public NoopJob(String jobid, Props props, Props jobProps, Logger log) {
+ this.jobId = jobid;
+ }
+
+ @Override
+ public String getId() {
+ return this.jobId;
+ }
+
+ @Override
+ public void run() throws Exception {
+ }
+
+ @Override
+ public void cancel() throws Exception {
+ }
+
+ @Override
+ public double getProgress() throws Exception {
+ return 0;
+ }
+
+ @Override
+ public Props getJobGeneratedProperties() {
+ return new Props();
+ }
+
+ @Override
+ public boolean isCanceled() {
+ return false;
+ }
}
src/main/java/azkaban/jobExecutor/ProcessJob.java 350(+174 -176)
diff --git a/src/main/java/azkaban/jobExecutor/ProcessJob.java b/src/main/java/azkaban/jobExecutor/ProcessJob.java
index fc9891c..f386c40 100644
--- a/src/main/java/azkaban/jobExecutor/ProcessJob.java
+++ b/src/main/java/azkaban/jobExecutor/ProcessJob.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -30,180 +30,178 @@ import azkaban.utils.Props;
/*
* A job that runs a simple unix command
- *
+ *
*/
public class ProcessJob extends AbstractProcessJob {
- public static final String COMMAND = "command";
- private static final long KILL_TIME_MS = 5000;
- private volatile AzkabanProcess process;
-
- public ProcessJob(final String jobId, final Props sysProps, final Props jobProps, final Logger log) {
- super(jobId, sysProps, jobProps, log);
- }
-
- @Override
- public void run() throws Exception {
- try {
- resolveProps();
- }
- catch (Exception e) {
- handleError("Bad property definition! " + e.getMessage(), e);
- }
-
- List<String> commands = null;
- try {
- commands = getCommandList();
- }
- catch (Exception e) {
- handleError("Job set up failed " + e.getCause(), e);
- }
-
- long startMs = System.currentTimeMillis();
-
- if (commands == null) {
- handleError("There are no commands to execute", null);
- }
-
- info(commands.size() + " commands to execute.");
- File[] propFiles = initPropsFiles();
- Map<String, String> envVars = getEnvironmentVariables();
-
- for (String command : commands) {
- info("Command: " + command);
- AzkabanProcessBuilder builder = new AzkabanProcessBuilder(partitionCommandLine(command))
- .setEnv(envVars)
- .setWorkingDir(getCwd())
- .setLogger(getLog());
-
- if (builder.getEnv().size() > 0) {
- info("Environment variables: " + builder.getEnv());
- }
- info("Working directory: " + builder.getWorkingDir());
-
- boolean success = false;
- this.process = builder.build();
-
- try {
- this.process.run();
- success = true;
- } catch (Throwable e) {
- for (File file : propFiles)
- if (file != null && file.exists())
- file.delete();
- throw new RuntimeException(e);
- } finally {
- this.process = null;
- info("Process completed " + (success ? "successfully" : "unsuccessfully") + " in "
- + ((System.currentTimeMillis() - startMs) / 1000) + " seconds.");
- }
- }
-
- // Get the output properties from this job.
- generateProperties(propFiles[1]);
- }
-
- protected void handleError(String errorMsg, Exception e) throws Exception {
- error(errorMsg);
- if (e != null) {
- throw new Exception(errorMsg, e);
- }
- else {
- throw new Exception(errorMsg);
- }
- }
-
- protected List<String> getCommandList() {
- List<String> commands = new ArrayList<String>();
- commands.add(jobProps.getString(COMMAND));
- for (int i = 1; jobProps.containsKey(COMMAND + "." + i); i++) {
- commands.add(jobProps.getString(COMMAND + "." + i));
- }
-
- return commands;
- }
-
- @Override
- public void cancel() throws InterruptedException {
- if(process == null)
- throw new IllegalStateException("Not started.");
- boolean killed = process.softKill(KILL_TIME_MS, TimeUnit.MILLISECONDS);
- if(!killed) {
- warn("Kill with signal TERM failed. Killing with KILL signal.");
- process.hardKill();
- }
- }
-
- @Override
- public double getProgress() {
- return process != null && process.isComplete()? 1.0 : 0.0;
- }
-
- public int getProcessId() {
- return process.getProcessId();
- }
-
- public String getPath() {
- return _jobPath == null ? "" : _jobPath;
- }
-
- /**
- * Splits the command into a unix like command line structure. Quotes and
- * single quotes are treated as nested strings.
- *
- * @param command
- * @return
- */
- public static String[] partitionCommandLine(final String command) {
- ArrayList<String> commands = new ArrayList<String>();
-
- int index = 0;
-
- StringBuffer buffer = new StringBuffer(command.length());
-
- boolean isApos = false;
- boolean isQuote = false;
- while (index < command.length()) {
- char c = command.charAt(index);
-
- switch (c) {
- case ' ':
- if (!isQuote && !isApos) {
- String arg = buffer.toString();
- buffer = new StringBuffer(command.length() - index);
- if (arg.length() > 0) {
- commands.add(arg);
- }
- } else {
- buffer.append(c);
- }
- break;
- case '\'':
- if (!isQuote) {
- isApos = !isApos;
- } else {
- buffer.append(c);
- }
- break;
- case '"':
- if (!isApos) {
- isQuote = !isQuote;
- } else {
- buffer.append(c);
- }
- break;
- default:
- buffer.append(c);
- }
-
- index++;
- }
-
- if (buffer.length() > 0) {
- String arg = buffer.toString();
- commands.add(arg);
- }
-
- return commands.toArray(new String[commands.size()]);
- }
+ public static final String COMMAND = "command";
+ private static final long KILL_TIME_MS = 5000;
+ private volatile AzkabanProcess process;
+
+ public ProcessJob(final String jobId, final Props sysProps,
+ final Props jobProps, final Logger log) {
+ super(jobId, sysProps, jobProps, log);
+ }
+
+ @Override
+ public void run() throws Exception {
+ try {
+ resolveProps();
+ } catch (Exception e) {
+ handleError("Bad property definition! " + e.getMessage(), e);
+ }
+
+ List<String> commands = null;
+ try {
+ commands = getCommandList();
+ } catch (Exception e) {
+ handleError("Job set up failed " + e.getCause(), e);
+ }
+
+ long startMs = System.currentTimeMillis();
+
+ if (commands == null) {
+ handleError("There are no commands to execute", null);
+ }
+
+ info(commands.size() + " commands to execute.");
+ File[] propFiles = initPropsFiles();
+ Map<String, String> envVars = getEnvironmentVariables();
+
+ for (String command : commands) {
+ info("Command: " + command);
+ AzkabanProcessBuilder builder =
+ new AzkabanProcessBuilder(partitionCommandLine(command))
+ .setEnv(envVars).setWorkingDir(getCwd()).setLogger(getLog());
+
+ if (builder.getEnv().size() > 0) {
+ info("Environment variables: " + builder.getEnv());
+ }
+ info("Working directory: " + builder.getWorkingDir());
+
+ boolean success = false;
+ this.process = builder.build();
+
+ try {
+ this.process.run();
+ success = true;
+ } catch (Throwable e) {
+ for (File file : propFiles)
+ if (file != null && file.exists())
+ file.delete();
+ throw new RuntimeException(e);
+ } finally {
+ this.process = null;
+ info("Process completed "
+ + (success ? "successfully" : "unsuccessfully") + " in "
+ + ((System.currentTimeMillis() - startMs) / 1000) + " seconds.");
+ }
+ }
+
+ // Get the output properties from this job.
+ generateProperties(propFiles[1]);
+ }
+
+ protected void handleError(String errorMsg, Exception e) throws Exception {
+ error(errorMsg);
+ if (e != null) {
+ throw new Exception(errorMsg, e);
+ } else {
+ throw new Exception(errorMsg);
+ }
+ }
+
+ protected List<String> getCommandList() {
+ List<String> commands = new ArrayList<String>();
+ commands.add(jobProps.getString(COMMAND));
+ for (int i = 1; jobProps.containsKey(COMMAND + "." + i); i++) {
+ commands.add(jobProps.getString(COMMAND + "." + i));
+ }
+
+ return commands;
+ }
+
+ @Override
+ public void cancel() throws InterruptedException {
+ if (process == null)
+ throw new IllegalStateException("Not started.");
+ boolean killed = process.softKill(KILL_TIME_MS, TimeUnit.MILLISECONDS);
+ if (!killed) {
+ warn("Kill with signal TERM failed. Killing with KILL signal.");
+ process.hardKill();
+ }
+ }
+
+ @Override
+ public double getProgress() {
+ return process != null && process.isComplete() ? 1.0 : 0.0;
+ }
+
+ public int getProcessId() {
+ return process.getProcessId();
+ }
+
+ public String getPath() {
+ return _jobPath == null ? "" : _jobPath;
+ }
+
+ /**
+ * Splits the command into a unix like command line structure. Quotes and
+ * single quotes are treated as nested strings.
+ *
+ * @param command the raw command-line string to split
+ * @return the command tokenized into individual arguments
+ */
+ public static String[] partitionCommandLine(final String command) {
+ ArrayList<String> commands = new ArrayList<String>();
+
+ int index = 0;
+
+ StringBuffer buffer = new StringBuffer(command.length());
+
+ boolean isApos = false;
+ boolean isQuote = false;
+ while (index < command.length()) {
+ char c = command.charAt(index);
+
+ switch (c) {
+ case ' ':
+ if (!isQuote && !isApos) {
+ String arg = buffer.toString();
+ buffer = new StringBuffer(command.length() - index);
+ if (arg.length() > 0) {
+ commands.add(arg);
+ }
+ } else {
+ buffer.append(c);
+ }
+ break;
+ case '\'':
+ if (!isQuote) {
+ isApos = !isApos;
+ } else {
+ buffer.append(c);
+ }
+ break;
+ case '"':
+ if (!isApos) {
+ isQuote = !isQuote;
+ } else {
+ buffer.append(c);
+ }
+ break;
+ default:
+ buffer.append(c);
+ }
+
+ index++;
+ }
+
+ if (buffer.length() > 0) {
+ String arg = buffer.toString();
+ commands.add(arg);
+ }
+
+ return commands.toArray(new String[commands.size()]);
+ }
}
src/main/java/azkaban/jobExecutor/PythonJob.java 29(+15 -14)
diff --git a/src/main/java/azkaban/jobExecutor/PythonJob.java b/src/main/java/azkaban/jobExecutor/PythonJob.java
index 60e03b3..acacc42 100644
--- a/src/main/java/azkaban/jobExecutor/PythonJob.java
+++ b/src/main/java/azkaban/jobExecutor/PythonJob.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -24,16 +24,17 @@ import azkaban.utils.Props;
public class PythonJob extends LongArgJob {
- private static final String PYTHON_BINARY_KEY = "python";
- private static final String SCRIPT_KEY = "script";
-
- public PythonJob(String jobid, Props sysProps, Props jobProps, Logger log) {
- super(jobid,
- new String[] { jobProps.getString(PYTHON_BINARY_KEY, "python"),jobProps.getString(SCRIPT_KEY) },
- sysProps,
- jobProps,
- log,
- ImmutableSet.of(PYTHON_BINARY_KEY, SCRIPT_KEY, JOB_TYPE));
- }
+ private static final String PYTHON_BINARY_KEY = "python";
+ private static final String SCRIPT_KEY = "script";
+
+ public PythonJob(String jobid, Props sysProps, Props jobProps, Logger log) {
+ super(jobid,
+ new String[] {
+ jobProps.getString(PYTHON_BINARY_KEY, "python"),
+ jobProps.getString(SCRIPT_KEY)
+ },
+ sysProps, jobProps, log,
+ ImmutableSet.of(PYTHON_BINARY_KEY, SCRIPT_KEY, JOB_TYPE));
+ }
}
src/main/java/azkaban/jobExecutor/RubyJob.java 29(+15 -14)
diff --git a/src/main/java/azkaban/jobExecutor/RubyJob.java b/src/main/java/azkaban/jobExecutor/RubyJob.java
index 3635d9e..8fe14fa 100644
--- a/src/main/java/azkaban/jobExecutor/RubyJob.java
+++ b/src/main/java/azkaban/jobExecutor/RubyJob.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -24,16 +24,17 @@ import com.google.common.collect.ImmutableSet;
public class RubyJob extends LongArgJob {
- private static final String RUBY_BINARY_KEY = "ruby";
- private static final String SCRIPT_KEY = "script";
-
- public RubyJob(String jobid, Props sysProps, Props jobProps, Logger log) {
- super(jobid,
- new String[] { jobProps.getString(RUBY_BINARY_KEY, "ruby"), jobProps.getString(SCRIPT_KEY) },
- sysProps,
- jobProps,
- log,
- ImmutableSet.of(RUBY_BINARY_KEY, SCRIPT_KEY, JOB_TYPE));
- }
+ private static final String RUBY_BINARY_KEY = "ruby";
+ private static final String SCRIPT_KEY = "script";
+
+ public RubyJob(String jobid, Props sysProps, Props jobProps, Logger log) {
+ super(jobid,
+ new String[] {
+ jobProps.getString(RUBY_BINARY_KEY, "ruby"),
+ jobProps.getString(SCRIPT_KEY)
+ },
+ sysProps, jobProps, log,
+ ImmutableSet.of(RUBY_BINARY_KEY, SCRIPT_KEY, JOB_TYPE));
+ }
}
src/main/java/azkaban/jobExecutor/ScriptJob.java 33(+18 -15)
diff --git a/src/main/java/azkaban/jobExecutor/ScriptJob.java b/src/main/java/azkaban/jobExecutor/ScriptJob.java
index 4ab712b..d591d35 100644
--- a/src/main/java/azkaban/jobExecutor/ScriptJob.java
+++ b/src/main/java/azkaban/jobExecutor/ScriptJob.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -26,20 +26,23 @@ import azkaban.utils.Props;
* A script job issues a command of the form [EXECUTABLE] [SCRIPT] --key1 val1
* ... --key2 val2 executable -- the interpretor command to execute script --
* the script to pass in (requried)
- *
+ *
*/
public class ScriptJob extends LongArgJob {
- private static final String DEFAULT_EXECUTABLE_KEY = "executable";
- private static final String SCRIPT_KEY = "script";
-
- public ScriptJob(String jobid, Props sysProps, Props jobProps, Logger log) {
- super(jobid,
- new String[] { jobProps.getString(DEFAULT_EXECUTABLE_KEY), jobProps.getString(SCRIPT_KEY) },
- sysProps,
- jobProps,
- log,
- ImmutableSet.of(DEFAULT_EXECUTABLE_KEY, SCRIPT_KEY, JOB_TYPE));
- }
+ private static final String DEFAULT_EXECUTABLE_KEY = "executable";
+ private static final String SCRIPT_KEY = "script";
+
+ public ScriptJob(String jobid, Props sysProps, Props jobProps, Logger log) {
+ super(jobid,
+ new String[] {
+ jobProps.getString(DEFAULT_EXECUTABLE_KEY),
+ jobProps.getString(SCRIPT_KEY)
+ },
+ sysProps,
+ jobProps,
+ log,
+ ImmutableSet.of(DEFAULT_EXECUTABLE_KEY, SCRIPT_KEY, JOB_TYPE));
+ }
}
diff --git a/src/main/java/azkaban/jobExecutor/utils/InitErrorJob.java b/src/main/java/azkaban/jobExecutor/utils/InitErrorJob.java
index 43c8194..b860f80 100644
--- a/src/main/java/azkaban/jobExecutor/utils/InitErrorJob.java
+++ b/src/main/java/azkaban/jobExecutor/utils/InitErrorJob.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -24,21 +24,19 @@ import azkaban.jobExecutor.AbstractJob;
* this job is used to throw out exception caught in initialization stage
*
* @author lguo
- *
+ *
*/
-public class InitErrorJob extends AbstractJob
-{
+public class InitErrorJob extends AbstractJob {
private Exception exception;
-
- public InitErrorJob (String id, Exception e) {
- super(id, Logger.getLogger(AbstractJob.class));
- exception = e;
+
+ public InitErrorJob(String id, Exception e) {
+ super(id, Logger.getLogger(AbstractJob.class));
+ exception = e;
}
-
+
@Override
- public void run() throws Exception
- {
+ public void run() throws Exception {
throw exception;
}
diff --git a/src/main/java/azkaban/jobExecutor/utils/JobExecutionException.java b/src/main/java/azkaban/jobExecutor/utils/JobExecutionException.java
index 8dbb1b0..45cf665 100644
--- a/src/main/java/azkaban/jobExecutor/utils/JobExecutionException.java
+++ b/src/main/java/azkaban/jobExecutor/utils/JobExecutionException.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -18,18 +18,18 @@ package azkaban.jobExecutor.utils;
public class JobExecutionException extends RuntimeException {
- private final static long serialVersionUID = 1;
+ private final static long serialVersionUID = 1;
- public JobExecutionException(String message) {
- super(message);
- }
+ public JobExecutionException(String message) {
+ super(message);
+ }
- public JobExecutionException(Throwable cause) {
- super(cause);
- }
+ public JobExecutionException(Throwable cause) {
+ super(cause);
+ }
- public JobExecutionException(String message, Throwable cause) {
- super(message, cause);
- }
+ public JobExecutionException(String message, Throwable cause) {
+ super(message, cause);
+ }
}
diff --git a/src/main/java/azkaban/jobExecutor/utils/process/AzkabanProcess.java b/src/main/java/azkaban/jobExecutor/utils/process/AzkabanProcess.java
index 01576d4..78666f7 100644
--- a/src/main/java/azkaban/jobExecutor/utils/process/AzkabanProcess.java
+++ b/src/main/java/azkaban/jobExecutor/utils/process/AzkabanProcess.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -36,200 +36,202 @@ import com.google.common.base.Joiner;
/**
* A less shitty version of java.lang.Process.
*
- * Output is read by seperate threads to avoid deadlock and logged to log4j
+ * Output is read by separate threads to avoid deadlock and logged to log4j
* loggers.
*
*/
public class AzkabanProcess {
- private final String workingDir;
- private final List<String> cmd;
- private final Map<String, String> env;
- private final Logger logger;
- private final CountDownLatch startupLatch;
- private final CountDownLatch completeLatch;
-
- private volatile int processId;
- private volatile Process process;
-
- public AzkabanProcess(final List<String> cmd, final Map<String, String> env, final String workingDir, final Logger logger) {
- this.cmd = cmd;
- this.env = env;
- this.workingDir = workingDir;
- this.processId = -1;
- this.startupLatch = new CountDownLatch(1);
- this.completeLatch = new CountDownLatch(1);
- this.logger = logger;
- }
-
- /**
- * Execute this process, blocking until it has completed.
- */
- public void run() throws IOException {
- if (this.isStarted() || this.isComplete()) {
- throw new IllegalStateException("The process can only be used once.");
- }
-
- ProcessBuilder builder = new ProcessBuilder(cmd);
- builder.directory(new File(workingDir));
- builder.environment().putAll(env);
- this.process = builder.start();
- try {
- this.processId = processId(process);
- if (processId == 0) {
- logger.debug("Spawned thread with unknown process id");
- } else {
- logger.debug("Spawned thread with process id " + processId);
- }
-
- this.startupLatch.countDown();
-
- LogGobbler outputGobbler = new LogGobbler(new InputStreamReader(process.getInputStream()), logger, Level.INFO, 30);
- LogGobbler errorGobbler = new LogGobbler(new InputStreamReader(process.getErrorStream()), logger, Level.ERROR, 30);
-
- outputGobbler.start();
- errorGobbler.start();
- int exitCode = -1;
- try {
- exitCode = process.waitFor();
- } catch (InterruptedException e) {
- logger.info("Process interrupted. Exit code is " + exitCode, e);
- }
-
- completeLatch.countDown();
- if (exitCode != 0) {
- throw new ProcessFailureException(exitCode, errorGobbler.getRecentLog());
- }
-
- // try to wait for everything to get logged out before exiting
- outputGobbler.awaitCompletion(5000);
- errorGobbler.awaitCompletion(5000);
- } finally {
- IOUtils.closeQuietly(process.getInputStream());
- IOUtils.closeQuietly(process.getOutputStream());
- IOUtils.closeQuietly(process.getErrorStream());
- }
- }
-
- /**
- * Await the completion of this process
- *
- * @throws InterruptedException
- * if the thread is interrupted while waiting.
- */
- public void awaitCompletion() throws InterruptedException {
- this.completeLatch.await();
- }
-
- /**
- * Await the start of this process
- *
- * @throws InterruptedException
- * if the thread is interrupted while waiting.
- */
- public void awaitStartup() throws InterruptedException {
- this.startupLatch.await();
- }
-
- /**
- * Get the process id for this process, if it has started.
- *
- * @return The process id or -1 if it cannot be fetched
- */
- public int getProcessId() {
- checkStarted();
- return this.processId;
- }
-
- /**
- * Attempt to kill the process, waiting up to the given time for it to die
- *
- * @param time
- * The amount of time to wait
- * @param unit
- * The time unit
- * @return true iff this soft kill kills the process in the given wait time.
- */
- public boolean softKill(final long time, final TimeUnit unit) throws InterruptedException {
- checkStarted();
- if (processId != 0 && isStarted()) {
- try {
- Runtime.getRuntime().exec("kill " + processId);
- return completeLatch.await(time, unit);
- } catch (IOException e) {
- logger.error("Kill attempt failed.", e);
- }
- return false;
- }
- return false;
- }
-
- /**
- * Force kill this process
- */
- public void hardKill() {
- checkStarted();
- if (isRunning()) {
- if (processId != 0 ) {
- try {
- Runtime.getRuntime().exec("kill -9 " + processId);
- } catch (IOException e) {
- logger.error("Kill attempt failed.", e);
- }
- }
- process.destroy();
- }
- }
-
- /**
- * Attempt to get the process id for this process
- *
- * @param process
- * The process to get the id from
- * @return The id of the process
- */
- private int processId(final java.lang.Process process) {
- int processId = 0;
- try {
- Field f = process.getClass().getDeclaredField("pid");
- f.setAccessible(true);
-
- processId = f.getInt(process);
- } catch (Throwable e) {
- e.printStackTrace();
- }
-
- return processId;
- }
-
- /**
- * @return true iff the process has been started
- */
- public boolean isStarted() {
- return startupLatch.getCount() == 0L;
- }
-
- /**
- * @return true iff the process has completed
- */
- public boolean isComplete() {
- return completeLatch.getCount() == 0L;
- }
-
- /**
- * @return true iff the process is currently running
- */
- public boolean isRunning() {
- return isStarted() && !isComplete();
- }
-
- public void checkStarted() {
- if (!isStarted()) {
- throw new IllegalStateException("Process has not yet started.");
- }
- }
-
- @Override
- public String toString() {
- return "Process(cmd = " + Joiner.on(" ").join(cmd) + ", env = " + env + ", cwd = " + workingDir + ")";
- }
+ private final String workingDir;
+ private final List<String> cmd;
+ private final Map<String, String> env;
+ private final Logger logger;
+ private final CountDownLatch startupLatch;
+ private final CountDownLatch completeLatch;
+
+ private volatile int processId;
+ private volatile Process process;
+
+ public AzkabanProcess(final List<String> cmd, final Map<String, String> env,
+ final String workingDir, final Logger logger) {
+ this.cmd = cmd;
+ this.env = env;
+ this.workingDir = workingDir;
+ this.processId = -1;
+ this.startupLatch = new CountDownLatch(1);
+ this.completeLatch = new CountDownLatch(1);
+ this.logger = logger;
+ }
+
+ /**
+ * Execute this process, blocking until it has completed.
+ */
+ public void run() throws IOException {
+ if (this.isStarted() || this.isComplete()) {
+ throw new IllegalStateException("The process can only be used once.");
+ }
+
+ ProcessBuilder builder = new ProcessBuilder(cmd);
+ builder.directory(new File(workingDir));
+ builder.environment().putAll(env);
+ this.process = builder.start();
+ try {
+ this.processId = processId(process);
+ if (processId == 0) {
+ logger.debug("Spawned thread with unknown process id");
+ } else {
+ logger.debug("Spawned thread with process id " + processId);
+ }
+
+ this.startupLatch.countDown();
+
+ LogGobbler outputGobbler =
+ new LogGobbler(new InputStreamReader(process.getInputStream()),
+ logger, Level.INFO, 30);
+ LogGobbler errorGobbler =
+ new LogGobbler(new InputStreamReader(process.getErrorStream()),
+ logger, Level.ERROR, 30);
+
+ outputGobbler.start();
+ errorGobbler.start();
+ int exitCode = -1;
+ try {
+ exitCode = process.waitFor();
+ } catch (InterruptedException e) {
+ logger.info("Process interrupted. Exit code is " + exitCode, e);
+ }
+
+ completeLatch.countDown();
+ if (exitCode != 0) {
+ throw new ProcessFailureException(exitCode, errorGobbler.getRecentLog());
+ }
+
+ // try to wait for everything to get logged out before exiting
+ outputGobbler.awaitCompletion(5000);
+ errorGobbler.awaitCompletion(5000);
+ } finally {
+ IOUtils.closeQuietly(process.getInputStream());
+ IOUtils.closeQuietly(process.getOutputStream());
+ IOUtils.closeQuietly(process.getErrorStream());
+ }
+ }
+
+ /**
+ * Await the completion of this process
+ *
+ * @throws InterruptedException if the thread is interrupted while waiting.
+ */
+ public void awaitCompletion() throws InterruptedException {
+ this.completeLatch.await();
+ }
+
+ /**
+ * Await the start of this process
+ *
+ * @throws InterruptedException if the thread is interrupted while waiting.
+ */
+ public void awaitStartup() throws InterruptedException {
+ this.startupLatch.await();
+ }
+
+ /**
+ * Get the process id for this process, if it has started.
+ *
+ * @return The process id or -1 if it cannot be fetched
+ */
+ public int getProcessId() {
+ checkStarted();
+ return this.processId;
+ }
+
+ /**
+ * Attempt to kill the process, waiting up to the given time for it to die
+ *
+ * @param time The amount of time to wait
+ * @param unit The time unit
+ * @return true iff this soft kill kills the process in the given wait time.
+ */
+ public boolean softKill(final long time, final TimeUnit unit)
+ throws InterruptedException {
+ checkStarted();
+ if (processId != 0 && isStarted()) {
+ try {
+ Runtime.getRuntime().exec("kill " + processId);
+ return completeLatch.await(time, unit);
+ } catch (IOException e) {
+ logger.error("Kill attempt failed.", e);
+ }
+ return false;
+ }
+ return false;
+ }
+
+ /**
+ * Force kill this process
+ */
+ public void hardKill() {
+ checkStarted();
+ if (isRunning()) {
+ if (processId != 0) {
+ try {
+ Runtime.getRuntime().exec("kill -9 " + processId);
+ } catch (IOException e) {
+ logger.error("Kill attempt failed.", e);
+ }
+ }
+ process.destroy();
+ }
+ }
+
+ /**
+ * Attempt to get the process id for this process
+ *
+ * @param process The process to get the id from
+ * @return The id of the process
+ */
+ private int processId(final java.lang.Process process) {
+ int processId = 0;
+ try {
+ Field f = process.getClass().getDeclaredField("pid");
+ f.setAccessible(true);
+
+ processId = f.getInt(process);
+ } catch (Throwable e) {
+ e.printStackTrace();
+ }
+
+ return processId;
+ }
+
+ /**
+ * @return true iff the process has been started
+ */
+ public boolean isStarted() {
+ return startupLatch.getCount() == 0L;
+ }
+
+ /**
+ * @return true iff the process has completed
+ */
+ public boolean isComplete() {
+ return completeLatch.getCount() == 0L;
+ }
+
+ /**
+ * @return true iff the process is currently running
+ */
+ public boolean isRunning() {
+ return isStarted() && !isComplete();
+ }
+
+ public void checkStarted() {
+ if (!isStarted()) {
+ throw new IllegalStateException("Process has not yet started.");
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "Process(cmd = " + Joiner.on(" ").join(cmd) + ", env = " + env
+ + ", cwd = " + workingDir + ")";
+ }
}
diff --git a/src/main/java/azkaban/jobExecutor/utils/process/AzkabanProcessBuilder.java b/src/main/java/azkaban/jobExecutor/utils/process/AzkabanProcessBuilder.java
index b2e0213..8832195 100644
--- a/src/main/java/azkaban/jobExecutor/utils/process/AzkabanProcessBuilder.java
+++ b/src/main/java/azkaban/jobExecutor/utils/process/AzkabanProcessBuilder.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -31,88 +31,89 @@ import com.google.common.base.Joiner;
*/
public class AzkabanProcessBuilder {
- private List<String> cmd = new ArrayList<String>();
- private Map<String, String> env = new HashMap<String, String>();
- private String workingDir = System.getProperty("user.dir");
- private Logger logger = Logger.getLogger(AzkabanProcess.class);
-
- private int stdErrSnippetSize = 30;
- private int stdOutSnippetSize = 30;
-
- public AzkabanProcessBuilder(String... command) {
- addArg(command);
- }
-
- public AzkabanProcessBuilder addArg(String... command) {
- for (String c : command)
- cmd.add(c);
- return this;
- }
-
- public AzkabanProcessBuilder setWorkingDir(String dir) {
- this.workingDir = dir;
- return this;
- }
-
- public AzkabanProcessBuilder setWorkingDir(File f) {
- return setWorkingDir(f.getAbsolutePath());
- }
-
- public String getWorkingDir() {
- return this.workingDir;
- }
-
- public AzkabanProcessBuilder addEnv(String variable, String value) {
- env.put(variable, value);
- return this;
- }
-
- public AzkabanProcessBuilder setEnv(Map<String, String> m) {
- this.env = m;
- return this;
- }
-
- public Map<String, String> getEnv() {
- return this.env;
- }
-
- public AzkabanProcessBuilder setStdErrorSnippetSize(int size) {
- this.stdErrSnippetSize = size;
- return this;
- }
-
- public AzkabanProcessBuilder setStdOutSnippetSize(int size) {
- this.stdOutSnippetSize = size;
- return this;
- }
-
- public int getStdErrorSnippetSize() {
- return this.stdErrSnippetSize;
- }
-
- public int getStdOutSnippetSize() {
- return this.stdOutSnippetSize;
- }
-
- public AzkabanProcessBuilder setLogger(Logger logger) {
- this.logger = logger;
- return this;
- }
-
- public AzkabanProcess build() {
- return new AzkabanProcess(cmd, env, workingDir, logger);
- }
-
- public List<String> getCommand() {
- return this.cmd;
- }
-
- public String getCommandString() {
- return Joiner.on(" ").join(getCommand());
- }
-
- @Override
- public String toString() {
- return "ProcessBuilder(cmd = " + Joiner.on(" ").join(cmd) + ", env = " + env + ", cwd = " + workingDir + ")";
- }
+ private List<String> cmd = new ArrayList<String>();
+ private Map<String, String> env = new HashMap<String, String>();
+ private String workingDir = System.getProperty("user.dir");
+ private Logger logger = Logger.getLogger(AzkabanProcess.class);
+
+ private int stdErrSnippetSize = 30;
+ private int stdOutSnippetSize = 30;
+
+ public AzkabanProcessBuilder(String... command) {
+ addArg(command);
+ }
+
+ public AzkabanProcessBuilder addArg(String... command) {
+ for (String c : command)
+ cmd.add(c);
+ return this;
+ }
+
+ public AzkabanProcessBuilder setWorkingDir(String dir) {
+ this.workingDir = dir;
+ return this;
+ }
+
+ public AzkabanProcessBuilder setWorkingDir(File f) {
+ return setWorkingDir(f.getAbsolutePath());
+ }
+
+ public String getWorkingDir() {
+ return this.workingDir;
+ }
+
+ public AzkabanProcessBuilder addEnv(String variable, String value) {
+ env.put(variable, value);
+ return this;
+ }
+
+ public AzkabanProcessBuilder setEnv(Map<String, String> m) {
+ this.env = m;
+ return this;
+ }
+
+ public Map<String, String> getEnv() {
+ return this.env;
+ }
+
+ public AzkabanProcessBuilder setStdErrorSnippetSize(int size) {
+ this.stdErrSnippetSize = size;
+ return this;
+ }
+
+ public AzkabanProcessBuilder setStdOutSnippetSize(int size) {
+ this.stdOutSnippetSize = size;
+ return this;
+ }
+
+ public int getStdErrorSnippetSize() {
+ return this.stdErrSnippetSize;
+ }
+
+ public int getStdOutSnippetSize() {
+ return this.stdOutSnippetSize;
+ }
+
+ public AzkabanProcessBuilder setLogger(Logger logger) {
+ this.logger = logger;
+ return this;
+ }
+
+ public AzkabanProcess build() {
+ return new AzkabanProcess(cmd, env, workingDir, logger);
+ }
+
+ public List<String> getCommand() {
+ return this.cmd;
+ }
+
+ public String getCommandString() {
+ return Joiner.on(" ").join(getCommand());
+ }
+
+ @Override
+ public String toString() {
+ return "ProcessBuilder(cmd = " + Joiner.on(" ").join(cmd) + ", env = "
+ + env + ", cwd = " + workingDir + ")";
+ }
}
diff --git a/src/main/java/azkaban/jobExecutor/utils/process/ProcessFailureException.java b/src/main/java/azkaban/jobExecutor/utils/process/ProcessFailureException.java
index 01c4656..9d031e4 100644
--- a/src/main/java/azkaban/jobExecutor/utils/process/ProcessFailureException.java
+++ b/src/main/java/azkaban/jobExecutor/utils/process/ProcessFailureException.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -18,22 +18,22 @@ package azkaban.jobExecutor.utils.process;
/**
 * Signals that an external process finished with a failing exit status.
 * Carries the numeric exit code plus a snippet of the process log so callers
 * can surface a useful diagnostic.
 */
public class ProcessFailureException extends RuntimeException {
  private static final long serialVersionUID = 1;

  private final int exitCode;
  private final String logSnippet;

  /**
   * @param exitCode the process's exit status
   * @param logSnippet tail of the process output captured for error reporting
   */
  public ProcessFailureException(int exitCode, String logSnippet) {
    this.exitCode = exitCode;
    this.logSnippet = logSnippet;
  }

  /** Returns the process exit status. */
  public int getExitCode() {
    return this.exitCode;
  }

  /** Returns the captured log snippet (may be null if none was recorded). */
  public String getLogSnippet() {
    return this.logSnippet;
  }
}
src/main/java/azkaban/jobtype/JobTypeManager.java 677(+348 -329)
diff --git a/src/main/java/azkaban/jobtype/JobTypeManager.java b/src/main/java/azkaban/jobtype/JobTypeManager.java
index 58344c4..e5b5bdb 100644
--- a/src/main/java/azkaban/jobtype/JobTypeManager.java
+++ b/src/main/java/azkaban/jobtype/JobTypeManager.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -37,330 +37,349 @@ import java.util.List;
import org.apache.log4j.Logger;
-public class JobTypeManager
-{
- private final String jobTypePluginDir; // the dir for jobtype plugins
- private final ClassLoader parentLoader;
-
- public static final String DEFAULT_JOBTYPEPLUGINDIR = "plugins/jobtypes";
- private static final String JOBTYPECONFFILE = "plugin.properties"; // need jars.to.include property, will be loaded with user property
- private static final String JOBTYPESYSCONFFILE = "private.properties"; // not exposed to users
- private static final String COMMONCONFFILE = "common.properties"; // common properties for multiple plugins
- private static final String COMMONSYSCONFFILE = "commonprivate.properties"; // common private properties for multiple plugins
- private static final Logger logger = Logger.getLogger(JobTypeManager.class);
-
- private JobTypePluginSet pluginSet;
- private Props globalProperties;
-
- public JobTypeManager(String jobtypePluginDir, Props globalProperties, ClassLoader parentClassLoader) {
- this.jobTypePluginDir = jobtypePluginDir;
- this.parentLoader = parentClassLoader;
- this.globalProperties = globalProperties;
-
- loadPlugins();
- }
-
- public void loadPlugins() throws JobTypeManagerException {
- JobTypePluginSet plugins = new JobTypePluginSet();
-
- loadDefaultTypes(plugins);
- if (jobTypePluginDir != null) {
- File pluginDir = new File(jobTypePluginDir);
- if (pluginDir.exists()) {
- logger.info("Job type plugin directory set. Loading extra job types from " + pluginDir);
- try {
- loadPluginJobTypes(plugins);
- }
- catch (Exception e) {
- logger.info("Plugin jobtypes failed to load. " + e.getCause());
- throw new JobTypeManagerException(e);
- }
- }
- }
-
- // Swap the plugin set. If exception is thrown, then plugin isn't swapped.
- synchronized (this) {
- pluginSet = plugins;
- }
- }
-
- private void loadDefaultTypes(JobTypePluginSet plugins) throws JobTypeManagerException {
- logger.info("Loading plugin default job types");
- plugins.addPluginClass("command", ProcessJob.class);
- plugins.addPluginClass("javaprocess", JavaProcessJob.class);
- plugins.addPluginClass("noop", NoopJob.class);
- plugins.addPluginClass("python", PythonJob.class);
- plugins.addPluginClass("ruby", RubyJob.class);
- plugins.addPluginClass("script", ScriptJob.class);
- }
-
- // load Job Types from jobtype plugin dir
- private void loadPluginJobTypes(JobTypePluginSet plugins) throws JobTypeManagerException {
- File jobPluginsDir = new File(jobTypePluginDir);
-
- if (!jobPluginsDir.exists()) {
- logger.error("Job type plugin dir " + jobTypePluginDir + " doesn't exist. Will not load any external plugins.");
- return;
- }
- else if (!jobPluginsDir.isDirectory()) {
- throw new JobTypeManagerException("Job type plugin dir " + jobTypePluginDir + " is not a directory!");
- }
- else if (!jobPluginsDir.canRead()) {
- throw new JobTypeManagerException("Job type plugin dir " + jobTypePluginDir + " is not readable!");
- }
-
- // Load the common properties used by all jobs that are run
- Props commonPluginJobProps = null;
- File commonJobPropsFile = new File(jobPluginsDir, COMMONCONFFILE);
- if (commonJobPropsFile.exists()) {
- logger.info("Common plugin job props file " + commonJobPropsFile + " found. Attempt to load.");
- try {
- commonPluginJobProps = new Props(globalProperties, commonJobPropsFile);
- }
- catch (IOException e) {
- throw new JobTypeManagerException("Failed to load common plugin job properties" + e.getCause());
- }
- }
- else {
- logger.info("Common plugin job props file " + commonJobPropsFile + " not found. Using empty props.");
- commonPluginJobProps = new Props();
- }
-
- // Loads the common properties used by all plugins when loading
- Props commonPluginLoadProps = null;
- File commonLoadPropsFile = new File(jobPluginsDir, COMMONSYSCONFFILE);
- if (commonLoadPropsFile.exists()) {
- logger.info("Common plugin load props file " + commonLoadPropsFile + " found. Attempt to load.");
- try {
- commonPluginLoadProps = new Props(null, commonLoadPropsFile);
- }
- catch (IOException e) {
- throw new JobTypeManagerException("Failed to load common plugin loader properties" + e.getCause());
- }
- }
- else {
- logger.info("Common plugin load props file " + commonLoadPropsFile + " not found. Using empty props.");
- commonPluginLoadProps = new Props();
- }
-
- plugins.setCommonPluginJobProps(commonPluginJobProps);
- plugins.setCommonPluginLoadProps(commonPluginLoadProps);
-
- // Loading job types
- for (File dir : jobPluginsDir.listFiles()) {
- if (dir.isDirectory() && dir.canRead()) {
- try {
- loadJobTypes(dir, plugins);
- }
- catch (Exception e) {
- logger.error("Failed to load jobtype " + dir.getName() + e.getMessage());
- throw new JobTypeManagerException(e);
- }
- }
- }
- }
-
- @SuppressWarnings("unchecked")
- private void loadJobTypes(File pluginDir, JobTypePluginSet plugins) throws JobTypeManagerException {
- // Directory is the jobtypeName
- String jobTypeName = pluginDir.getName();
- logger.info("Loading plugin " + jobTypeName);
-
- Props pluginJobProps = null;
- Props pluginLoadProps = null;
-
- File pluginJobPropsFile = new File(pluginDir, JOBTYPECONFFILE);
- File pluginLoadPropsFile = new File(pluginDir, JOBTYPESYSCONFFILE);
-
- if (!pluginLoadPropsFile.exists()) {
- logger.info("Plugin load props file " + pluginLoadPropsFile + " not found.");
- return;
- }
-
- try {
- Props commonPluginJobProps = plugins.getCommonPluginJobProps();
- Props commonPluginLoadProps = plugins.getCommonPluginLoadProps();
- if (pluginJobPropsFile.exists()) {
- pluginJobProps = new Props(commonPluginJobProps, pluginJobPropsFile);
- }
- else {
- pluginJobProps = new Props(commonPluginJobProps);
- }
-
- pluginLoadProps = new Props(commonPluginLoadProps, pluginLoadPropsFile);
- pluginLoadProps = PropsUtils.resolveProps(pluginLoadProps);
- }
- catch (Exception e) {
- throw new JobTypeManagerException("Failed to get jobtype properties" + e.getMessage());
- }
- // Add properties into the plugin set
- pluginLoadProps.put("plugin.dir", pluginDir.getAbsolutePath());
- plugins.addPluginLoadProps(jobTypeName, pluginLoadProps);
- if (pluginJobProps != null) {
- plugins.addPluginJobProps(jobTypeName, pluginJobProps);
- }
-
- ClassLoader jobTypeLoader = loadJobTypeClassLoader(pluginDir, jobTypeName, plugins);
- String jobtypeClass = pluginLoadProps.get("jobtype.class");
-
- Class<? extends Job> clazz = null;
- try {
- clazz = (Class<? extends Job>)jobTypeLoader.loadClass(jobtypeClass);
- plugins.addPluginClass(jobTypeName, clazz);
- }
- catch (ClassNotFoundException e) {
- throw new JobTypeManagerException(e);
- }
-
- logger.info("Verifying job plugin " + jobTypeName);
- try {
- Props fakeSysProps = new Props(pluginLoadProps);
- Props fakeJobProps = new Props(pluginJobProps);
- @SuppressWarnings("unused")
- Job job = (Job)Utils.callConstructor(clazz, "dummy", fakeSysProps, fakeJobProps, logger);
- }
- catch (Exception e) {
- logger.info("Jobtype " + jobTypeName + " failed test!", e);
- throw new JobExecutionException(e);
- }
- catch (Throwable t) {
- logger.info("Jobtype " + jobTypeName + " failed test!", t);
- throw new JobExecutionException(t);
- }
-
- logger.info("Loaded jobtype " + jobTypeName + " " + jobtypeClass);
- }
-
- /**
- * Creates and loads all plugin resources (jars) into a ClassLoader
- *
- * @param pluginDir
- * @param jobTypeName
- * @param plugins
- * @return
- */
- private ClassLoader loadJobTypeClassLoader(File pluginDir, String jobTypeName, JobTypePluginSet plugins) {
- // sysconf says what jars/confs to load
- List<URL> resources = new ArrayList<URL>();
- Props pluginLoadProps = plugins.getPluginLoaderProps(jobTypeName);
-
- try {
- //first global classpath
- logger.info("Adding global resources for " + jobTypeName);
- List<String> typeGlobalClassPath = pluginLoadProps.getStringList("jobtype.global.classpath", null, ",");
- if (typeGlobalClassPath != null) {
- for (String jar : typeGlobalClassPath) {
- URL cpItem = new File(jar).toURI().toURL();
- if (!resources.contains(cpItem)) {
- logger.info("adding to classpath " + cpItem);
- resources.add(cpItem);
- }
- }
- }
-
- //type specific classpath
- logger.info("Adding type resources.");
- List<String> typeClassPath = pluginLoadProps.getStringList("jobtype.classpath", null, ",");
- if (typeClassPath != null) {
- for (String jar : typeClassPath) {
- URL cpItem = new File(jar).toURI().toURL();
- if (!resources.contains(cpItem)) {
- logger.info("adding to classpath " + cpItem);
- resources.add(cpItem);
- }
- }
- }
- List<String> jobtypeLibDirs = pluginLoadProps.getStringList("jobtype.lib.dir", null, ",");
- if (jobtypeLibDirs != null) {
- for (String libDir : jobtypeLibDirs) {
- for (File f : new File(libDir).listFiles()) {
- if (f.getName().endsWith(".jar")) {
- resources.add(f.toURI().toURL());
- logger.info("adding to classpath " + f.toURI().toURL());
- }
- }
- }
- }
-
- logger.info("Adding type override resources.");
- for (File f : pluginDir.listFiles()) {
- if (f.getName().endsWith(".jar")) {
- resources.add(f.toURI().toURL());
- logger.info("adding to classpath " + f.toURI().toURL());
- }
- }
-
- }
- catch (MalformedURLException e) {
- throw new JobTypeManagerException(e);
- }
-
- // each job type can have a different class loader
- ClassLoader jobTypeLoader = new URLClassLoader(resources.toArray(new URL[resources.size()]), parentLoader);
- return jobTypeLoader;
- }
-
- public Job buildJobExecutor(String jobId, Props jobProps, Logger logger) throws JobTypeManagerException {
- // This is final because during build phase, you should never need to swap
- // the pluginSet for safety reasons
- final JobTypePluginSet pluginSet = getJobTypePluginSet();
-
- Job job = null;
- try {
- String jobType = jobProps.getString("type");
- if (jobType == null || jobType.length() == 0) {
- /*throw an exception when job name is null or empty*/
- throw new JobExecutionException(
- String.format("The 'type' parameter for job[%s] is null or empty",
- jobProps, logger));
- }
-
- logger.info("Building " + jobType + " job executor. ");
-
- Class<? extends Object> executorClass = pluginSet.getPluginClass(jobType);
- if (executorClass == null) {
- throw new JobExecutionException(
- String.format("Job type '" + jobType + "' is unrecognized. Could not construct job[%s] of type[%s].", jobProps, jobType));
- }
-
- Props pluginJobProps = pluginSet.getPluginJobProps(jobType);
- if (pluginJobProps != null) {
- for (String k : pluginJobProps.getKeySet()) {
- if (!jobProps.containsKey(k)) {
- jobProps.put(k, pluginJobProps.get(k));
- }
- }
- }
- jobProps = PropsUtils.resolveProps(jobProps);
-
- Props pluginLoadProps = pluginSet.getPluginLoaderProps(jobType);
- if (pluginLoadProps != null) {
- pluginLoadProps = PropsUtils.resolveProps(pluginLoadProps);
- }
- else {
- pluginLoadProps = new Props();
- }
-
- job = (Job) Utils.callConstructor(
- executorClass, jobId, pluginLoadProps, jobProps, logger);
- }
- catch (Exception e) {
- logger.error("Failed to build job executor for job " + jobId + e.getMessage());
- throw new JobTypeManagerException("Failed to build job executor for job " + jobId, e);
- }
- catch (Throwable t) {
- logger.error("Failed to build job executor for job " + jobId + t.getMessage(), t);
- throw new JobTypeManagerException("Failed to build job executor for job " + jobId, t);
- }
-
- return job;
- }
-
- /**
- * Public for test reasons. Will need to move tests to the same package
- */
- public synchronized JobTypePluginSet getJobTypePluginSet() {
- return this.pluginSet;
- }
-}
+public class JobTypeManager {
+ private final String jobTypePluginDir; // the dir for jobtype plugins
+ private final ClassLoader parentLoader;
+
+ public static final String DEFAULT_JOBTYPEPLUGINDIR = "plugins/jobtypes";
+ // need jars.to.include property, will be loaded with user property
+ private static final String JOBTYPECONFFILE = "plugin.properties";
+ // not exposed to users
+ private static final String JOBTYPESYSCONFFILE = "private.properties";
+ // common properties for multiple plugins
+ private static final String COMMONCONFFILE = "common.properties";
+ // common private properties for multiple plugins
+ private static final String COMMONSYSCONFFILE = "commonprivate.properties";
+ private static final Logger logger = Logger.getLogger(JobTypeManager.class);
+
+ private JobTypePluginSet pluginSet;
+ private Props globalProperties;
+
+ public JobTypeManager(String jobtypePluginDir, Props globalProperties,
+ ClassLoader parentClassLoader) {
+ this.jobTypePluginDir = jobtypePluginDir;
+ this.parentLoader = parentClassLoader;
+ this.globalProperties = globalProperties;
+
+ loadPlugins();
+ }
+
+ public void loadPlugins() throws JobTypeManagerException {
+ JobTypePluginSet plugins = new JobTypePluginSet();
+
+ loadDefaultTypes(plugins);
+ if (jobTypePluginDir != null) {
+ File pluginDir = new File(jobTypePluginDir);
+ if (pluginDir.exists()) {
+ logger
+ .info("Job type plugin directory set. Loading extra job types from "
+ + pluginDir);
+ try {
+ loadPluginJobTypes(plugins);
+ } catch (Exception e) {
+ logger.info("Plugin jobtypes failed to load. " + e.getCause());
+ throw new JobTypeManagerException(e);
+ }
+ }
+ }
+
+ // Swap the plugin set. If exception is thrown, then plugin isn't swapped.
+ synchronized (this) {
+ pluginSet = plugins;
+ }
+ }
+
+ private void loadDefaultTypes(JobTypePluginSet plugins)
+ throws JobTypeManagerException {
+ logger.info("Loading plugin default job types");
+ plugins.addPluginClass("command", ProcessJob.class);
+ plugins.addPluginClass("javaprocess", JavaProcessJob.class);
+ plugins.addPluginClass("noop", NoopJob.class);
+ plugins.addPluginClass("python", PythonJob.class);
+ plugins.addPluginClass("ruby", RubyJob.class);
+ plugins.addPluginClass("script", ScriptJob.class);
+ }
+
+ // load Job Types from jobtype plugin dir
+ private void loadPluginJobTypes(JobTypePluginSet plugins)
+ throws JobTypeManagerException {
+ File jobPluginsDir = new File(jobTypePluginDir);
+
+ if (!jobPluginsDir.exists()) {
+ logger.error("Job type plugin dir " + jobTypePluginDir
+ + " doesn't exist. Will not load any external plugins.");
+ return;
+ } else if (!jobPluginsDir.isDirectory()) {
+ throw new JobTypeManagerException("Job type plugin dir "
+ + jobTypePluginDir + " is not a directory!");
+ } else if (!jobPluginsDir.canRead()) {
+ throw new JobTypeManagerException("Job type plugin dir "
+ + jobTypePluginDir + " is not readable!");
+ }
+
+ // Load the common properties used by all jobs that are run
+ Props commonPluginJobProps = null;
+ File commonJobPropsFile = new File(jobPluginsDir, COMMONCONFFILE);
+ if (commonJobPropsFile.exists()) {
+ logger.info("Common plugin job props file " + commonJobPropsFile
+ + " found. Attempt to load.");
+ try {
+ commonPluginJobProps = new Props(globalProperties, commonJobPropsFile);
+ } catch (IOException e) {
+ throw new JobTypeManagerException(
+ "Failed to load common plugin job properties" + e.getCause());
+ }
+ } else {
+ logger.info("Common plugin job props file " + commonJobPropsFile
+ + " not found. Using empty props.");
+ commonPluginJobProps = new Props();
+ }
+
+ // Loads the common properties used by all plugins when loading
+ Props commonPluginLoadProps = null;
+ File commonLoadPropsFile = new File(jobPluginsDir, COMMONSYSCONFFILE);
+ if (commonLoadPropsFile.exists()) {
+ logger.info("Common plugin load props file " + commonLoadPropsFile
+ + " found. Attempt to load.");
+ try {
+ commonPluginLoadProps = new Props(null, commonLoadPropsFile);
+ } catch (IOException e) {
+ throw new JobTypeManagerException(
+ "Failed to load common plugin loader properties" + e.getCause());
+ }
+ } else {
+ logger.info("Common plugin load props file " + commonLoadPropsFile
+ + " not found. Using empty props.");
+ commonPluginLoadProps = new Props();
+ }
+
+ plugins.setCommonPluginJobProps(commonPluginJobProps);
+ plugins.setCommonPluginLoadProps(commonPluginLoadProps);
+
+ // Loading job types
+ for (File dir : jobPluginsDir.listFiles()) {
+ if (dir.isDirectory() && dir.canRead()) {
+ try {
+ loadJobTypes(dir, plugins);
+ } catch (Exception e) {
+ logger.error("Failed to load jobtype " + dir.getName()
+ + e.getMessage());
+ throw new JobTypeManagerException(e);
+ }
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private void loadJobTypes(File pluginDir, JobTypePluginSet plugins)
+ throws JobTypeManagerException {
+ // Directory is the jobtypeName
+ String jobTypeName = pluginDir.getName();
+ logger.info("Loading plugin " + jobTypeName);
+
+ Props pluginJobProps = null;
+ Props pluginLoadProps = null;
+
+ File pluginJobPropsFile = new File(pluginDir, JOBTYPECONFFILE);
+ File pluginLoadPropsFile = new File(pluginDir, JOBTYPESYSCONFFILE);
+
+ if (!pluginLoadPropsFile.exists()) {
+ logger.info("Plugin load props file " + pluginLoadPropsFile
+ + " not found.");
+ return;
+ }
+
+ try {
+ Props commonPluginJobProps = plugins.getCommonPluginJobProps();
+ Props commonPluginLoadProps = plugins.getCommonPluginLoadProps();
+ if (pluginJobPropsFile.exists()) {
+ pluginJobProps = new Props(commonPluginJobProps, pluginJobPropsFile);
+ } else {
+ pluginJobProps = new Props(commonPluginJobProps);
+ }
+ pluginLoadProps = new Props(commonPluginLoadProps, pluginLoadPropsFile);
+ pluginLoadProps = PropsUtils.resolveProps(pluginLoadProps);
+ } catch (Exception e) {
+ throw new JobTypeManagerException("Failed to get jobtype properties"
+ + e.getMessage());
+ }
+ // Add properties into the plugin set
+ pluginLoadProps.put("plugin.dir", pluginDir.getAbsolutePath());
+ plugins.addPluginLoadProps(jobTypeName, pluginLoadProps);
+ if (pluginJobProps != null) {
+ plugins.addPluginJobProps(jobTypeName, pluginJobProps);
+ }
+
+ ClassLoader jobTypeLoader =
+ loadJobTypeClassLoader(pluginDir, jobTypeName, plugins);
+ String jobtypeClass = pluginLoadProps.get("jobtype.class");
+
+ Class<? extends Job> clazz = null;
+ try {
+ clazz = (Class<? extends Job>) jobTypeLoader.loadClass(jobtypeClass);
+ plugins.addPluginClass(jobTypeName, clazz);
+ } catch (ClassNotFoundException e) {
+ throw new JobTypeManagerException(e);
+ }
+
+ logger.info("Verifying job plugin " + jobTypeName);
+ try {
+ Props fakeSysProps = new Props(pluginLoadProps);
+ Props fakeJobProps = new Props(pluginJobProps);
+ @SuppressWarnings("unused")
+ Job job =
+ (Job) Utils.callConstructor(clazz, "dummy", fakeSysProps,
+ fakeJobProps, logger);
+ } catch (Exception e) {
+ logger.info("Jobtype " + jobTypeName + " failed test!", e);
+ throw new JobExecutionException(e);
+ } catch (Throwable t) {
+ logger.info("Jobtype " + jobTypeName + " failed test!", t);
+ throw new JobExecutionException(t);
+ }
+
+ logger.info("Loaded jobtype " + jobTypeName + " " + jobtypeClass);
+ }
+
+ /**
+ * Creates and loads all plugin resources (jars) into a ClassLoader
+ *
+ * @param pluginDir
+ * @param jobTypeName
+ * @param plugins
+ * @return
+ */
+ private ClassLoader loadJobTypeClassLoader(File pluginDir,
+ String jobTypeName, JobTypePluginSet plugins) {
+ // sysconf says what jars/confs to load
+ List<URL> resources = new ArrayList<URL>();
+ Props pluginLoadProps = plugins.getPluginLoaderProps(jobTypeName);
+
+ try {
+ // first global classpath
+ logger.info("Adding global resources for " + jobTypeName);
+ List<String> typeGlobalClassPath =
+ pluginLoadProps.getStringList("jobtype.global.classpath", null, ",");
+ if (typeGlobalClassPath != null) {
+ for (String jar : typeGlobalClassPath) {
+ URL cpItem = new File(jar).toURI().toURL();
+ if (!resources.contains(cpItem)) {
+ logger.info("adding to classpath " + cpItem);
+ resources.add(cpItem);
+ }
+ }
+ }
+
+ // type specific classpath
+ logger.info("Adding type resources.");
+ List<String> typeClassPath =
+ pluginLoadProps.getStringList("jobtype.classpath", null, ",");
+ if (typeClassPath != null) {
+ for (String jar : typeClassPath) {
+ URL cpItem = new File(jar).toURI().toURL();
+ if (!resources.contains(cpItem)) {
+ logger.info("adding to classpath " + cpItem);
+ resources.add(cpItem);
+ }
+ }
+ }
+ List<String> jobtypeLibDirs =
+ pluginLoadProps.getStringList("jobtype.lib.dir", null, ",");
+ if (jobtypeLibDirs != null) {
+ for (String libDir : jobtypeLibDirs) {
+ for (File f : new File(libDir).listFiles()) {
+ if (f.getName().endsWith(".jar")) {
+ resources.add(f.toURI().toURL());
+ logger.info("adding to classpath " + f.toURI().toURL());
+ }
+ }
+ }
+ }
+
+ logger.info("Adding type override resources.");
+ for (File f : pluginDir.listFiles()) {
+ if (f.getName().endsWith(".jar")) {
+ resources.add(f.toURI().toURL());
+ logger.info("adding to classpath " + f.toURI().toURL());
+ }
+ }
+
+ } catch (MalformedURLException e) {
+ throw new JobTypeManagerException(e);
+ }
+
+ // each job type can have a different class loader
+ ClassLoader jobTypeLoader =
+ new URLClassLoader(resources.toArray(new URL[resources.size()]),
+ parentLoader);
+ return jobTypeLoader;
+ }
+
+ public Job buildJobExecutor(String jobId, Props jobProps, Logger logger)
+ throws JobTypeManagerException {
+ // This is final because during build phase, you should never need to swap
+ // the pluginSet for safety reasons
+ final JobTypePluginSet pluginSet = getJobTypePluginSet();
+
+ Job job = null;
+ try {
+ String jobType = jobProps.getString("type");
+ if (jobType == null || jobType.length() == 0) {
+ /* throw an exception when job name is null or empty */
+ throw new JobExecutionException(String.format(
+ "The 'type' parameter for job[%s] is null or empty", jobProps,
+ logger));
+ }
+
+ logger.info("Building " + jobType + " job executor. ");
+
+ Class<? extends Object> executorClass = pluginSet.getPluginClass(jobType);
+ if (executorClass == null) {
+ throw new JobExecutionException(String.format("Job type '" + jobType
+ + "' is unrecognized. Could not construct job[%s] of type[%s].",
+ jobProps, jobType));
+ }
+
+ Props pluginJobProps = pluginSet.getPluginJobProps(jobType);
+ if (pluginJobProps != null) {
+ for (String k : pluginJobProps.getKeySet()) {
+ if (!jobProps.containsKey(k)) {
+ jobProps.put(k, pluginJobProps.get(k));
+ }
+ }
+ }
+ jobProps = PropsUtils.resolveProps(jobProps);
+
+ Props pluginLoadProps = pluginSet.getPluginLoaderProps(jobType);
+ if (pluginLoadProps != null) {
+ pluginLoadProps = PropsUtils.resolveProps(pluginLoadProps);
+ } else {
+ pluginLoadProps = new Props();
+ }
+
+ job =
+ (Job) Utils.callConstructor(executorClass, jobId, pluginLoadProps,
+ jobProps, logger);
+ } catch (Exception e) {
+ logger.error("Failed to build job executor for job " + jobId
+ + e.getMessage());
+ throw new JobTypeManagerException("Failed to build job executor for job "
+ + jobId, e);
+ } catch (Throwable t) {
+ logger.error(
+ "Failed to build job executor for job " + jobId + t.getMessage(), t);
+ throw new JobTypeManagerException("Failed to build job executor for job "
+ + jobId, t);
+ }
+
+ return job;
+ }
+
+ /**
+ * Public for test reasons. Will need to move tests to the same package
+ */
+ public synchronized JobTypePluginSet getJobTypePluginSet() {
+ return this.pluginSet;
+ }
+}
diff --git a/src/main/java/azkaban/jobtype/JobTypeManagerException.java b/src/main/java/azkaban/jobtype/JobTypeManagerException.java
index a5750d4..d3cddb3 100644
--- a/src/main/java/azkaban/jobtype/JobTypeManagerException.java
+++ b/src/main/java/azkaban/jobtype/JobTypeManagerException.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -17,18 +17,18 @@
package azkaban.jobtype;
public class JobTypeManagerException extends RuntimeException {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 1L;
- public JobTypeManagerException(String message) {
- super(message);
- }
+ public JobTypeManagerException(String message) {
+ super(message);
+ }
- public JobTypeManagerException(Throwable cause) {
- super(cause);
- }
+ public JobTypeManagerException(Throwable cause) {
+ super(cause);
+ }
- public JobTypeManagerException(String message, Throwable cause) {
- super(message, cause);
- }
+ public JobTypeManagerException(String message, Throwable cause) {
+ super(message, cause);
+ }
}
src/main/java/azkaban/jobtype/JobTypePluginSet.java 244(+125 -119)
diff --git a/src/main/java/azkaban/jobtype/JobTypePluginSet.java b/src/main/java/azkaban/jobtype/JobTypePluginSet.java
index 9e6ded2..f2e9133 100644
--- a/src/main/java/azkaban/jobtype/JobTypePluginSet.java
+++ b/src/main/java/azkaban/jobtype/JobTypePluginSet.java
@@ -1,12 +1,12 @@
/*
* Copyright 2014 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -24,122 +24,128 @@ import azkaban.utils.Props;
/**
* Container for job type plugins
*
- * This contains the jobClass objects, the properties for loading plugins, and the
- * properties given by default to the plugin.
+ * This contains the jobClass objects, the properties for loading plugins, and
+ * the properties given by default to the plugin.
*
- * This class is not thread safe, so adding to this class should only be populated
- * and controlled by the JobTypeManager
+ * This class is not thread safe, so adding to this class should only be
+ * populated and controlled by the JobTypeManager
*/
public class JobTypePluginSet {
- private Map<String, Class<? extends Job>> jobToClass;
- private Map<String, Props> pluginJobPropsMap;
- private Map<String, Props> pluginLoadPropsMap;
-
- private Props commonJobProps;
- private Props commonLoadProps;
-
- /**
- * Base constructor
- */
- public JobTypePluginSet() {
- jobToClass = new HashMap<String, Class<? extends Job>>();
- pluginJobPropsMap = new HashMap<String, Props>();
- pluginLoadPropsMap = new HashMap<String, Props>();
- }
-
- /**
- * Copy constructor
- * @param clone
- */
- public JobTypePluginSet(JobTypePluginSet clone) {
- jobToClass = new HashMap<String, Class<? extends Job>>(clone.jobToClass);
- pluginJobPropsMap = new HashMap<String, Props>(clone.pluginJobPropsMap);
- pluginLoadPropsMap = new HashMap<String, Props>(clone.pluginLoadPropsMap);
- commonJobProps = clone.commonJobProps;
- commonLoadProps = clone.commonLoadProps;
- }
-
- /**
- * Sets the common properties shared in every jobtype
- * @param commonJobProps
- */
- public void setCommonPluginJobProps(Props commonJobProps) {
- this.commonJobProps = commonJobProps;
- }
-
- /**
- * Sets the common properties used to load every plugin
- * @param commonLoadProps
- */
- public void setCommonPluginLoadProps(Props commonLoadProps) {
- this.commonLoadProps = commonLoadProps;
- }
-
- /**
- * Gets common properties for every jobtype
- * @return
- */
- public Props getCommonPluginJobProps() {
- return commonJobProps;
- }
-
- /**
- * Gets the common properties used to load a plugin
- * @return
- */
- public Props getCommonPluginLoadProps() {
- return commonLoadProps;
- }
-
- /**
- * Get the properties for a jobtype used to setup and load a plugin
- *
- * @param jobTypeName
- * @return
- */
- public Props getPluginLoaderProps(String jobTypeName) {
- return pluginLoadPropsMap.get(jobTypeName);
- }
-
- /**
- * Get the properties that will be given to the plugin as default job
- * properties.
- *
- * @param jobTypeName
- * @return
- */
- public Props getPluginJobProps(String jobTypeName) {
- return pluginJobPropsMap.get(jobTypeName);
- }
-
- /**
- * Gets the plugin job runner class
- *
- * @param jobTypeName
- * @return
- */
- public Class<? extends Job> getPluginClass(String jobTypeName) {
- return jobToClass.get(jobTypeName);
- }
-
- /**
- * Adds plugin jobtype class
- */
- public void addPluginClass(String jobTypeName, Class<? extends Job> jobTypeClass) {
- jobToClass.put(jobTypeName, jobTypeClass);
- }
-
- /**
- * Adds plugin job properties used as default runtime properties
- */
- public void addPluginJobProps(String jobTypeName, Props props) {
- pluginJobPropsMap.put(jobTypeName, props);
- }
-
- /**
- * Adds plugin load properties used to load the plugin
- */
- public void addPluginLoadProps(String jobTypeName, Props props) {
- pluginLoadPropsMap.put(jobTypeName, props);
- }
-}
\ No newline at end of file
+ private Map<String, Class<? extends Job>> jobToClass;
+ private Map<String, Props> pluginJobPropsMap;
+ private Map<String, Props> pluginLoadPropsMap;
+
+ private Props commonJobProps;
+ private Props commonLoadProps;
+
+ /**
+ * Base constructor
+ */
+ public JobTypePluginSet() {
+ jobToClass = new HashMap<String, Class<? extends Job>>();
+ pluginJobPropsMap = new HashMap<String, Props>();
+ pluginLoadPropsMap = new HashMap<String, Props>();
+ }
+
+ /**
+ * Copy constructor
+ *
+ * @param clone
+ */
+ public JobTypePluginSet(JobTypePluginSet clone) {
+ jobToClass = new HashMap<String, Class<? extends Job>>(clone.jobToClass);
+ pluginJobPropsMap = new HashMap<String, Props>(clone.pluginJobPropsMap);
+ pluginLoadPropsMap = new HashMap<String, Props>(clone.pluginLoadPropsMap);
+ commonJobProps = clone.commonJobProps;
+ commonLoadProps = clone.commonLoadProps;
+ }
+
+ /**
+ * Sets the common properties shared in every jobtype
+ *
+ * @param commonJobProps
+ */
+ public void setCommonPluginJobProps(Props commonJobProps) {
+ this.commonJobProps = commonJobProps;
+ }
+
+ /**
+ * Sets the common properties used to load every plugin
+ *
+ * @param commonLoadProps
+ */
+ public void setCommonPluginLoadProps(Props commonLoadProps) {
+ this.commonLoadProps = commonLoadProps;
+ }
+
+ /**
+ * Gets common properties for every jobtype
+ *
+ * @return
+ */
+ public Props getCommonPluginJobProps() {
+ return commonJobProps;
+ }
+
+ /**
+ * Gets the common properties used to load a plugin
+ *
+ * @return
+ */
+ public Props getCommonPluginLoadProps() {
+ return commonLoadProps;
+ }
+
+ /**
+ * Get the properties for a jobtype used to setup and load a plugin
+ *
+ * @param jobTypeName
+ * @return
+ */
+ public Props getPluginLoaderProps(String jobTypeName) {
+ return pluginLoadPropsMap.get(jobTypeName);
+ }
+
+ /**
+ * Get the properties that will be given to the plugin as default job
+ * properties.
+ *
+ * @param jobTypeName
+ * @return
+ */
+ public Props getPluginJobProps(String jobTypeName) {
+ return pluginJobPropsMap.get(jobTypeName);
+ }
+
+ /**
+ * Gets the plugin job runner class
+ *
+ * @param jobTypeName
+ * @return
+ */
+ public Class<? extends Job> getPluginClass(String jobTypeName) {
+ return jobToClass.get(jobTypeName);
+ }
+
+ /**
+ * Adds plugin jobtype class
+ */
+ public void addPluginClass(String jobTypeName,
+ Class<? extends Job> jobTypeClass) {
+ jobToClass.put(jobTypeName, jobTypeClass);
+ }
+
+ /**
+ * Adds plugin job properties used as default runtime properties
+ */
+ public void addPluginJobProps(String jobTypeName, Props props) {
+ pluginJobPropsMap.put(jobTypeName, props);
+ }
+
+ /**
+ * Adds plugin load properties used to load the plugin
+ */
+ public void addPluginLoadProps(String jobTypeName, Props props) {
+ pluginLoadPropsMap.put(jobTypeName, props);
+ }
+}
diff --git a/src/main/java/azkaban/migration/schedule2trigger/CommonParams.java b/src/main/java/azkaban/migration/schedule2trigger/CommonParams.java
index 0408f51..9b0ce41 100644
--- a/src/main/java/azkaban/migration/schedule2trigger/CommonParams.java
+++ b/src/main/java/azkaban/migration/schedule2trigger/CommonParams.java
@@ -1,22 +1,38 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.migration.schedule2trigger;
public class CommonParams {
- public static final String TYPE_FLOW_FINISH = "FlowFinish";
- public static final String TYPE_FLOW_SUCCEED = "FlowSucceed";
- public static final String TYPE_FLOW_PROGRESS = "FlowProgress";
+ public static final String TYPE_FLOW_FINISH = "FlowFinish";
+ public static final String TYPE_FLOW_SUCCEED = "FlowSucceed";
+ public static final String TYPE_FLOW_PROGRESS = "FlowProgress";
- public static final String TYPE_JOB_FINISH = "JobFinish";
- public static final String TYPE_JOB_SUCCEED = "JobSucceed";
- public static final String TYPE_JOB_PROGRESS = "JobProgress";
+ public static final String TYPE_JOB_FINISH = "JobFinish";
+ public static final String TYPE_JOB_SUCCEED = "JobSucceed";
+ public static final String TYPE_JOB_PROGRESS = "JobProgress";
- public static final String INFO_DURATION = "Duration";
- public static final String INFO_FLOW_NAME = "FlowName";
- public static final String INFO_JOB_NAME = "JobName";
- public static final String INFO_PROGRESS_PERCENT = "ProgressPercent";
- public static final String INFO_EMAIL_LIST = "EmailList";
+ public static final String INFO_DURATION = "Duration";
+ public static final String INFO_FLOW_NAME = "FlowName";
+ public static final String INFO_JOB_NAME = "JobName";
+ public static final String INFO_PROGRESS_PERCENT = "ProgressPercent";
+ public static final String INFO_EMAIL_LIST = "EmailList";
- // always alert
- public static final String ALERT_TYPE = "SlaAlertType";
- public static final String ACTION_CANCEL_FLOW = "SlaCancelFlow";
- public static final String ACTION_ALERT = "SlaAlert";
+ // always alert
+ public static final String ALERT_TYPE = "SlaAlertType";
+ public static final String ACTION_CANCEL_FLOW = "SlaCancelFlow";
+ public static final String ACTION_ALERT = "SlaAlert";
}
diff --git a/src/main/java/azkaban/migration/schedule2trigger/Schedule2Trigger.java b/src/main/java/azkaban/migration/schedule2trigger/Schedule2Trigger.java
index 630d54d..d193d42 100644
--- a/src/main/java/azkaban/migration/schedule2trigger/Schedule2Trigger.java
+++ b/src/main/java/azkaban/migration/schedule2trigger/Schedule2Trigger.java
@@ -1,3 +1,19 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.migration.schedule2trigger;
import java.io.File;
@@ -29,231 +45,269 @@ import azkaban.utils.Utils;
@SuppressWarnings("deprecation")
public class Schedule2Trigger {
-
- private static final Logger logger = Logger.getLogger(Schedule2Trigger.class);
- private static Props props;
- private static File outputDir;
-
- public static void main(String[] args) throws Exception{
- if(args.length < 1) {
- printUsage();
- }
-
- File confFile = new File(args[0]);
- try {
- logger.info("Trying to load config from " + confFile.getAbsolutePath());
- props = loadAzkabanConfig(confFile);
- } catch (Exception e) {
- e.printStackTrace();
- logger.error(e);
- return;
- }
-
- try {
- outputDir = File.createTempFile("schedules", null);
- logger.info("Creating temp dir for dumping existing schedules.");
- outputDir.delete();
- outputDir.mkdir();
- } catch (Exception e) {
- e.printStackTrace();
- logger.error(e);
- return;
- }
-
- try {
- schedule2File();
- } catch (Exception e) {
- e.printStackTrace();
- logger.error(e);
- return;
- }
-
- try {
- file2ScheduleTrigger();
- } catch (Exception e) {
- e.printStackTrace();
- logger.error(e);
- return;
- }
-
- logger.info("Uploaded all schedules. Removing temp dir.");
- FileUtils.deleteDirectory(outputDir);
- System.exit(0);
- }
-
- private static Props loadAzkabanConfig(File confFile) throws IOException {
- return new Props(null, confFile);
- }
-
- private static void printUsage() {
- System.out.println("Usage: schedule2Trigger PATH_TO_CONFIG_FILE");
- }
-
- private static void schedule2File() throws Exception {
- azkaban.migration.scheduler.ScheduleLoader scheduleLoader = new azkaban.migration.scheduler.JdbcScheduleLoader(props);
- logger.info("Loading old schedule info from DB.");
- List<azkaban.migration.scheduler.Schedule> schedules = scheduleLoader.loadSchedules();
- for(azkaban.migration.scheduler.Schedule sched : schedules) {
- writeScheduleFile(sched, outputDir);
- }
- }
-
- private static void writeScheduleFile(azkaban.migration.scheduler.Schedule sched, File outputDir) throws IOException {
- String scheduleFileName = sched.getProjectName()+"-"+sched.getFlowName();
- File outputFile = new File(outputDir, scheduleFileName);
- outputFile.createNewFile();
- Props props = new Props();
- props.put("flowName", sched.getFlowName());
- props.put("projectName", sched.getProjectName());
- props.put("projectId", String.valueOf(sched.getProjectId()));
- props.put("period", azkaban.migration.scheduler.Schedule.createPeriodString(sched.getPeriod()));
- props.put("firstScheduleTimeLong", sched.getFirstSchedTime());
- props.put("timezone", sched.getTimezone().getID());
- props.put("submitUser", sched.getSubmitUser());
- props.put("submitTimeLong", sched.getSubmitTime());
- props.put("nextExecTimeLong", sched.getNextExecTime());
-
- ExecutionOptions executionOptions = sched.getExecutionOptions();
- if(executionOptions != null) {
- props.put("executionOptionsObj", JSONUtils.toJSON(executionOptions.toObject()));
- }
-
- azkaban.migration.sla.SlaOptions slaOptions = sched.getSlaOptions();
- if(slaOptions != null) {
-
- List<Map<String, Object>> settingsObj = new ArrayList<Map<String,Object>>();
- List<azkaban.migration.sla.SLA.SlaSetting> settings = slaOptions.getSettings();
- for(azkaban.migration.sla.SLA.SlaSetting set : settings) {
- Map<String, Object> setObj = new HashMap<String, Object>();
- String setId = set.getId();
- azkaban.migration.sla.SLA.SlaRule rule = set.getRule();
- Map<String, Object> info = new HashMap<String, Object>();
- info.put(INFO_DURATION, azkaban.migration.scheduler.Schedule.createPeriodString(set.getDuration()));
- info.put(INFO_EMAIL_LIST, slaOptions.getSlaEmails());
- List<String> actionsList = new ArrayList<String>();
- for(azkaban.migration.sla.SLA.SlaAction act : set.getActions()) {
- if(act.equals(azkaban.migration.sla.SLA.SlaAction.EMAIL)) {
- actionsList.add(ACTION_ALERT);
- info.put(ALERT_TYPE, "email");
- } else if(act.equals(azkaban.migration.sla.SLA.SlaAction.KILL)) {
- actionsList.add(ACTION_CANCEL_FLOW);
- }
- }
- setObj.put("actions", actionsList);
- if(setId.equals("")) {
- info.put(INFO_FLOW_NAME, sched.getFlowName());
- if(rule.equals(azkaban.migration.sla.SLA.SlaRule.FINISH)) {
- setObj.put("type", TYPE_FLOW_FINISH);
- } else if(rule.equals(azkaban.migration.sla.SLA.SlaRule.SUCCESS)) {
- setObj.put("type", TYPE_FLOW_SUCCEED);
- }
- } else {
- info.put(INFO_JOB_NAME, setId);
- if(rule.equals(azkaban.migration.sla.SLA.SlaRule.FINISH)) {
- setObj.put("type", TYPE_JOB_FINISH);
- } else if(rule.equals(azkaban.migration.sla.SLA.SlaRule.SUCCESS)) {
- setObj.put("type", TYPE_JOB_SUCCEED);
- }
- }
- setObj.put("info", info);
- settingsObj.add(setObj);
- }
-
- props.put("slaOptionsObj", JSONUtils.toJSON(settingsObj));
- }
- props.storeLocal(outputFile);
- }
-
- @SuppressWarnings("unchecked")
- private static void file2ScheduleTrigger() throws Exception {
-
- TriggerLoader triggerLoader = new JdbcTriggerLoader(props);
- for(File scheduleFile : outputDir.listFiles()) {
- logger.info("Trying to load schedule from " + scheduleFile.getAbsolutePath());
- if(scheduleFile.isFile()) {
- Props schedProps = new Props(null, scheduleFile);
- String flowName = schedProps.getString("flowName");
- String projectName = schedProps.getString("projectName");
- int projectId = schedProps.getInt("projectId");
- long firstSchedTimeLong = schedProps.getLong("firstScheduleTimeLong");
-// DateTime firstSchedTime = new DateTime(firstSchedTimeLong);
- String timezoneId = schedProps.getString("timezone");
- DateTimeZone timezone = DateTimeZone.forID(timezoneId);
- ReadablePeriod period = Utils.parsePeriodString(schedProps.getString("period"));
-// DateTime lastModifyTime = DateTime.now();
- long nextExecTimeLong = schedProps.getLong("nextExecTimeLong");
-// DateTime nextExecTime = new DateTime(nextExecTimeLong);
- long submitTimeLong = schedProps.getLong("submitTimeLong");
-// DateTime submitTime = new DateTime(submitTimeLong);
- String submitUser = schedProps.getString("submitUser");
- ExecutionOptions executionOptions = null;
- if(schedProps.containsKey("executionOptionsObj")) {
- String executionOptionsObj = schedProps.getString("executionOptionsObj");
- executionOptions = ExecutionOptions.createFromObject(JSONUtils.parseJSONFromString(executionOptionsObj));
- } else {
- executionOptions = new ExecutionOptions();
- }
- List<azkaban.sla.SlaOption> slaOptions = null;
- if(schedProps.containsKey("slaOptionsObj")) {
- slaOptions = new ArrayList<azkaban.sla.SlaOption>();
- List<Map<String, Object>> settingsObj = (List<Map<String, Object>>) JSONUtils.parseJSONFromString(schedProps.getString("slaOptionsObj"));
- for(Map<String, Object> sla : settingsObj) {
- String type = (String) sla.get("type");
- Map<String, Object> info = (Map<String, Object>) sla.get("info");
- List<String> actions = (List<String>) sla.get("actions");
- azkaban.sla.SlaOption slaOption = new azkaban.sla.SlaOption(type, actions, info);
- slaOptions.add(slaOption);
- }
- }
-
- azkaban.scheduler.Schedule schedule = new azkaban.scheduler.Schedule(-1, projectId, projectName, flowName, "ready", firstSchedTimeLong, timezone, period, DateTime.now().getMillis(), nextExecTimeLong, submitTimeLong, submitUser, executionOptions, slaOptions);
- Trigger t = scheduleToTrigger(schedule);
- logger.info("Ready to insert trigger " + t.getDescription());
- triggerLoader.addTrigger(t);
-
- }
-
- }
- }
-
-
- private static Trigger scheduleToTrigger(azkaban.scheduler.Schedule s) {
-
- Condition triggerCondition = createTimeTriggerCondition(s);
- Condition expireCondition = createTimeExpireCondition(s);
- List<TriggerAction> actions = createActions(s);
- Trigger t = new Trigger(s.getScheduleId(), s.getLastModifyTime(), s.getSubmitTime(), s.getSubmitUser(), azkaban.scheduler.ScheduleManager.triggerSource, triggerCondition, expireCondition, actions);
- if(s.isRecurring()) {
- t.setResetOnTrigger(true);
- }
- return t;
- }
-
- private static List<TriggerAction> createActions (azkaban.scheduler.Schedule s) {
- List<TriggerAction> actions = new ArrayList<TriggerAction>();
- ExecuteFlowAction executeAct = new ExecuteFlowAction("executeFlowAction", s.getProjectId(), s.getProjectName(), s.getFlowName(), s.getSubmitUser(), s.getExecutionOptions(), s.getSlaOptions());
- actions.add(executeAct);
-
- return actions;
- }
-
- private static Condition createTimeTriggerCondition (azkaban.scheduler.Schedule s) {
- Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
- ConditionChecker checker = new BasicTimeChecker("BasicTimeChecker_1", s.getFirstSchedTime(), s.getTimezone(), s.isRecurring(), s.skipPastOccurrences(), s.getPeriod());
- checkers.put(checker.getId(), checker);
- String expr = checker.getId() + ".eval()";
- Condition cond = new Condition(checkers, expr);
- return cond;
- }
-
- // if failed to trigger, auto expire?
- private static Condition createTimeExpireCondition (azkaban.scheduler.Schedule s) {
- Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
- ConditionChecker checker = new BasicTimeChecker("BasicTimeChecker_2", s.getFirstSchedTime(), s.getTimezone(), s.isRecurring(), s.skipPastOccurrences(), s.getPeriod());
- checkers.put(checker.getId(), checker);
- String expr = checker.getId() + ".eval()";
- Condition cond = new Condition(checkers, expr);
- return cond;
- }
+
+ private static final Logger logger = Logger.getLogger(Schedule2Trigger.class);
+ private static Props props;
+ private static File outputDir;
+
+ public static void main(String[] args) throws Exception {
+ if (args.length < 1) {
+ printUsage();
+ }
+
+ File confFile = new File(args[0]);
+ try {
+ logger.info("Trying to load config from " + confFile.getAbsolutePath());
+ props = loadAzkabanConfig(confFile);
+ } catch (Exception e) {
+ e.printStackTrace();
+ logger.error(e);
+ return;
+ }
+
+ try {
+ outputDir = File.createTempFile("schedules", null);
+ logger.info("Creating temp dir for dumping existing schedules.");
+ outputDir.delete();
+ outputDir.mkdir();
+ } catch (Exception e) {
+ e.printStackTrace();
+ logger.error(e);
+ return;
+ }
+
+ try {
+ schedule2File();
+ } catch (Exception e) {
+ e.printStackTrace();
+ logger.error(e);
+ return;
+ }
+
+ try {
+ file2ScheduleTrigger();
+ } catch (Exception e) {
+ e.printStackTrace();
+ logger.error(e);
+ return;
+ }
+
+ logger.info("Uploaded all schedules. Removing temp dir.");
+ FileUtils.deleteDirectory(outputDir);
+ System.exit(0);
+ }
+
+ private static Props loadAzkabanConfig(File confFile) throws IOException {
+ return new Props(null, confFile);
+ }
+
+ private static void printUsage() {
+ System.out.println("Usage: schedule2Trigger PATH_TO_CONFIG_FILE");
+ }
+
+ private static void schedule2File() throws Exception {
+ azkaban.migration.scheduler.ScheduleLoader scheduleLoader =
+ new azkaban.migration.scheduler.JdbcScheduleLoader(props);
+ logger.info("Loading old schedule info from DB.");
+ List<azkaban.migration.scheduler.Schedule> schedules =
+ scheduleLoader.loadSchedules();
+ for (azkaban.migration.scheduler.Schedule sched : schedules) {
+ writeScheduleFile(sched, outputDir);
+ }
+ }
+
+ private static void writeScheduleFile(
+ azkaban.migration.scheduler.Schedule sched, File outputDir)
+ throws IOException {
+ String scheduleFileName =
+ sched.getProjectName() + "-" + sched.getFlowName();
+ File outputFile = new File(outputDir, scheduleFileName);
+ outputFile.createNewFile();
+ Props props = new Props();
+ props.put("flowName", sched.getFlowName());
+ props.put("projectName", sched.getProjectName());
+ props.put("projectId", String.valueOf(sched.getProjectId()));
+ props.put("period", azkaban.migration.scheduler.Schedule
+ .createPeriodString(sched.getPeriod()));
+ props.put("firstScheduleTimeLong", sched.getFirstSchedTime());
+ props.put("timezone", sched.getTimezone().getID());
+ props.put("submitUser", sched.getSubmitUser());
+ props.put("submitTimeLong", sched.getSubmitTime());
+ props.put("nextExecTimeLong", sched.getNextExecTime());
+
+ ExecutionOptions executionOptions = sched.getExecutionOptions();
+ if (executionOptions != null) {
+ props.put("executionOptionsObj",
+ JSONUtils.toJSON(executionOptions.toObject()));
+ }
+
+ azkaban.migration.sla.SlaOptions slaOptions = sched.getSlaOptions();
+ if (slaOptions != null) {
+
+ List<Map<String, Object>> settingsObj =
+ new ArrayList<Map<String, Object>>();
+ List<azkaban.migration.sla.SLA.SlaSetting> settings =
+ slaOptions.getSettings();
+ for (azkaban.migration.sla.SLA.SlaSetting set : settings) {
+ Map<String, Object> setObj = new HashMap<String, Object>();
+ String setId = set.getId();
+ azkaban.migration.sla.SLA.SlaRule rule = set.getRule();
+ Map<String, Object> info = new HashMap<String, Object>();
+ info.put(INFO_DURATION, azkaban.migration.scheduler.Schedule
+ .createPeriodString(set.getDuration()));
+ info.put(INFO_EMAIL_LIST, slaOptions.getSlaEmails());
+ List<String> actionsList = new ArrayList<String>();
+ for (azkaban.migration.sla.SLA.SlaAction act : set.getActions()) {
+ if (act.equals(azkaban.migration.sla.SLA.SlaAction.EMAIL)) {
+ actionsList.add(ACTION_ALERT);
+ info.put(ALERT_TYPE, "email");
+ } else if (act.equals(azkaban.migration.sla.SLA.SlaAction.KILL)) {
+ actionsList.add(ACTION_CANCEL_FLOW);
+ }
+ }
+ setObj.put("actions", actionsList);
+ if (setId.equals("")) {
+ info.put(INFO_FLOW_NAME, sched.getFlowName());
+ if (rule.equals(azkaban.migration.sla.SLA.SlaRule.FINISH)) {
+ setObj.put("type", TYPE_FLOW_FINISH);
+ } else if (rule.equals(azkaban.migration.sla.SLA.SlaRule.SUCCESS)) {
+ setObj.put("type", TYPE_FLOW_SUCCEED);
+ }
+ } else {
+ info.put(INFO_JOB_NAME, setId);
+ if (rule.equals(azkaban.migration.sla.SLA.SlaRule.FINISH)) {
+ setObj.put("type", TYPE_JOB_FINISH);
+ } else if (rule.equals(azkaban.migration.sla.SLA.SlaRule.SUCCESS)) {
+ setObj.put("type", TYPE_JOB_SUCCEED);
+ }
+ }
+ setObj.put("info", info);
+ settingsObj.add(setObj);
+ }
+
+ props.put("slaOptionsObj", JSONUtils.toJSON(settingsObj));
+ }
+ props.storeLocal(outputFile);
+ }
+
+ @SuppressWarnings("unchecked")
+ private static void file2ScheduleTrigger() throws Exception {
+
+ TriggerLoader triggerLoader = new JdbcTriggerLoader(props);
+ for (File scheduleFile : outputDir.listFiles()) {
+ logger.info("Trying to load schedule from "
+ + scheduleFile.getAbsolutePath());
+ if (scheduleFile.isFile()) {
+ Props schedProps = new Props(null, scheduleFile);
+ String flowName = schedProps.getString("flowName");
+ String projectName = schedProps.getString("projectName");
+ int projectId = schedProps.getInt("projectId");
+ long firstSchedTimeLong = schedProps.getLong("firstScheduleTimeLong");
+ // DateTime firstSchedTime = new DateTime(firstSchedTimeLong);
+ String timezoneId = schedProps.getString("timezone");
+ DateTimeZone timezone = DateTimeZone.forID(timezoneId);
+ ReadablePeriod period =
+ Utils.parsePeriodString(schedProps.getString("period"));
+ // DateTime lastModifyTime = DateTime.now();
+ long nextExecTimeLong = schedProps.getLong("nextExecTimeLong");
+ // DateTime nextExecTime = new DateTime(nextExecTimeLong);
+ long submitTimeLong = schedProps.getLong("submitTimeLong");
+ // DateTime submitTime = new DateTime(submitTimeLong);
+ String submitUser = schedProps.getString("submitUser");
+ ExecutionOptions executionOptions = null;
+ if (schedProps.containsKey("executionOptionsObj")) {
+ String executionOptionsObj =
+ schedProps.getString("executionOptionsObj");
+ executionOptions =
+ ExecutionOptions.createFromObject(JSONUtils
+ .parseJSONFromString(executionOptionsObj));
+ } else {
+ executionOptions = new ExecutionOptions();
+ }
+ List<azkaban.sla.SlaOption> slaOptions = null;
+ if (schedProps.containsKey("slaOptionsObj")) {
+ slaOptions = new ArrayList<azkaban.sla.SlaOption>();
+ List<Map<String, Object>> settingsObj =
+ (List<Map<String, Object>>) JSONUtils
+ .parseJSONFromString(schedProps.getString("slaOptionsObj"));
+ for (Map<String, Object> sla : settingsObj) {
+ String type = (String) sla.get("type");
+ Map<String, Object> info = (Map<String, Object>) sla.get("info");
+ List<String> actions = (List<String>) sla.get("actions");
+ azkaban.sla.SlaOption slaOption =
+ new azkaban.sla.SlaOption(type, actions, info);
+ slaOptions.add(slaOption);
+ }
+ }
+
+ azkaban.scheduler.Schedule schedule =
+ new azkaban.scheduler.Schedule(-1, projectId, projectName,
+ flowName, "ready", firstSchedTimeLong, timezone, period,
+ DateTime.now().getMillis(), nextExecTimeLong, submitTimeLong,
+ submitUser, executionOptions, slaOptions);
+ Trigger t = scheduleToTrigger(schedule);
+ logger.info("Ready to insert trigger " + t.getDescription());
+ triggerLoader.addTrigger(t);
+
+ }
+
+ }
+ }
+
+ private static Trigger scheduleToTrigger(azkaban.scheduler.Schedule s) {
+
+ Condition triggerCondition = createTimeTriggerCondition(s);
+ Condition expireCondition = createTimeExpireCondition(s);
+ List<TriggerAction> actions = createActions(s);
+ Trigger t =
+ new Trigger(s.getScheduleId(), s.getLastModifyTime(),
+ s.getSubmitTime(), s.getSubmitUser(),
+ azkaban.scheduler.ScheduleManager.triggerSource, triggerCondition,
+ expireCondition, actions);
+ if (s.isRecurring()) {
+ t.setResetOnTrigger(true);
+ }
+ return t;
+ }
+
+ private static List<TriggerAction> createActions(azkaban.scheduler.Schedule s) {
+ List<TriggerAction> actions = new ArrayList<TriggerAction>();
+ ExecuteFlowAction executeAct =
+ new ExecuteFlowAction("executeFlowAction", s.getProjectId(),
+ s.getProjectName(), s.getFlowName(), s.getSubmitUser(),
+ s.getExecutionOptions(), s.getSlaOptions());
+ actions.add(executeAct);
+
+ return actions;
+ }
+
+ private static Condition createTimeTriggerCondition(
+ azkaban.scheduler.Schedule s) {
+ Map<String, ConditionChecker> checkers =
+ new HashMap<String, ConditionChecker>();
+ ConditionChecker checker =
+ new BasicTimeChecker("BasicTimeChecker_1", s.getFirstSchedTime(),
+ s.getTimezone(), s.isRecurring(), s.skipPastOccurrences(),
+ s.getPeriod());
+ checkers.put(checker.getId(), checker);
+ String expr = checker.getId() + ".eval()";
+ Condition cond = new Condition(checkers, expr);
+ return cond;
+ }
+
+ // if failed to trigger, auto expire?
+ private static Condition createTimeExpireCondition(
+ azkaban.scheduler.Schedule s) {
+ Map<String, ConditionChecker> checkers =
+ new HashMap<String, ConditionChecker>();
+ ConditionChecker checker =
+ new BasicTimeChecker("BasicTimeChecker_2", s.getFirstSchedTime(),
+ s.getTimezone(), s.isRecurring(), s.skipPastOccurrences(),
+ s.getPeriod());
+ checkers.put(checker.getId(), checker);
+ String expr = checker.getId() + ".eval()";
+ Condition cond = new Condition(checkers, expr);
+ return cond;
+ }
}
diff --git a/src/main/java/azkaban/migration/scheduler/JdbcScheduleLoader.java b/src/main/java/azkaban/migration/scheduler/JdbcScheduleLoader.java
index bcef168..70449cd 100644
--- a/src/main/java/azkaban/migration/scheduler/JdbcScheduleLoader.java
+++ b/src/main/java/azkaban/migration/scheduler/JdbcScheduleLoader.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn, Inc
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -16,7 +16,6 @@
package azkaban.migration.scheduler;
-
import azkaban.database.DataSourceUtils;
import azkaban.utils.GZIPUtils;
import azkaban.utils.JSONUtils;
@@ -40,325 +39,341 @@ import org.joda.time.ReadablePeriod;
@Deprecated
public class JdbcScheduleLoader implements ScheduleLoader {
- private static Logger logger = Logger.getLogger(JdbcScheduleLoader.class);
-
- public static enum EncodingType {
- PLAIN(1), GZIP(2);
-
- private int numVal;
-
- EncodingType(int numVal) {
- this.numVal = numVal;
- }
-
- public int getNumVal() {
- return numVal;
- }
-
- public static EncodingType fromInteger(int x) {
- switch (x) {
- case 1:
- return PLAIN;
- case 2:
- return GZIP;
- default:
- return PLAIN;
- }
- }
- }
-
- private DataSource dataSource;
- private EncodingType defaultEncodingType = EncodingType.GZIP;
-
- private static final String scheduleTableName = "schedules";
-
- private static String SELECT_ALL_SCHEDULES =
- "SELECT project_id, project_name, flow_name, status, first_sched_time, timezone, period, last_modify_time, next_exec_time, submit_time, submit_user, enc_type, schedule_options FROM " + scheduleTableName;
-
- private static String INSERT_SCHEDULE =
- "INSERT INTO " + scheduleTableName + " ( project_id, project_name, flow_name, status, first_sched_time, timezone, period, last_modify_time, next_exec_time, submit_time, submit_user, enc_type, schedule_options) values (?,?,?,?,?,?,?,?,?,?,?,?,?)";
-
- private static String REMOVE_SCHEDULE_BY_KEY =
- "DELETE FROM " + scheduleTableName + " WHERE project_id=? AND flow_name=?";
-
- private static String UPDATE_SCHEDULE_BY_KEY =
- "UPDATE " + scheduleTableName + " SET status=?, first_sched_time=?, timezone=?, period=?, last_modify_time=?, next_exec_time=?, submit_time=?, submit_user=?, enc_type=?, schedule_options=? WHERE project_id=? AND flow_name=?";
-
- private static String UPDATE_NEXT_EXEC_TIME =
- "UPDATE " + scheduleTableName + " SET next_exec_time=? WHERE project_id=? AND flow_name=?";
-
- private Connection getConnection() throws ScheduleManagerException {
- Connection connection = null;
- try {
- connection = dataSource.getConnection();
- } catch (Exception e) {
- DbUtils.closeQuietly(connection);
- throw new ScheduleManagerException("Error getting DB connection.", e);
- }
-
- return connection;
- }
-
- public EncodingType getDefaultEncodingType() {
- return defaultEncodingType;
- }
-
- public void setDefaultEncodingType(EncodingType defaultEncodingType) {
- this.defaultEncodingType = defaultEncodingType;
- }
-
- public JdbcScheduleLoader(Props props) {
- String databaseType = props.getString("database.type");
-
- if (databaseType.equals("mysql")) {
- int port = props.getInt("mysql.port");
- String host = props.getString("mysql.host");
- String database = props.getString("mysql.database");
- String user = props.getString("mysql.user");
- String password = props.getString("mysql.password");
- int numConnections = props.getInt("mysql.numconnections");
-
- dataSource = DataSourceUtils.getMySQLDataSource(host, port, database, user, password, numConnections);
- }
- }
-
- @Override
- public List<Schedule> loadSchedules() throws ScheduleManagerException {
- logger.info("Loading all schedules from db.");
- Connection connection = getConnection();
-
- QueryRunner runner = new QueryRunner();
- ResultSetHandler<List<Schedule>> handler = new ScheduleResultHandler();
-
- List<Schedule> schedules;
-
- try {
- schedules = runner.query(connection, SELECT_ALL_SCHEDULES, handler);
- } catch (SQLException e) {
- logger.error(SELECT_ALL_SCHEDULES + " failed.");
-
- DbUtils.closeQuietly(connection);
- throw new ScheduleManagerException("Loading schedules from db failed. ", e);
- } finally {
- DbUtils.closeQuietly(connection);
- }
-
- logger.info("Now trying to update the schedules");
-
- // filter the schedules
- Iterator<Schedule> scheduleIterator = schedules.iterator();
- while (scheduleIterator.hasNext()) {
- Schedule sched = scheduleIterator.next();
- if(!sched.updateTime()) {
- logger.info("Schedule " + sched.getScheduleName() + " was scheduled before azkaban start, skipping it.");
- scheduleIterator.remove();
- removeSchedule(sched);
- }
- else {
- logger.info("Recurring schedule, need to update next exec time");
- try {
- updateNextExecTime(sched);
- } catch (Exception e) {
- e.printStackTrace();
- throw new ScheduleManagerException("Update next execution time failed.", e);
- }
- logger.info("Schedule " + sched.getScheduleName() + " loaded and updated.");
- }
- }
-
-
-
- logger.info("Loaded " + schedules.size() + " schedules.");
-
- return schedules;
- }
-
- @Override
- public void removeSchedule(Schedule s) throws ScheduleManagerException {
- logger.info("Removing schedule " + s.getScheduleName() + " from db.");
-
- QueryRunner runner = new QueryRunner(dataSource);
-
- try {
- int removes = runner.update(REMOVE_SCHEDULE_BY_KEY, s.getProjectId(), s.getFlowName());
- if (removes == 0) {
- throw new ScheduleManagerException("No schedule has been removed.");
- }
- } catch (SQLException e) {
- logger.error(REMOVE_SCHEDULE_BY_KEY + " failed.");
- throw new ScheduleManagerException("Remove schedule " + s.getScheduleName() + " from db failed. ", e);
- }
- }
-
-
- public void insertSchedule(Schedule s) throws ScheduleManagerException {
- logger.info("Inserting schedule " + s.getScheduleName() + " into db.");
- insertSchedule(s, defaultEncodingType);
- }
-
- public void insertSchedule(Schedule s, EncodingType encType) throws ScheduleManagerException {
-
- String json = JSONUtils.toJSON(s.optionsToObject());
- byte[] data = null;
- try {
- byte[] stringData = json.getBytes("UTF-8");
- data = stringData;
-
- if (encType == EncodingType.GZIP) {
- data = GZIPUtils.gzipBytes(stringData);
- }
- logger.debug("NumChars: " + json.length() + " UTF-8:" + stringData.length + " Gzip:"+ data.length);
- }
- catch (IOException e) {
- throw new ScheduleManagerException("Error encoding the schedule options. " + s.getScheduleName());
- }
-
- QueryRunner runner = new QueryRunner(dataSource);
- try {
- int inserts = runner.update(
- INSERT_SCHEDULE,
- s.getProjectId(),
- s.getProjectName(),
- s.getFlowName(),
- s.getStatus(),
- s.getFirstSchedTime(),
- s.getTimezone().getID(),
- Schedule.createPeriodString(s.getPeriod()),
- s.getLastModifyTime(),
- s.getNextExecTime(),
- s.getSubmitTime(),
- s.getSubmitUser(),
- encType.getNumVal(),
- data);
- if (inserts == 0) {
- throw new ScheduleManagerException("No schedule has been inserted.");
- }
- } catch (SQLException e) {
- logger.error(INSERT_SCHEDULE + " failed.");
- throw new ScheduleManagerException("Insert schedule " + s.getScheduleName() + " into db failed. ", e);
- }
- }
-
- @Override
- public void updateNextExecTime(Schedule s) throws ScheduleManagerException
- {
- logger.info("Update schedule " + s.getScheduleName() + " into db. ");
- Connection connection = getConnection();
- QueryRunner runner = new QueryRunner();
- try {
-
- runner.update(connection, UPDATE_NEXT_EXEC_TIME, s.getNextExecTime(), s.getProjectId(), s.getFlowName());
- } catch (SQLException e) {
- e.printStackTrace();
- logger.error(UPDATE_NEXT_EXEC_TIME + " failed.", e);
- throw new ScheduleManagerException("Update schedule " + s.getScheduleName() + " into db failed. ", e);
- } finally {
- DbUtils.closeQuietly(connection);
- }
- }
-
- @Override
- public void updateSchedule(Schedule s) throws ScheduleManagerException {
- logger.info("Updating schedule " + s.getScheduleName() + " into db.");
- updateSchedule(s, defaultEncodingType);
- }
-
- public void updateSchedule(Schedule s, EncodingType encType) throws ScheduleManagerException {
-
- String json = JSONUtils.toJSON(s.optionsToObject());
- byte[] data = null;
- try {
- byte[] stringData = json.getBytes("UTF-8");
- data = stringData;
-
- if (encType == EncodingType.GZIP) {
- data = GZIPUtils.gzipBytes(stringData);
- }
- logger.debug("NumChars: " + json.length() + " UTF-8:" + stringData.length + " Gzip:"+ data.length);
- }
- catch (IOException e) {
- throw new ScheduleManagerException("Error encoding the schedule options " + s.getScheduleName());
- }
-
- QueryRunner runner = new QueryRunner(dataSource);
-
- try {
- int updates = runner.update(
- UPDATE_SCHEDULE_BY_KEY,
- s.getStatus(),
- s.getFirstSchedTime(),
- s.getTimezone().getID(),
- Schedule.createPeriodString(s.getPeriod()),
- s.getLastModifyTime(),
- s.getNextExecTime(),
- s.getSubmitTime(),
- s.getSubmitUser(),
- encType.getNumVal(),
- data,
- s.getProjectId(),
- s.getFlowName());
- if (updates == 0) {
- throw new ScheduleManagerException("No schedule has been updated.");
- }
- } catch (SQLException e) {
- logger.error(UPDATE_SCHEDULE_BY_KEY + " failed.");
- throw new ScheduleManagerException("Update schedule " + s.getScheduleName() + " into db failed. ", e);
- }
- }
-
- public class ScheduleResultHandler implements ResultSetHandler<List<Schedule>> {
- @Override
- public List<Schedule> handle(ResultSet rs) throws SQLException {
- if (!rs.next()) {
- return Collections.<Schedule>emptyList();
- }
-
- ArrayList<Schedule> schedules = new ArrayList<Schedule>();
- do {
- int projectId = rs.getInt(1);
- String projectName = rs.getString(2);
- String flowName = rs.getString(3);
- String status = rs.getString(4);
- long firstSchedTime = rs.getLong(5);
- DateTimeZone timezone = DateTimeZone.forID(rs.getString(6));
- ReadablePeriod period = Schedule.parsePeriodString(rs.getString(7));
- long lastModifyTime = rs.getLong(8);
- long nextExecTime = rs.getLong(9);
- long submitTime = rs.getLong(10);
- String submitUser = rs.getString(11);
- int encodingType = rs.getInt(12);
- byte[] data = rs.getBytes(13);
-
- Object optsObj = null;
- if (data != null) {
- EncodingType encType = EncodingType.fromInteger(encodingType);
-
- try {
- // Convoluted way to inflate strings. Should find common package or helper function.
- if (encType == EncodingType.GZIP) {
- // Decompress the sucker.
- String jsonString = GZIPUtils.unGzipString(data, "UTF-8");
- optsObj = JSONUtils.parseJSONFromString(jsonString);
- }
- else {
- String jsonString = new String(data, "UTF-8");
- optsObj = JSONUtils.parseJSONFromString(jsonString);
- }
- } catch (IOException e) {
- throw new SQLException("Error reconstructing schedule options " + projectName + "." + flowName);
- }
- }
-
- Schedule s = new Schedule(projectId, projectName, flowName, status, firstSchedTime, timezone, period, lastModifyTime, nextExecTime, submitTime, submitUser);
- if (optsObj != null) {
- s.createAndSetScheduleOptions(optsObj);
- }
-
- schedules.add(s);
- } while (rs.next());
-
- return schedules;
- }
-
- }
-}
\ No newline at end of file
+ private static Logger logger = Logger.getLogger(JdbcScheduleLoader.class);
+
+ public static enum EncodingType {
+ PLAIN(1), GZIP(2);
+
+ private int numVal;
+
+ EncodingType(int numVal) {
+ this.numVal = numVal;
+ }
+
+ public int getNumVal() {
+ return numVal;
+ }
+
+ public static EncodingType fromInteger(int x) {
+ switch (x) {
+ case 1:
+ return PLAIN;
+ case 2:
+ return GZIP;
+ default:
+ return PLAIN;
+ }
+ }
+ }
+
+ private DataSource dataSource;
+ private EncodingType defaultEncodingType = EncodingType.GZIP;
+
+ private static final String scheduleTableName = "schedules";
+
+ private static String SELECT_ALL_SCHEDULES =
+ "SELECT project_id, project_name, flow_name, status, "
+ + "first_sched_time, timezone, period, last_modify_time, "
+ + "next_exec_time, submit_time, submit_user, enc_type, "
+ + "schedule_options FROM "
+ + scheduleTableName;
+
+ private static String INSERT_SCHEDULE =
+ "INSERT INTO "
+ + scheduleTableName
+ + " ( project_id, project_name, flow_name, status, "
+ + "first_sched_time, timezone, period, last_modify_time, "
+ + "next_exec_time, submit_time, submit_user, enc_type, "
+ + "schedule_options) values (?,?,?,?,?,?,?,?,?,?,?,?,?)";
+
+ private static String REMOVE_SCHEDULE_BY_KEY = "DELETE FROM "
+ + scheduleTableName + " WHERE project_id=? AND flow_name=?";
+
+ private static String UPDATE_SCHEDULE_BY_KEY =
+ "UPDATE "
+ + scheduleTableName
+ + " SET status=?, first_sched_time=?, timezone=?, period=?, "
+ + "last_modify_time=?, next_exec_time=?, submit_time=?, "
+ + "submit_user=?, enc_type=?, schedule_options=? "
+ + "WHERE project_id=? AND flow_name=?";
+
+ private static String UPDATE_NEXT_EXEC_TIME = "UPDATE " + scheduleTableName
+ + " SET next_exec_time=? WHERE project_id=? AND flow_name=?";
+
+ private Connection getConnection() throws ScheduleManagerException {
+ Connection connection = null;
+ try {
+ connection = dataSource.getConnection();
+ } catch (Exception e) {
+ DbUtils.closeQuietly(connection);
+ throw new ScheduleManagerException("Error getting DB connection.", e);
+ }
+
+ return connection;
+ }
+
+ public EncodingType getDefaultEncodingType() {
+ return defaultEncodingType;
+ }
+
+ public void setDefaultEncodingType(EncodingType defaultEncodingType) {
+ this.defaultEncodingType = defaultEncodingType;
+ }
+
+ public JdbcScheduleLoader(Props props) {
+ String databaseType = props.getString("database.type");
+
+ if (databaseType.equals("mysql")) {
+ int port = props.getInt("mysql.port");
+ String host = props.getString("mysql.host");
+ String database = props.getString("mysql.database");
+ String user = props.getString("mysql.user");
+ String password = props.getString("mysql.password");
+ int numConnections = props.getInt("mysql.numconnections");
+
+ dataSource =
+ DataSourceUtils.getMySQLDataSource(host, port, database, user,
+ password, numConnections);
+ }
+ }
+
+ @Override
+ public List<Schedule> loadSchedules() throws ScheduleManagerException {
+ logger.info("Loading all schedules from db.");
+ Connection connection = getConnection();
+
+ QueryRunner runner = new QueryRunner();
+ ResultSetHandler<List<Schedule>> handler = new ScheduleResultHandler();
+
+ List<Schedule> schedules;
+
+ try {
+ schedules = runner.query(connection, SELECT_ALL_SCHEDULES, handler);
+ } catch (SQLException e) {
+      logger.error(SELECT_ALL_SCHEDULES + " failed.", e);
+
+ DbUtils.closeQuietly(connection);
+ throw new ScheduleManagerException("Loading schedules from db failed. ",
+ e);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+
+ logger.info("Now trying to update the schedules");
+
+ // filter the schedules
+ Iterator<Schedule> scheduleIterator = schedules.iterator();
+ while (scheduleIterator.hasNext()) {
+ Schedule sched = scheduleIterator.next();
+ if (!sched.updateTime()) {
+ logger.info("Schedule " + sched.getScheduleName()
+ + " was scheduled before azkaban start, skipping it.");
+ scheduleIterator.remove();
+ removeSchedule(sched);
+ } else {
+ logger.info("Recurring schedule, need to update next exec time");
+ try {
+ updateNextExecTime(sched);
+ } catch (Exception e) {
+ e.printStackTrace();
+ throw new ScheduleManagerException(
+ "Update next execution time failed.", e);
+ }
+ logger.info("Schedule " + sched.getScheduleName()
+ + " loaded and updated.");
+ }
+ }
+
+ logger.info("Loaded " + schedules.size() + " schedules.");
+
+ return schedules;
+ }
+
+ @Override
+ public void removeSchedule(Schedule s) throws ScheduleManagerException {
+ logger.info("Removing schedule " + s.getScheduleName() + " from db.");
+
+ QueryRunner runner = new QueryRunner(dataSource);
+
+ try {
+ int removes =
+ runner.update(REMOVE_SCHEDULE_BY_KEY, s.getProjectId(),
+ s.getFlowName());
+ if (removes == 0) {
+ throw new ScheduleManagerException("No schedule has been removed.");
+ }
+ } catch (SQLException e) {
+      logger.error(REMOVE_SCHEDULE_BY_KEY + " failed.", e);
+ throw new ScheduleManagerException("Remove schedule "
+ + s.getScheduleName() + " from db failed. ", e);
+ }
+ }
+
+ public void insertSchedule(Schedule s) throws ScheduleManagerException {
+ logger.info("Inserting schedule " + s.getScheduleName() + " into db.");
+ insertSchedule(s, defaultEncodingType);
+ }
+
+ public void insertSchedule(Schedule s, EncodingType encType)
+ throws ScheduleManagerException {
+
+ String json = JSONUtils.toJSON(s.optionsToObject());
+ byte[] data = null;
+ try {
+ byte[] stringData = json.getBytes("UTF-8");
+ data = stringData;
+
+ if (encType == EncodingType.GZIP) {
+ data = GZIPUtils.gzipBytes(stringData);
+ }
+ logger.debug("NumChars: " + json.length() + " UTF-8:" + stringData.length
+ + " Gzip:" + data.length);
+ } catch (IOException e) {
+      throw new ScheduleManagerException(
+          "Error encoding the schedule options. " + s.getScheduleName(), e);
+ }
+
+ QueryRunner runner = new QueryRunner(dataSource);
+ try {
+ int inserts =
+ runner.update(INSERT_SCHEDULE, s.getProjectId(), s.getProjectName(),
+ s.getFlowName(), s.getStatus(), s.getFirstSchedTime(), s
+ .getTimezone().getID(), Schedule.createPeriodString(s
+ .getPeriod()), s.getLastModifyTime(), s.getNextExecTime(), s
+ .getSubmitTime(), s.getSubmitUser(), encType.getNumVal(),
+ data);
+ if (inserts == 0) {
+ throw new ScheduleManagerException("No schedule has been inserted.");
+ }
+ } catch (SQLException e) {
+      logger.error(INSERT_SCHEDULE + " failed.", e);
+ throw new ScheduleManagerException("Insert schedule "
+ + s.getScheduleName() + " into db failed. ", e);
+ }
+ }
+
+ @Override
+ public void updateNextExecTime(Schedule s) throws ScheduleManagerException {
+ logger.info("Update schedule " + s.getScheduleName() + " into db. ");
+ Connection connection = getConnection();
+ QueryRunner runner = new QueryRunner();
+ try {
+
+ runner.update(connection, UPDATE_NEXT_EXEC_TIME, s.getNextExecTime(),
+ s.getProjectId(), s.getFlowName());
+ } catch (SQLException e) {
+ e.printStackTrace();
+ logger.error(UPDATE_NEXT_EXEC_TIME + " failed.", e);
+ throw new ScheduleManagerException("Update schedule "
+ + s.getScheduleName() + " into db failed. ", e);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+ }
+
+ @Override
+ public void updateSchedule(Schedule s) throws ScheduleManagerException {
+ logger.info("Updating schedule " + s.getScheduleName() + " into db.");
+ updateSchedule(s, defaultEncodingType);
+ }
+
+ public void updateSchedule(Schedule s, EncodingType encType)
+ throws ScheduleManagerException {
+
+ String json = JSONUtils.toJSON(s.optionsToObject());
+ byte[] data = null;
+ try {
+ byte[] stringData = json.getBytes("UTF-8");
+ data = stringData;
+
+ if (encType == EncodingType.GZIP) {
+ data = GZIPUtils.gzipBytes(stringData);
+ }
+ logger.debug("NumChars: " + json.length() + " UTF-8:" + stringData.length
+ + " Gzip:" + data.length);
+ } catch (IOException e) {
+      throw new ScheduleManagerException("Error encoding the schedule options "
+          + s.getScheduleName(), e);
+ }
+
+ QueryRunner runner = new QueryRunner(dataSource);
+
+ try {
+ int updates =
+ runner.update(UPDATE_SCHEDULE_BY_KEY, s.getStatus(),
+ s.getFirstSchedTime(), s.getTimezone().getID(),
+ Schedule.createPeriodString(s.getPeriod()),
+ s.getLastModifyTime(), s.getNextExecTime(), s.getSubmitTime(),
+ s.getSubmitUser(), encType.getNumVal(), data, s.getProjectId(),
+ s.getFlowName());
+ if (updates == 0) {
+ throw new ScheduleManagerException("No schedule has been updated.");
+ }
+ } catch (SQLException e) {
+      logger.error(UPDATE_SCHEDULE_BY_KEY + " failed.", e);
+ throw new ScheduleManagerException("Update schedule "
+ + s.getScheduleName() + " into db failed. ", e);
+ }
+ }
+
+ public class ScheduleResultHandler implements
+ ResultSetHandler<List<Schedule>> {
+ @Override
+ public List<Schedule> handle(ResultSet rs) throws SQLException {
+ if (!rs.next()) {
+ return Collections.<Schedule> emptyList();
+ }
+
+ ArrayList<Schedule> schedules = new ArrayList<Schedule>();
+ do {
+ int projectId = rs.getInt(1);
+ String projectName = rs.getString(2);
+ String flowName = rs.getString(3);
+ String status = rs.getString(4);
+ long firstSchedTime = rs.getLong(5);
+ DateTimeZone timezone = DateTimeZone.forID(rs.getString(6));
+ ReadablePeriod period = Schedule.parsePeriodString(rs.getString(7));
+ long lastModifyTime = rs.getLong(8);
+ long nextExecTime = rs.getLong(9);
+ long submitTime = rs.getLong(10);
+ String submitUser = rs.getString(11);
+ int encodingType = rs.getInt(12);
+ byte[] data = rs.getBytes(13);
+
+ Object optsObj = null;
+ if (data != null) {
+ EncodingType encType = EncodingType.fromInteger(encodingType);
+
+ try {
+ // Convoluted way to inflate strings. Should find common package or
+ // helper function.
+ if (encType == EncodingType.GZIP) {
+ // Decompress the sucker.
+ String jsonString = GZIPUtils.unGzipString(data, "UTF-8");
+ optsObj = JSONUtils.parseJSONFromString(jsonString);
+ } else {
+ String jsonString = new String(data, "UTF-8");
+ optsObj = JSONUtils.parseJSONFromString(jsonString);
+ }
+ } catch (IOException e) {
+          throw new SQLException("Error reconstructing schedule options "
+              + projectName + "." + flowName, e);
+ }
+ }
+
+ Schedule s =
+ new Schedule(projectId, projectName, flowName, status,
+ firstSchedTime, timezone, period, lastModifyTime, nextExecTime,
+ submitTime, submitUser);
+ if (optsObj != null) {
+ s.createAndSetScheduleOptions(optsObj);
+ }
+
+ schedules.add(s);
+ } while (rs.next());
+
+ return schedules;
+ }
+
+ }
+}
src/main/java/azkaban/migration/scheduler/Schedule.java 633(+304 -329)
diff --git a/src/main/java/azkaban/migration/scheduler/Schedule.java b/src/main/java/azkaban/migration/scheduler/Schedule.java
index 9490243..3572ce3 100644
--- a/src/main/java/azkaban/migration/scheduler/Schedule.java
+++ b/src/main/java/azkaban/migration/scheduler/Schedule.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn, Inc
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -35,329 +35,304 @@ import azkaban.migration.sla.SlaOptions;
import azkaban.utils.Pair;
@Deprecated
-public class Schedule{
-
-// private long projectGuid;
-// private long flowGuid;
-
-// private String scheduleId;
-
- private int projectId;
- private String projectName;
- private String flowName;
- private long firstSchedTime;
- private DateTimeZone timezone;
- private long lastModifyTime;
- private ReadablePeriod period;
- private long nextExecTime;
- private String submitUser;
- private String status;
- private long submitTime;
-
- private ExecutionOptions executionOptions;
- private SlaOptions slaOptions;
-
- public Schedule(
- int projectId,
- String projectName,
- String flowName,
- String status,
- long firstSchedTime,
- DateTimeZone timezone,
- ReadablePeriod period,
- long lastModifyTime,
- long nextExecTime,
- long submitTime,
- String submitUser
- ) {
- this.projectId = projectId;
- this.projectName = projectName;
- this.flowName = flowName;
- this.firstSchedTime = firstSchedTime;
- this.timezone = timezone;
- this.lastModifyTime = lastModifyTime;
- this.period = period;
- this.nextExecTime = nextExecTime;
- this.submitUser = submitUser;
- this.status = status;
- this.submitTime = submitTime;
- this.executionOptions = null;
- this.slaOptions = null;
- }
-
- public Schedule(
- int projectId,
- String projectName,
- String flowName,
- String status,
- long firstSchedTime,
- String timezoneId,
- String period,
- long lastModifyTime,
- long nextExecTime,
- long submitTime,
- String submitUser,
- ExecutionOptions executionOptions,
- SlaOptions slaOptions
- ) {
- this.projectId = projectId;
- this.projectName = projectName;
- this.flowName = flowName;
- this.firstSchedTime = firstSchedTime;
- this.timezone = DateTimeZone.forID(timezoneId);
- this.lastModifyTime = lastModifyTime;
- this.period = parsePeriodString(period);
- this.nextExecTime = nextExecTime;
- this.submitUser = submitUser;
- this.status = status;
- this.submitTime = submitTime;
- this.executionOptions = executionOptions;
- this.slaOptions = slaOptions;
- }
-
- public Schedule(
- int projectId,
- String projectName,
- String flowName,
- String status,
- long firstSchedTime,
- DateTimeZone timezone,
- ReadablePeriod period,
- long lastModifyTime,
- long nextExecTime,
- long submitTime,
- String submitUser,
- ExecutionOptions executionOptions,
- SlaOptions slaOptions
- ) {
- this.projectId = projectId;
- this.projectName = projectName;
- this.flowName = flowName;
- this.firstSchedTime = firstSchedTime;
- this.timezone = timezone;
- this.lastModifyTime = lastModifyTime;
- this.period = period;
- this.nextExecTime = nextExecTime;
- this.submitUser = submitUser;
- this.status = status;
- this.submitTime = submitTime;
- this.executionOptions = executionOptions;
- this.slaOptions = slaOptions;
- }
-
- public ExecutionOptions getExecutionOptions() {
- return executionOptions;
- }
-
- public void setFlowOptions(ExecutionOptions executionOptions) {
- this.executionOptions = executionOptions;
- }
-
- public SlaOptions getSlaOptions() {
- return slaOptions;
- }
-
- public void setSlaOptions(SlaOptions slaOptions) {
- this.slaOptions = slaOptions;
- }
-
- public String getScheduleName() {
- return projectName + "." + flowName + " (" + projectId + ")";
- }
-
- public String toString() {
- return projectName + "." + flowName + " (" + projectId + ")" + " to be run at (starting) " +
- new DateTime(firstSchedTime).toDateTimeISO() + " with recurring period of " + (period == null ? "non-recurring" : createPeriodString(period));
- }
-
- public Pair<Integer, String> getScheduleId() {
- return new Pair<Integer, String>(getProjectId(), getFlowName());
- }
-
- public int getProjectId() {
- return projectId;
- }
-
- public String getProjectName() {
- return projectName;
- }
-
- public String getFlowName() {
- return flowName;
- }
-
- public long getFirstSchedTime() {
- return firstSchedTime;
- }
-
- public DateTimeZone getTimezone() {
- return timezone;
- }
-
- public long getLastModifyTime() {
- return lastModifyTime;
- }
-
- public ReadablePeriod getPeriod() {
- return period;
- }
-
- public long getNextExecTime() {
- return nextExecTime;
- }
-
- public String getSubmitUser() {
- return submitUser;
- }
-
- public String getStatus() {
- return status;
- }
-
- public long getSubmitTime() {
- return submitTime;
- }
-
- public boolean updateTime() {
- if (new DateTime(nextExecTime).isAfterNow()) {
- return true;
- }
-
- if (period != null) {
- DateTime nextTime = getNextRuntime(nextExecTime, timezone, period);
-
- this.nextExecTime = nextTime.getMillis();
- return true;
- }
-
- return false;
- }
-
- private DateTime getNextRuntime(long scheduleTime, DateTimeZone timezone, ReadablePeriod period) {
- DateTime now = new DateTime();
- DateTime date = new DateTime(scheduleTime).withZone(timezone);
- int count = 0;
- while (!now.isBefore(date)) {
- if (count > 100000) {
- throw new IllegalStateException(
- "100000 increments of period did not get to present time.");
- }
-
- if (period == null) {
- break;
- } else {
- date = date.plus(period);
- }
-
- count += 1;
- }
-
- return date;
- }
-
- public static ReadablePeriod parsePeriodString(String periodStr) {
- ReadablePeriod period;
- char periodUnit = periodStr.charAt(periodStr.length() - 1);
- if (periodUnit == 'n') {
- return null;
- }
-
- int periodInt = Integer.parseInt(periodStr.substring(0,
- periodStr.length() - 1));
- switch (periodUnit) {
- case 'M':
- period = Months.months(periodInt);
- break;
- case 'w':
- period = Weeks.weeks(periodInt);
- break;
- case 'd':
- period = Days.days(periodInt);
- break;
- case 'h':
- period = Hours.hours(periodInt);
- break;
- case 'm':
- period = Minutes.minutes(periodInt);
- break;
- case 's':
- period = Seconds.seconds(periodInt);
- break;
- default:
- throw new IllegalArgumentException("Invalid schedule period unit '"
- + periodUnit);
- }
-
- return period;
- }
-
- public static String createPeriodString(ReadablePeriod period) {
- String periodStr = "n";
-
- if (period == null) {
- return "n";
- }
-
- if (period.get(DurationFieldType.months()) > 0) {
- int months = period.get(DurationFieldType.months());
- periodStr = months + "M";
- } else if (period.get(DurationFieldType.weeks()) > 0) {
- int weeks = period.get(DurationFieldType.weeks());
- periodStr = weeks + "w";
- } else if (period.get(DurationFieldType.days()) > 0) {
- int days = period.get(DurationFieldType.days());
- periodStr = days + "d";
- } else if (period.get(DurationFieldType.hours()) > 0) {
- int hours = period.get(DurationFieldType.hours());
- periodStr = hours + "h";
- } else if (period.get(DurationFieldType.minutes()) > 0) {
- int minutes = period.get(DurationFieldType.minutes());
- periodStr = minutes + "m";
- } else if (period.get(DurationFieldType.seconds()) > 0) {
- int seconds = period.get(DurationFieldType.seconds());
- periodStr = seconds + "s";
- }
-
- return periodStr;
- }
-
-
- public Map<String,Object> optionsToObject() {
- if(executionOptions != null || slaOptions != null) {
- HashMap<String, Object> schedObj = new HashMap<String, Object>();
-
- if(executionOptions != null) {
- schedObj.put("executionOptions", executionOptions.toObject());
- }
- if(slaOptions != null) {
- schedObj.put("slaOptions", slaOptions.toObject());
- }
-
- return schedObj;
- }
- return null;
- }
-
- public void createAndSetScheduleOptions(Object obj) {
- @SuppressWarnings("unchecked")
- HashMap<String, Object> schedObj = (HashMap<String, Object>)obj;
- if (schedObj.containsKey("executionOptions")) {
- ExecutionOptions execOptions = ExecutionOptions.createFromObject(schedObj.get("executionOptions"));
- this.executionOptions = execOptions;
- }
- else if (schedObj.containsKey("flowOptions")){
- ExecutionOptions execOptions = ExecutionOptions.createFromObject(schedObj.get("flowOptions"));
- this.executionOptions = execOptions;
- execOptions.setConcurrentOption(ExecutionOptions.CONCURRENT_OPTION_SKIP);
- }
- else {
- this.executionOptions = new ExecutionOptions();
- this.executionOptions.setConcurrentOption(ExecutionOptions.CONCURRENT_OPTION_SKIP);
- }
-
- if (schedObj.containsKey("slaOptions")) {
- SlaOptions slaOptions = SlaOptions.fromObject(schedObj.get("slaOptions"));
- this.slaOptions = slaOptions;
- }
- }
-}
\ No newline at end of file
+public class Schedule {
+
+ // private long projectGuid;
+ // private long flowGuid;
+
+ // private String scheduleId;
+
+ private int projectId;
+ private String projectName;
+ private String flowName;
+ private long firstSchedTime;
+ private DateTimeZone timezone;
+ private long lastModifyTime;
+ private ReadablePeriod period;
+ private long nextExecTime;
+ private String submitUser;
+ private String status;
+ private long submitTime;
+
+ private ExecutionOptions executionOptions;
+ private SlaOptions slaOptions;
+
+ public Schedule(int projectId, String projectName, String flowName,
+ String status, long firstSchedTime, DateTimeZone timezone,
+ ReadablePeriod period, long lastModifyTime, long nextExecTime,
+ long submitTime, String submitUser) {
+ this.projectId = projectId;
+ this.projectName = projectName;
+ this.flowName = flowName;
+ this.firstSchedTime = firstSchedTime;
+ this.timezone = timezone;
+ this.lastModifyTime = lastModifyTime;
+ this.period = period;
+ this.nextExecTime = nextExecTime;
+ this.submitUser = submitUser;
+ this.status = status;
+ this.submitTime = submitTime;
+ this.executionOptions = null;
+ this.slaOptions = null;
+ }
+
+ public Schedule(int projectId, String projectName, String flowName,
+ String status, long firstSchedTime, String timezoneId, String period,
+ long lastModifyTime, long nextExecTime, long submitTime,
+ String submitUser, ExecutionOptions executionOptions,
+ SlaOptions slaOptions) {
+ this.projectId = projectId;
+ this.projectName = projectName;
+ this.flowName = flowName;
+ this.firstSchedTime = firstSchedTime;
+ this.timezone = DateTimeZone.forID(timezoneId);
+ this.lastModifyTime = lastModifyTime;
+ this.period = parsePeriodString(period);
+ this.nextExecTime = nextExecTime;
+ this.submitUser = submitUser;
+ this.status = status;
+ this.submitTime = submitTime;
+ this.executionOptions = executionOptions;
+ this.slaOptions = slaOptions;
+ }
+
+ public Schedule(int projectId, String projectName, String flowName,
+ String status, long firstSchedTime, DateTimeZone timezone,
+ ReadablePeriod period, long lastModifyTime, long nextExecTime,
+ long submitTime, String submitUser, ExecutionOptions executionOptions,
+ SlaOptions slaOptions) {
+ this.projectId = projectId;
+ this.projectName = projectName;
+ this.flowName = flowName;
+ this.firstSchedTime = firstSchedTime;
+ this.timezone = timezone;
+ this.lastModifyTime = lastModifyTime;
+ this.period = period;
+ this.nextExecTime = nextExecTime;
+ this.submitUser = submitUser;
+ this.status = status;
+ this.submitTime = submitTime;
+ this.executionOptions = executionOptions;
+ this.slaOptions = slaOptions;
+ }
+
+ public ExecutionOptions getExecutionOptions() {
+ return executionOptions;
+ }
+
+ public void setFlowOptions(ExecutionOptions executionOptions) {
+ this.executionOptions = executionOptions;
+ }
+
+ public SlaOptions getSlaOptions() {
+ return slaOptions;
+ }
+
+ public void setSlaOptions(SlaOptions slaOptions) {
+ this.slaOptions = slaOptions;
+ }
+
+ public String getScheduleName() {
+ return projectName + "." + flowName + " (" + projectId + ")";
+ }
+
+ public String toString() {
+ return projectName + "." + flowName + " (" + projectId + ")"
+ + " to be run at (starting) "
+ + new DateTime(firstSchedTime).toDateTimeISO()
+ + " with recurring period of "
+ + (period == null ? "non-recurring" : createPeriodString(period));
+ }
+
+ public Pair<Integer, String> getScheduleId() {
+ return new Pair<Integer, String>(getProjectId(), getFlowName());
+ }
+
+ public int getProjectId() {
+ return projectId;
+ }
+
+ public String getProjectName() {
+ return projectName;
+ }
+
+ public String getFlowName() {
+ return flowName;
+ }
+
+ public long getFirstSchedTime() {
+ return firstSchedTime;
+ }
+
+ public DateTimeZone getTimezone() {
+ return timezone;
+ }
+
+ public long getLastModifyTime() {
+ return lastModifyTime;
+ }
+
+ public ReadablePeriod getPeriod() {
+ return period;
+ }
+
+ public long getNextExecTime() {
+ return nextExecTime;
+ }
+
+ public String getSubmitUser() {
+ return submitUser;
+ }
+
+ public String getStatus() {
+ return status;
+ }
+
+ public long getSubmitTime() {
+ return submitTime;
+ }
+
+ public boolean updateTime() {
+ if (new DateTime(nextExecTime).isAfterNow()) {
+ return true;
+ }
+
+ if (period != null) {
+ DateTime nextTime = getNextRuntime(nextExecTime, timezone, period);
+
+ this.nextExecTime = nextTime.getMillis();
+ return true;
+ }
+
+ return false;
+ }
+
+ private DateTime getNextRuntime(long scheduleTime, DateTimeZone timezone,
+ ReadablePeriod period) {
+ DateTime now = new DateTime();
+ DateTime date = new DateTime(scheduleTime).withZone(timezone);
+ int count = 0;
+ while (!now.isBefore(date)) {
+ if (count > 100000) {
+ throw new IllegalStateException(
+ "100000 increments of period did not get to present time.");
+ }
+
+ if (period == null) {
+ break;
+ } else {
+ date = date.plus(period);
+ }
+
+ count += 1;
+ }
+
+ return date;
+ }
+
+ public static ReadablePeriod parsePeriodString(String periodStr) {
+ ReadablePeriod period;
+ char periodUnit = periodStr.charAt(periodStr.length() - 1);
+ if (periodUnit == 'n') {
+ return null;
+ }
+
+ int periodInt =
+ Integer.parseInt(periodStr.substring(0, periodStr.length() - 1));
+ switch (periodUnit) {
+ case 'M':
+ period = Months.months(periodInt);
+ break;
+ case 'w':
+ period = Weeks.weeks(periodInt);
+ break;
+ case 'd':
+ period = Days.days(periodInt);
+ break;
+ case 'h':
+ period = Hours.hours(periodInt);
+ break;
+ case 'm':
+ period = Minutes.minutes(periodInt);
+ break;
+ case 's':
+ period = Seconds.seconds(periodInt);
+ break;
+ default:
+      throw new IllegalArgumentException("Invalid schedule period unit '"
+          + periodUnit + "'");
+ }
+
+ return period;
+ }
+
+ public static String createPeriodString(ReadablePeriod period) {
+ String periodStr = "n";
+
+ if (period == null) {
+ return "n";
+ }
+
+ if (period.get(DurationFieldType.months()) > 0) {
+ int months = period.get(DurationFieldType.months());
+ periodStr = months + "M";
+ } else if (period.get(DurationFieldType.weeks()) > 0) {
+ int weeks = period.get(DurationFieldType.weeks());
+ periodStr = weeks + "w";
+ } else if (period.get(DurationFieldType.days()) > 0) {
+ int days = period.get(DurationFieldType.days());
+ periodStr = days + "d";
+ } else if (period.get(DurationFieldType.hours()) > 0) {
+ int hours = period.get(DurationFieldType.hours());
+ periodStr = hours + "h";
+ } else if (period.get(DurationFieldType.minutes()) > 0) {
+ int minutes = period.get(DurationFieldType.minutes());
+ periodStr = minutes + "m";
+ } else if (period.get(DurationFieldType.seconds()) > 0) {
+ int seconds = period.get(DurationFieldType.seconds());
+ periodStr = seconds + "s";
+ }
+
+ return periodStr;
+ }
+
+ public Map<String, Object> optionsToObject() {
+ if (executionOptions != null || slaOptions != null) {
+ HashMap<String, Object> schedObj = new HashMap<String, Object>();
+
+ if (executionOptions != null) {
+ schedObj.put("executionOptions", executionOptions.toObject());
+ }
+ if (slaOptions != null) {
+ schedObj.put("slaOptions", slaOptions.toObject());
+ }
+
+ return schedObj;
+ }
+ return null;
+ }
+
+ public void createAndSetScheduleOptions(Object obj) {
+ @SuppressWarnings("unchecked")
+ HashMap<String, Object> schedObj = (HashMap<String, Object>) obj;
+ if (schedObj.containsKey("executionOptions")) {
+ ExecutionOptions execOptions =
+ ExecutionOptions.createFromObject(schedObj.get("executionOptions"));
+ this.executionOptions = execOptions;
+ } else if (schedObj.containsKey("flowOptions")) {
+ ExecutionOptions execOptions =
+ ExecutionOptions.createFromObject(schedObj.get("flowOptions"));
+ this.executionOptions = execOptions;
+ execOptions.setConcurrentOption(ExecutionOptions.CONCURRENT_OPTION_SKIP);
+ } else {
+ this.executionOptions = new ExecutionOptions();
+ this.executionOptions
+ .setConcurrentOption(ExecutionOptions.CONCURRENT_OPTION_SKIP);
+ }
+
+ if (schedObj.containsKey("slaOptions")) {
+ SlaOptions slaOptions = SlaOptions.fromObject(schedObj.get("slaOptions"));
+ this.slaOptions = slaOptions;
+ }
+ }
+}
diff --git a/src/main/java/azkaban/migration/scheduler/ScheduleLoader.java b/src/main/java/azkaban/migration/scheduler/ScheduleLoader.java
index 6511d9c..0169548 100644
--- a/src/main/java/azkaban/migration/scheduler/ScheduleLoader.java
+++ b/src/main/java/azkaban/migration/scheduler/ScheduleLoader.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn, Inc
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -20,15 +20,15 @@ import java.util.List;
@Deprecated
public interface ScheduleLoader {
-
- public void insertSchedule(Schedule s) throws ScheduleManagerException;
-
- public void updateSchedule(Schedule s) throws ScheduleManagerException;
-
- public List<Schedule> loadSchedules() throws ScheduleManagerException;
-
- public void removeSchedule(Schedule s) throws ScheduleManagerException;
-
- public void updateNextExecTime(Schedule s) throws ScheduleManagerException;
-
-}
\ No newline at end of file
+
+ public void insertSchedule(Schedule s) throws ScheduleManagerException;
+
+ public void updateSchedule(Schedule s) throws ScheduleManagerException;
+
+ public List<Schedule> loadSchedules() throws ScheduleManagerException;
+
+ public void removeSchedule(Schedule s) throws ScheduleManagerException;
+
+ public void updateNextExecTime(Schedule s) throws ScheduleManagerException;
+
+}
diff --git a/src/main/java/azkaban/migration/scheduler/ScheduleManagerException.java b/src/main/java/azkaban/migration/scheduler/ScheduleManagerException.java
index f0f6705..536d098 100644
--- a/src/main/java/azkaban/migration/scheduler/ScheduleManagerException.java
+++ b/src/main/java/azkaban/migration/scheduler/ScheduleManagerException.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn, Inc
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -17,14 +17,14 @@
package azkaban.migration.scheduler;
@Deprecated
-public class ScheduleManagerException extends Exception{
- private static final long serialVersionUID = 1L;
+public class ScheduleManagerException extends Exception {
+ private static final long serialVersionUID = 1L;
- public ScheduleManagerException(String message) {
- super(message);
- }
-
- public ScheduleManagerException(String message, Throwable cause) {
- super(message, cause);
- }
-}
\ No newline at end of file
+ public ScheduleManagerException(String message) {
+ super(message);
+ }
+
+ public ScheduleManagerException(String message, Throwable cause) {
+ super(message, cause);
+ }
+}
src/main/java/azkaban/migration/sla/SLA.java 490(+255 -235)
diff --git a/src/main/java/azkaban/migration/sla/SLA.java b/src/main/java/azkaban/migration/sla/SLA.java
index 0e75965..0ac7a7b 100644
--- a/src/main/java/azkaban/migration/sla/SLA.java
+++ b/src/main/java/azkaban/migration/sla/SLA.java
@@ -1,3 +1,19 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.migration.sla;
import java.util.ArrayList;
@@ -13,240 +29,244 @@ import azkaban.migration.scheduler.Schedule;
@Deprecated
public class SLA {
- public static enum SlaRule {
- SUCCESS(1), FINISH(2), WAITANDCHECKJOB(3);
-
- private int numVal;
-
- SlaRule(int numVal) {
- this.numVal = numVal;
- }
-
- public int getNumVal() {
- return numVal;
- }
-
- public static SlaRule fromInteger(int x) {
- switch (x) {
- case 1:
- return SUCCESS;
- case 2:
- return FINISH;
- case 3:
- return WAITANDCHECKJOB;
- default:
- return SUCCESS;
- }
- }
- }
-
- public static enum SlaAction {
- EMAIL(1), KILL(2);
-
- private int numVal;
-
- SlaAction(int numVal) {
- this.numVal = numVal;
- }
-
- public int getNumVal() {
- return numVal;
- }
-
- public static SlaAction fromInteger(int x) {
- switch (x) {
- case 1:
- return EMAIL;
- case 2:
- return KILL;
- default:
- return EMAIL;
- }
- }
- }
-
- public static class SlaSetting {
- public String getId() {
- return id;
- }
- public void setId(String id) {
- this.id = id;
- }
- public ReadablePeriod getDuration() {
- return duration;
- }
- public void setDuration(ReadablePeriod duration) {
- this.duration = duration;
- }
- public SlaRule getRule() {
- return rule;
- }
- public void setRule(SlaRule rule) {
- this.rule = rule;
- }
- public List<SlaAction> getActions() {
- return actions;
- }
- public void setActions(List<SlaAction> actions) {
- this.actions = actions;
- }
-
- public Object toObject() {
- Map<String, Object> obj = new HashMap<String, Object>();
- obj.put("id", id);
- obj.put("duration", Schedule.createPeriodString(duration));
-// List<String> rulesObj = new ArrayList<String>();
-// for(SlaRule rule : rules) {
-// rulesObj.add(rule.toString());
-// }
-// obj.put("rules", rulesObj);
- obj.put("rule", rule.toString());
- List<String> actionsObj = new ArrayList<String>();
- for(SlaAction act : actions) {
- actionsObj.add(act.toString());
- }
- obj.put("actions", actionsObj);
- return obj;
- }
-
- @SuppressWarnings("unchecked")
- public static SlaSetting fromObject(Object obj) {
- Map<String, Object> slaObj = (HashMap<String, Object>) obj;
- String subId = (String) slaObj.get("id");
- ReadablePeriod dur = Schedule.parsePeriodString((String) slaObj.get("duration"));
-// List<String> rulesObj = (ArrayList<String>) slaObj.get("rules");
-// List<SlaRule> slaRules = new ArrayList<SLA.SlaRule>();
-// for(String rule : rulesObj) {
-// slaRules.add(SlaRule.valueOf(rule));
-// }
- SlaRule slaRule = SlaRule.valueOf((String) slaObj.get("rule"));
- List<String> actsObj = (ArrayList<String>) slaObj.get("actions");
- List<SlaAction> slaActs = new ArrayList<SlaAction>();
- for(String act : actsObj) {
- slaActs.add(SlaAction.valueOf(act));
- }
-
- SlaSetting ret = new SlaSetting();
- ret.setId(subId);
- ret.setDuration(dur);
- ret.setRule(slaRule);
- ret.setActions(slaActs);
- return ret;
- }
-
- private String id;
- private ReadablePeriod duration;
- private SlaRule rule = SlaRule.SUCCESS;
- private List<SlaAction> actions;
- }
-
- private int execId;
- private String jobName;
- private DateTime checkTime;
- private List<String> emails;
- private List<SlaAction> actions;
- private List<SlaSetting> jobSettings;
- private SlaRule rule;
-
- public SLA(
- int execId,
- String jobName,
- DateTime checkTime,
- List<String> emails,
- List<SlaAction> slaActions,
- List<SlaSetting> jobSettings,
- SlaRule slaRule
- ) {
- this.execId = execId;
- this.jobName = jobName;
- this.checkTime = checkTime;
- this.emails = emails;
- this.actions = slaActions;
- this.jobSettings = jobSettings;
- this.rule = slaRule;
- }
-
- public int getExecId() {
- return execId;
- }
-
- public String getJobName() {
- return jobName;
- }
-
- public DateTime getCheckTime() {
- return checkTime;
- }
-
- public List<String> getEmails() {
- return emails;
- }
-
- public List<SlaAction> getActions() {
- return actions;
- }
-
- public List<SlaSetting> getJobSettings() {
- return jobSettings;
- }
-
- public SlaRule getRule() {
- return rule;
- }
-
- public String toString() {
- return execId + " " + jobName + " to be checked at " + checkTime.toDateTimeISO();
- }
-
- public Map<String,Object> optionToObject() {
- HashMap<String, Object> slaObj = new HashMap<String, Object>();
-
- slaObj.put("emails", emails);
-// slaObj.put("rule", rule.toString());
-
- List<String> actionsObj = new ArrayList<String>();
- for(SlaAction act : actions) {
- actionsObj.add(act.toString());
- }
- slaObj.put("actions", actionsObj);
-
- if(jobSettings != null && jobSettings.size() > 0) {
- List<Object> settingsObj = new ArrayList<Object>();
- for(SlaSetting set : jobSettings) {
- settingsObj.add(set.toObject());
- }
- slaObj.put("jobSettings", settingsObj);
- }
-
- return slaObj;
- }
-
- @SuppressWarnings("unchecked")
- public static SLA createSlaFromObject(int execId, String jobName, DateTime checkTime, SlaRule rule, Object obj) {
-
- HashMap<String, Object> slaObj = (HashMap<String,Object>)obj;
-
- List<String> emails = (List<String>)slaObj.get("emails");
-// SlaRule rule = SlaRule.valueOf((String)slaObj.get("rule"));
- List<String> actsObj = (ArrayList<String>) slaObj.get("actions");
- List<SlaAction> slaActs = new ArrayList<SlaAction>();
- for(String act : actsObj) {
- slaActs.add(SlaAction.valueOf(act));
- }
- List<SlaSetting> jobSets = null;
- if(slaObj.containsKey("jobSettings") && slaObj.get("jobSettings") != null) {
- jobSets = new ArrayList<SLA.SlaSetting>();
- for(Object set : (List<Object>)slaObj.get("jobSettings")) {
- SlaSetting jobSet = SlaSetting.fromObject(set);
- jobSets.add(jobSet);
- }
- }
-
- return new SLA(execId, jobName, checkTime, emails, slaActs, jobSets, rule);
- }
-
- public void setCheckTime(DateTime time) {
- this.checkTime = time;
- }
+ public static enum SlaRule {
+ SUCCESS(1), FINISH(2), WAITANDCHECKJOB(3);
+
+ private int numVal;
+
+ SlaRule(int numVal) {
+ this.numVal = numVal;
+ }
+
+ public int getNumVal() {
+ return numVal;
+ }
+
+ public static SlaRule fromInteger(int x) {
+ switch (x) {
+ case 1:
+ return SUCCESS;
+ case 2:
+ return FINISH;
+ case 3:
+ return WAITANDCHECKJOB;
+ default:
+ return SUCCESS;
+ }
+ }
+ }
+
+ public static enum SlaAction {
+ EMAIL(1), KILL(2);
+
+ private int numVal;
+
+ SlaAction(int numVal) {
+ this.numVal = numVal;
+ }
+
+ public int getNumVal() {
+ return numVal;
+ }
+
+ public static SlaAction fromInteger(int x) {
+ switch (x) {
+ case 1:
+ return EMAIL;
+ case 2:
+ return KILL;
+ default:
+ return EMAIL;
+ }
+ }
+ }
+
+ public static class SlaSetting {
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public ReadablePeriod getDuration() {
+ return duration;
+ }
+
+ public void setDuration(ReadablePeriod duration) {
+ this.duration = duration;
+ }
+
+ public SlaRule getRule() {
+ return rule;
+ }
+
+ public void setRule(SlaRule rule) {
+ this.rule = rule;
+ }
+
+ public List<SlaAction> getActions() {
+ return actions;
+ }
+
+ public void setActions(List<SlaAction> actions) {
+ this.actions = actions;
+ }
+
+ public Object toObject() {
+ Map<String, Object> obj = new HashMap<String, Object>();
+ obj.put("id", id);
+ obj.put("duration", Schedule.createPeriodString(duration));
+ // List<String> rulesObj = new ArrayList<String>();
+ // for(SlaRule rule : rules) {
+ // rulesObj.add(rule.toString());
+ // }
+ // obj.put("rules", rulesObj);
+ obj.put("rule", rule.toString());
+ List<String> actionsObj = new ArrayList<String>();
+ for (SlaAction act : actions) {
+ actionsObj.add(act.toString());
+ }
+ obj.put("actions", actionsObj);
+ return obj;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static SlaSetting fromObject(Object obj) {
+ Map<String, Object> slaObj = (HashMap<String, Object>) obj;
+ String subId = (String) slaObj.get("id");
+ ReadablePeriod dur =
+ Schedule.parsePeriodString((String) slaObj.get("duration"));
+ // List<String> rulesObj = (ArrayList<String>) slaObj.get("rules");
+ // List<SlaRule> slaRules = new ArrayList<SLA.SlaRule>();
+ // for(String rule : rulesObj) {
+ // slaRules.add(SlaRule.valueOf(rule));
+ // }
+ SlaRule slaRule = SlaRule.valueOf((String) slaObj.get("rule"));
+ List<String> actsObj = (ArrayList<String>) slaObj.get("actions");
+ List<SlaAction> slaActs = new ArrayList<SlaAction>();
+ for (String act : actsObj) {
+ slaActs.add(SlaAction.valueOf(act));
+ }
+
+ SlaSetting ret = new SlaSetting();
+ ret.setId(subId);
+ ret.setDuration(dur);
+ ret.setRule(slaRule);
+ ret.setActions(slaActs);
+ return ret;
+ }
+
+ private String id;
+ private ReadablePeriod duration;
+ private SlaRule rule = SlaRule.SUCCESS;
+ private List<SlaAction> actions;
+ }
+
+ private int execId;
+ private String jobName;
+ private DateTime checkTime;
+ private List<String> emails;
+ private List<SlaAction> actions;
+ private List<SlaSetting> jobSettings;
+ private SlaRule rule;
+
+ public SLA(int execId, String jobName, DateTime checkTime,
+ List<String> emails, List<SlaAction> slaActions,
+ List<SlaSetting> jobSettings, SlaRule slaRule) {
+ this.execId = execId;
+ this.jobName = jobName;
+ this.checkTime = checkTime;
+ this.emails = emails;
+ this.actions = slaActions;
+ this.jobSettings = jobSettings;
+ this.rule = slaRule;
+ }
+
+ public int getExecId() {
+ return execId;
+ }
+
+ public String getJobName() {
+ return jobName;
+ }
+
+ public DateTime getCheckTime() {
+ return checkTime;
+ }
+
+ public List<String> getEmails() {
+ return emails;
+ }
+
+ public List<SlaAction> getActions() {
+ return actions;
+ }
+
+ public List<SlaSetting> getJobSettings() {
+ return jobSettings;
+ }
+
+ public SlaRule getRule() {
+ return rule;
+ }
+
+ public String toString() {
+ return execId + " " + jobName + " to be checked at "
+ + checkTime.toDateTimeISO();
+ }
+
+ public Map<String, Object> optionToObject() {
+ HashMap<String, Object> slaObj = new HashMap<String, Object>();
+
+ slaObj.put("emails", emails);
+ // slaObj.put("rule", rule.toString());
+
+ List<String> actionsObj = new ArrayList<String>();
+ for (SlaAction act : actions) {
+ actionsObj.add(act.toString());
+ }
+ slaObj.put("actions", actionsObj);
+
+ if (jobSettings != null && jobSettings.size() > 0) {
+ List<Object> settingsObj = new ArrayList<Object>();
+ for (SlaSetting set : jobSettings) {
+ settingsObj.add(set.toObject());
+ }
+ slaObj.put("jobSettings", settingsObj);
+ }
+
+ return slaObj;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static SLA createSlaFromObject(int execId, String jobName,
+ DateTime checkTime, SlaRule rule, Object obj) {
+
+ HashMap<String, Object> slaObj = (HashMap<String, Object>) obj;
+
+ List<String> emails = (List<String>) slaObj.get("emails");
+ // SlaRule rule = SlaRule.valueOf((String)slaObj.get("rule"));
+ List<String> actsObj = (ArrayList<String>) slaObj.get("actions");
+ List<SlaAction> slaActs = new ArrayList<SlaAction>();
+ for (String act : actsObj) {
+ slaActs.add(SlaAction.valueOf(act));
+ }
+ List<SlaSetting> jobSets = null;
+ if (slaObj.containsKey("jobSettings") && slaObj.get("jobSettings") != null) {
+ jobSets = new ArrayList<SLA.SlaSetting>();
+ for (Object set : (List<Object>) slaObj.get("jobSettings")) {
+ SlaSetting jobSet = SlaSetting.fromObject(set);
+ jobSets.add(jobSet);
+ }
+ }
+
+ return new SLA(execId, jobName, checkTime, emails, slaActs, jobSets, rule);
+ }
+
+ public void setCheckTime(DateTime time) {
+ this.checkTime = time;
+ }
}
src/main/java/azkaban/migration/sla/SlaOptions.java 102(+62 -40)
diff --git a/src/main/java/azkaban/migration/sla/SlaOptions.java b/src/main/java/azkaban/migration/sla/SlaOptions.java
index f7b9d49..5a01c7f 100644
--- a/src/main/java/azkaban/migration/sla/SlaOptions.java
+++ b/src/main/java/azkaban/migration/sla/SlaOptions.java
@@ -1,3 +1,19 @@
+/*
+ * Copyright 2014 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
package azkaban.migration.sla;
import java.util.ArrayList;
@@ -10,43 +26,49 @@ import azkaban.migration.sla.SLA.SlaSetting;
@Deprecated
public class SlaOptions {
- public List<String> getSlaEmails() {
- return slaEmails;
- }
- public void setSlaEmails(List<String> slaEmails) {
- this.slaEmails = slaEmails;
- }
- public List<SlaSetting> getSettings() {
- return settings;
- }
- public void setSettings(List<SlaSetting> settings) {
- this.settings = settings;
- }
- private List<String> slaEmails;
- private List<SlaSetting> settings;
- public Object toObject() {
- Map<String, Object> obj = new HashMap<String, Object>();
- obj.put("slaEmails", slaEmails);
- List<Object> slaSettings = new ArrayList<Object>();
- for(SlaSetting s : settings) {
- slaSettings.add(s.toObject());
- }
- obj.put("settings", slaSettings);
- return obj;
- }
- @SuppressWarnings("unchecked")
- public static SlaOptions fromObject(Object object) {
- if(object != null) {
- SlaOptions slaOptions = new SlaOptions();
- Map<String, Object> obj = (HashMap<String, Object>) object;
- slaOptions.setSlaEmails((List<String>) obj.get("slaEmails"));
- List<SlaSetting> slaSets = new ArrayList<SlaSetting>();
- for(Object set: (List<Object>)obj.get("settings")) {
- slaSets.add(SlaSetting.fromObject(set));
- }
- slaOptions.setSettings(slaSets);
- return slaOptions;
- }
- return null;
- }
-}
\ No newline at end of file
+ public List<String> getSlaEmails() {
+ return slaEmails;
+ }
+
+ public void setSlaEmails(List<String> slaEmails) {
+ this.slaEmails = slaEmails;
+ }
+
+ public List<SlaSetting> getSettings() {
+ return settings;
+ }
+
+ public void setSettings(List<SlaSetting> settings) {
+ this.settings = settings;
+ }
+
+ private List<String> slaEmails;
+ private List<SlaSetting> settings;
+
+ public Object toObject() {
+ Map<String, Object> obj = new HashMap<String, Object>();
+ obj.put("slaEmails", slaEmails);
+ List<Object> slaSettings = new ArrayList<Object>();
+ for (SlaSetting s : settings) {
+ slaSettings.add(s.toObject());
+ }
+ obj.put("settings", slaSettings);
+ return obj;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static SlaOptions fromObject(Object object) {
+ if (object != null) {
+ SlaOptions slaOptions = new SlaOptions();
+ Map<String, Object> obj = (HashMap<String, Object>) object;
+ slaOptions.setSlaEmails((List<String>) obj.get("slaEmails"));
+ List<SlaSetting> slaSets = new ArrayList<SlaSetting>();
+ for (Object set : (List<Object>) obj.get("settings")) {
+ slaSets.add(SlaSetting.fromObject(set));
+ }
+ slaOptions.setSettings(slaSets);
+ return slaOptions;
+ }
+ return null;
+ }
+}
src/main/java/azkaban/project/JdbcProjectLoader.java 2663(+1403 -1260)
diff --git a/src/main/java/azkaban/project/JdbcProjectLoader.java b/src/main/java/azkaban/project/JdbcProjectLoader.java
index c3cf424..e7146af 100644
--- a/src/main/java/azkaban/project/JdbcProjectLoader.java
+++ b/src/main/java/azkaban/project/JdbcProjectLoader.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -52,1261 +52,1404 @@ import azkaban.utils.Props;
import azkaban.utils.PropsUtils;
import azkaban.utils.Triple;
-public class JdbcProjectLoader extends AbstractJdbcLoader implements ProjectLoader {
- private static final Logger logger = Logger.getLogger(JdbcProjectLoader.class);
-
- private static final int CHUCK_SIZE = 1024*1024*10;
- private File tempDir;
-
- private EncodingType defaultEncodingType = EncodingType.GZIP;
-
- public JdbcProjectLoader(Props props) {
- super(props);
- tempDir = new File(props.getString("project.temp.dir", "temp"));
- if (!tempDir.exists()) {
- tempDir.mkdirs();
- }
- }
-
- @Override
- public List<Project> fetchAllActiveProjects() throws ProjectManagerException {
- Connection connection = getConnection();
-
- List<Project> projects = null;
- try {
- projects = fetchAllActiveProjects(connection);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
-
- return projects;
- }
-
- private List<Project> fetchAllActiveProjects(Connection connection) throws ProjectManagerException {
- QueryRunner runner = new QueryRunner();
-
- ProjectResultHandler handler = new ProjectResultHandler();
- List<Project> projects = null;
- try {
- projects = runner.query(connection, ProjectResultHandler.SELECT_ALL_ACTIVE_PROJECTS, handler);
-
- for (Project project: projects) {
- List<Triple<String, Boolean, Permission>> permissions = fetchPermissionsForProject(connection, project);
-
- for (Triple<String, Boolean, Permission> entry: permissions) {
- if(entry.getSecond()) {
- project.setGroupPermission(entry.getFirst(), entry.getThird());
- }
- else {
- project.setUserPermission(entry.getFirst(), entry.getThird());
- }
- }
- }
- }
- catch (SQLException e) {
- throw new ProjectManagerException("Error retrieving all projects", e);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
-
- return projects;
- }
-
- @Override
- public Project fetchProjectById(int id) throws ProjectManagerException {
- Connection connection = getConnection();
-
- Project project = null;
- try {
- project = fetchProjectById(connection, id);
- } finally {
- DbUtils.closeQuietly(connection);
- }
-
- return project;
- }
-
- private Project fetchProjectById(Connection connection, int id) throws ProjectManagerException {
- QueryRunner runner = new QueryRunner();
- // Fetch the project
- Project project = null;
- ProjectResultHandler handler = new ProjectResultHandler();
- try {
- List<Project> projects = runner.query(connection, ProjectResultHandler.SELECT_PROJECT_BY_ID, handler, id);
- if (projects.isEmpty()) {
- throw new ProjectManagerException("No active project with id " + id + " exists in db.");
- }
-
- project = projects.get(0);
- } catch (SQLException e) {
- logger.error(ProjectResultHandler.SELECT_PROJECT_BY_ID + " failed.");
- throw new ProjectManagerException("Query for existing project failed. Project " + id, e);
- }
-
- // Fetch the user permissions
- List<Triple<String, Boolean,Permission>> permissions = fetchPermissionsForProject(connection, project);
-
- for (Triple<String, Boolean, Permission> perm: permissions) {
- if (perm.getThird().toFlags() != 0) {
- if (perm.getSecond()) {
- project.setGroupPermission(perm.getFirst(), perm.getThird());
- }
- else {
- project.setUserPermission(perm.getFirst(), perm.getThird());
- }
- }
- }
-
- return project;
- }
-
- private List<Triple<String, Boolean, Permission>> fetchPermissionsForProject(Connection connection, Project project) throws ProjectManagerException {
- ProjectPermissionsResultHandler permHander = new ProjectPermissionsResultHandler();
-
- QueryRunner runner = new QueryRunner();
- List<Triple<String, Boolean,Permission>> permissions = null;
- try {
- permissions = runner.query(connection, ProjectPermissionsResultHandler.SELECT_PROJECT_PERMISSION, permHander, project.getId());
- } catch (SQLException e) {
- throw new ProjectManagerException("Query for permissions for " + project.getName() + " failed.", e);
- }
-
- return permissions;
- }
-
- /**
- * Creates a Project in the db.
- *
- * It will throw an exception if it finds an active project of the same name, or the SQL fails
- */
- @Override
- public Project createNewProject(String name, String description, User creator) throws ProjectManagerException {
- Connection connection = getConnection();
-
- Project project;
- try {
- // No need to commit, since createNewProject should commit.
- project = createNewProject(connection, name, description, creator);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
-
- return project;
- }
-
- private synchronized Project createNewProject(Connection connection, String name, String description, User creator) throws ProjectManagerException {
- QueryRunner runner = new QueryRunner();
- ProjectResultHandler handler = new ProjectResultHandler();
-
- // See if it exists first.
- try {
- List<Project> project = runner.query(connection, ProjectResultHandler.SELECT_ACTIVE_PROJECT_BY_NAME, handler, name);
- if (!project.isEmpty()) {
- throw new ProjectManagerException("Active project with name " + name + " already exists in db.");
- }
- } catch (SQLException e) {
- logger.error(e);
- throw new ProjectManagerException("Checking for existing project failed. " + name, e);
- }
-
- final String INSERT_PROJECT = "INSERT INTO projects ( name, active, modified_time, create_time, version, last_modified_by, description, enc_type, settings_blob) values (?,?,?,?,?,?,?,?,?)";
- // Insert project
- try {
- long time = System.currentTimeMillis();
- int i = runner.update(connection, INSERT_PROJECT, name, true, time, time, null, creator.getUserId(), description, defaultEncodingType.getNumVal(), null);
- if (i == 0) {
- throw new ProjectManagerException("No projects have been inserted.");
- }
- connection.commit();
-
- } catch (SQLException e) {
- logger.error(INSERT_PROJECT + " failed.");
- try {
- connection.rollback();
- } catch (SQLException e1) {
- e1.printStackTrace();
- }
- throw new ProjectManagerException("Insert project for existing project failed. " + name, e);
- }
-
- // Do another query to grab and return the project.
- Project project = null;
- try {
- List<Project> projects = runner.query(connection, ProjectResultHandler.SELECT_ACTIVE_PROJECT_BY_NAME, handler, name);
- if (projects.isEmpty()) {
- throw new ProjectManagerException("No active project with name " + name + " exists in db.");
- }
- else if (projects.size() > 1) {
- throw new ProjectManagerException("More than one active project " + name);
- }
-
- project = projects.get(0);
- } catch (SQLException e) {
- logger.error(e);
- throw new ProjectManagerException("Checking for existing project failed. " + name, e);
- }
-
- return project;
- }
-
- @Override
- public void uploadProjectFile(Project project, int version, String filetype, String filename, File localFile, String uploader) throws ProjectManagerException {
- logger.info("Uploading to " + project.getName() + " version:" + version + " file:" + filename);
- Connection connection = getConnection();
-
- try {
- uploadProjectFile(connection, project, version, filetype, filename, localFile, uploader);
- connection.commit();
- logger.info("Commiting upload " + localFile.getName());
- }
- catch (SQLException e) {
- logger.error(e);
- throw new ProjectManagerException("Error getting DB connection.", e);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
- }
-
- private void uploadProjectFile(Connection connection, Project project, int version, String filetype, String filename, File localFile, String uploader) throws ProjectManagerException {
- QueryRunner runner = new QueryRunner();
- long updateTime = System.currentTimeMillis();
-
- logger.info("Creating message digest for upload " + localFile.getName());
- byte[] md5 = null;
- try {
- md5 = Md5Hasher.md5Hash(localFile);
- } catch (IOException e) {
- throw new ProjectManagerException("Error getting md5 hash.", e);
- }
-
- logger.info("Md5 hash created");
- // Really... I doubt we'll get a > 2gig file. So int casting it is!
- byte[] buffer = new byte[CHUCK_SIZE];
- final String INSERT_PROJECT_FILES = "INSERT INTO project_files (project_id, version, chunk, size, file) values (?,?,?,?,?)";
-
- //int numChunks = (localFileSize / buffer.length) + 1;
- BufferedInputStream bufferedStream = null;
- int chunk = 0;
- try {
- bufferedStream = new BufferedInputStream(new FileInputStream(localFile));
- int size = bufferedStream.read(buffer);
- while (size >= 0) {
- logger.info("Read bytes for " + filename + " size:" + size);
- byte[] buf = buffer;
- if (size < buffer.length) {
- buf = Arrays.copyOfRange(buffer, 0, size);
- }
- try {
- logger.info("Running update for " + filename + " chunk " + chunk);
- runner.update(connection, INSERT_PROJECT_FILES, project.getId(), version, chunk, size, buf);
- logger.info("Finished update for " + filename + " chunk " + chunk);
- } catch (SQLException e) {
- throw new ProjectManagerException("Error chunking", e);
- }
- ++chunk;
-
- size = bufferedStream.read(buffer);
- }
- } catch (IOException e) {
- throw new ProjectManagerException("Error chunking file " + filename);
- } finally {
- IOUtils.closeQuietly(bufferedStream);
- }
-
- final String INSERT_PROJECT_VERSION =
- "INSERT INTO project_versions (project_id, version, upload_time, uploader, file_type, file_name, md5, num_chunks) values (?,?,?,?,?,?,?,?)";
-
- try {
- runner.update(connection, INSERT_PROJECT_VERSION, project.getId(), version, updateTime, uploader, filetype, filename, md5, chunk);
- } catch (SQLException e) {
- logger.error(e);
- throw new ProjectManagerException("Error updating project version " + project.getName(), e);
- }
- }
-
- @Override
- public ProjectFileHandler getUploadedFile(Project project, int version) throws ProjectManagerException {
- logger.info("Retrieving to " + project.getName() + " version:" + version);
- Connection connection = getConnection();
- ProjectFileHandler handler = null;
- try {
- handler = getUploadedFile(connection, project.getId(), version);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
-
- return handler;
- }
-
- @Override
- public ProjectFileHandler getUploadedFile(int projectId, int version) throws ProjectManagerException {
- logger.info("Retrieving to " + projectId + " version:" + version);
- Connection connection = getConnection();
- ProjectFileHandler handler = null;
- try {
- handler = getUploadedFile(connection, projectId, version);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
-
- return handler;
- }
-
- private ProjectFileHandler getUploadedFile(Connection connection, int projectId, int version) throws ProjectManagerException {
- QueryRunner runner = new QueryRunner();
- ProjectVersionResultHandler pfHandler = new ProjectVersionResultHandler();
-
- List<ProjectFileHandler> projectFiles = null;
- try {
- projectFiles = runner.query(connection, ProjectVersionResultHandler.SELECT_PROJECT_VERSION, pfHandler, projectId, version);
- } catch (SQLException e) {
- logger.error(e);
- throw new ProjectManagerException("Query for uploaded file for project id " + projectId + " failed.", e);
- }
- if (projectFiles == null || projectFiles.isEmpty()) {
- return null;
- }
-
- ProjectFileHandler projHandler = projectFiles.get(0);
- int numChunks = projHandler.getNumChunks();
- BufferedOutputStream bStream = null;
- File file = null;
- try {
- try {
- file = File.createTempFile(projHandler.getFileName(), String.valueOf(version), tempDir);
-
- bStream = new BufferedOutputStream(new FileOutputStream(file));
- }
- catch (IOException e) {
- throw new ProjectManagerException("Error creating temp file for stream.");
- }
-
- int collect = 5;
- int fromChunk = 0;
- int toChunk = collect;
- do {
- ProjectFileChunkResultHandler chunkHandler = new ProjectFileChunkResultHandler();
- List<byte[]> data = null;
- try {
- data = runner.query(connection, ProjectFileChunkResultHandler.SELECT_PROJECT_CHUNKS_FILE, chunkHandler, projectId, version, fromChunk, toChunk);
- }
- catch(SQLException e) {
- logger.error(e);
- throw new ProjectManagerException("Query for uploaded file for " + projectId + " failed.", e);
- }
-
- try {
- for (byte[] d : data) {
- bStream.write(d);
- }
- }
- catch (IOException e) {
- throw new ProjectManagerException("Error writing file", e);
- }
-
- // Add all the bytes to the stream.
- fromChunk += collect;
- toChunk += collect;
- } while (fromChunk <= numChunks);
- } finally {
- IOUtils.closeQuietly(bStream);
- }
-
- // Check md5.
- byte[] md5 = null;
- try {
- md5 = Md5Hasher.md5Hash(file);
- } catch (IOException e) {
- throw new ProjectManagerException("Error getting md5 hash.", e);
- }
-
- if (Arrays.equals(projHandler.getMd5Hash(), md5)) {
- logger.info("Md5 Hash is valid");
- }
- else {
- throw new ProjectManagerException("Md5 Hash failed on retrieval of file");
- }
-
- projHandler.setLocalFile(file);
- return projHandler;
- }
-
- @Override
- public void changeProjectVersion(Project project, int version, String user) throws ProjectManagerException {
- long timestamp = System.currentTimeMillis();
- QueryRunner runner = createQueryRunner();
- try {
- final String UPDATE_PROJECT_VERSION = "UPDATE projects SET version=?,modified_time=?,last_modified_by=? WHERE id=?";
-
- runner.update(UPDATE_PROJECT_VERSION, version, timestamp, user, project.getId());
- project.setVersion(version);
- project.setLastModifiedTimestamp(timestamp);
- project.setLastModifiedUser(user);
- } catch (SQLException e) {
- logger.error(e);
- throw new ProjectManagerException("Error updating switching project version " + project.getName(), e);
- }
- }
-
- @Override
- public void updatePermission(Project project, String name, Permission perm, boolean isGroup) throws ProjectManagerException {
- QueryRunner runner = createQueryRunner();
-
- if (this.allowsOnDuplicateKey()) {
- long updateTime = System.currentTimeMillis();
- final String INSERT_PROJECT_PERMISSION =
- "INSERT INTO project_permissions (project_id, modified_time, name, permissions, isGroup) values (?,?,?,?,?)" +
- "ON DUPLICATE KEY UPDATE modified_time = VALUES(modified_time), permissions = VALUES(permissions)";
-
- try {
- runner.update(INSERT_PROJECT_PERMISSION, project.getId(), updateTime, name, perm.toFlags(), isGroup);
- } catch (SQLException e) {
- logger.error(e);
- throw new ProjectManagerException("Error updating project " + project.getName() + " permissions for " + name, e);
- }
- }
- else {
- long updateTime = System.currentTimeMillis();
- final String MERGE_PROJECT_PERMISSION =
- "MERGE INTO project_permissions (project_id, modified_time, name, permissions, isGroup) KEY (project_id, name) values (?,?,?,?,?)";
-
- try {
- runner.update(MERGE_PROJECT_PERMISSION, project.getId(), updateTime, name, perm.toFlags(), isGroup);
- } catch (SQLException e) {
- logger.error(e);
- throw new ProjectManagerException("Error updating project " + project.getName() + " permissions for " + name, e);
- }
- }
-
- if (isGroup) {
- project.setGroupPermission(name, perm);
- }
- else {
- project.setUserPermission(name, perm);
- }
- }
-
-
-
- @Override
- public void updateProjectSettings(Project project) throws ProjectManagerException {
- Connection connection = getConnection();
- try {
- updateProjectSettings(connection, project, defaultEncodingType);
- connection.commit();
- }
- catch (SQLException e) {
- throw new ProjectManagerException("Error updating project settings", e);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
- }
-
- private void updateProjectSettings(Connection connection, Project project, EncodingType encType) throws ProjectManagerException {
- QueryRunner runner = new QueryRunner();
- final String UPDATE_PROJECT_SETTINGS = "UPDATE projects SET enc_type=?, settings_blob=? WHERE id=?";
-
- String json = JSONUtils.toJSON(project.toObject());
- byte[] data = null;
- try {
- byte[] stringData = json.getBytes("UTF-8");
- data = stringData;
-
- if (encType == EncodingType.GZIP) {
- data = GZIPUtils.gzipBytes(stringData);
- }
- logger.debug("NumChars: " + json.length() + " UTF-8:" + stringData.length + " Gzip:"+ data.length);
- } catch(IOException e) {
- throw new ProjectManagerException("Failed to encode. ", e);
- }
-
- try {
- runner.update(connection, UPDATE_PROJECT_SETTINGS, encType.getNumVal(), data, project.getId());
- connection.commit();
- } catch (SQLException e) {
- throw new ProjectManagerException("Error updating project " + project.getName() + " version " + project.getVersion(), e);
- }
- }
-
- @Override
- public void removePermission(Project project, String name, boolean isGroup) throws ProjectManagerException {
- QueryRunner runner = createQueryRunner();
- final String DELETE_PROJECT_PERMISSION = "DELETE FROM project_permissions WHERE project_id=? AND name=? AND isGroup=?";
-
- try {
- runner.update(DELETE_PROJECT_PERMISSION, project.getId(), name, isGroup);
- } catch (SQLException e) {
- logger.error(e);
- throw new ProjectManagerException("Error deleting project " + project.getName() + " permissions for " + name, e);
- }
-
- if (isGroup) {
- project.removeGroupPermission(name);
- }
- else {
- project.removeUserPermission(name);
- }
- }
-
- @Override
- public List<Triple<String, Boolean, Permission>> getProjectPermissions(int projectId) throws ProjectManagerException {
- ProjectPermissionsResultHandler permHander = new ProjectPermissionsResultHandler();
- QueryRunner runner = createQueryRunner();
- List<Triple<String, Boolean,Permission>> permissions = null;
- try {
- permissions = runner.query(ProjectPermissionsResultHandler.SELECT_PROJECT_PERMISSION, permHander, projectId);
- } catch (SQLException e) {
- throw new ProjectManagerException("Query for permissions for " + projectId + " failed.", e);
- }
-
- return permissions;
- }
-
-
- @Override
- public void removeProject(Project project, String user) throws ProjectManagerException {
- QueryRunner runner = createQueryRunner();
-
- long updateTime = System.currentTimeMillis();
- final String UPDATE_INACTIVE_PROJECT = "UPDATE projects SET active=false,modified_time=?,last_modified_by=? WHERE id=?";
- try {
- runner.update(UPDATE_INACTIVE_PROJECT, updateTime, user, project.getId());
- } catch (SQLException e) {
- logger.error(e);
- throw new ProjectManagerException("Error marking project " + project.getName() + " as inactive", e);
- }
- }
-
- @Override
- public boolean postEvent(Project project, EventType type, String user, String message) {
- QueryRunner runner = createQueryRunner();
-
- final String INSERT_PROJECT_EVENTS =
- "INSERT INTO project_events (project_id, event_type, event_time, username, message) values (?,?,?,?,?)";
- long updateTime = System.currentTimeMillis();
- try {
- runner.update(INSERT_PROJECT_EVENTS, project.getId(), type.getNumVal(), updateTime, user, message);
- } catch (SQLException e) {
- e.printStackTrace();
- return false;
- }
-
- return true;
- }
-
- /**
- * Get all the logs for a given project
- *
- * @param project
- * @return
- * @throws ProjectManagerException
- */
- public List<ProjectLogEvent> getProjectEvents(Project project, int num, int skip) throws ProjectManagerException {
- QueryRunner runner = createQueryRunner();
-
- ProjectLogsResultHandler logHandler = new ProjectLogsResultHandler();
- List<ProjectLogEvent> events = null;
- try {
- events = runner.query(ProjectLogsResultHandler.SELECT_PROJECT_EVENTS_ORDER, logHandler, project.getId(), num, skip);
- } catch (SQLException e) {
- logger.error(e);
- }
-
- return events;
- }
-
- @Override
- public void updateDescription(Project project, String description, String user) throws ProjectManagerException {
- QueryRunner runner = createQueryRunner();
-
- final String UPDATE_PROJECT_DESCRIPTION =
- "UPDATE projects SET description=?,modified_time=?,last_modified_by=? WHERE id=?";
- long updateTime = System.currentTimeMillis();
- try {
- runner.update(UPDATE_PROJECT_DESCRIPTION, description, updateTime, user, project.getId());
- project.setDescription(description);
- project.setLastModifiedTimestamp(updateTime);
- project.setLastModifiedUser(user);
- } catch (SQLException e) {
- logger.error(e);
- throw new ProjectManagerException("Error marking project " + project.getName() + " as inactive", e);
- }
- }
-
- @Override
- public int getLatestProjectVersion(Project project) throws ProjectManagerException {
- QueryRunner runner = createQueryRunner();
-
- IntHander handler = new IntHander();
- try {
- return runner.query(IntHander.SELECT_LATEST_VERSION, handler, project.getId());
- } catch (SQLException e) {
- logger.error(e);
- throw new ProjectManagerException("Error marking project " + project.getName() + " as inactive", e);
- }
- }
-
- @Override
- public void uploadFlows(Project project, int version, Collection<Flow> flows) throws ProjectManagerException {
- // We do one at a time instead of batch... because well, the batch could be large.
- logger.info("Uploading flows");
- Connection connection = getConnection();
-
- try {
- for (Flow flow: flows) {
- uploadFlow(connection, project, version, flow, defaultEncodingType);
- }
- connection.commit();
- }
- catch (IOException e) {
- throw new ProjectManagerException("Flow Upload failed.", e);
- }
- catch (SQLException e) {
- throw new ProjectManagerException("Flow Upload failed.", e);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
- }
-
- @Override
- public void uploadFlow(Project project, int version, Flow flow) throws ProjectManagerException {
- logger.info("Uploading flows");
- Connection connection = getConnection();
-
- try {
- uploadFlow(connection, project, version, flow, defaultEncodingType);
- connection.commit();
- }
- catch (IOException e) {
- throw new ProjectManagerException("Flow Upload failed.", e);
- }
- catch (SQLException e) {
- throw new ProjectManagerException("Flow Upload failed commit.", e);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
- }
-
- @Override
- public void updateFlow(Project project, int version, Flow flow) throws ProjectManagerException {
- logger.info("Uploading flows");
- Connection connection = getConnection();
-
- try {
- QueryRunner runner = new QueryRunner();
- String json = JSONUtils.toJSON(flow.toObject());
- byte[] stringData = json.getBytes("UTF-8");
- byte[] data = stringData;
-
- logger.info("UTF-8 size:" + data.length);
- if (defaultEncodingType == EncodingType.GZIP) {
- data = GZIPUtils.gzipBytes(stringData);
- }
-
- logger.info("Flow upload " + flow.getId() + " is byte size " + data.length);
- final String UPDATE_FLOW = "UPDATE project_flows SET encoding_type=?,json=? WHERE project_id=? AND version=? AND flow_id=?";
- try {
- runner.update(connection, UPDATE_FLOW, defaultEncodingType.getNumVal(), data, project.getId(), version, flow.getId());
- } catch (SQLException e) {
- e.printStackTrace();
- throw new ProjectManagerException("Error inserting flow " + flow.getId(), e);
- }
- connection.commit();
- } catch (IOException e) {
- throw new ProjectManagerException("Flow Upload failed.", e);
- } catch (SQLException e) {
- throw new ProjectManagerException("Flow Upload failed commit.", e);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
- }
-
  /** Returns the encoding applied to JSON blobs written by this loader. */
  public EncodingType getDefaultEncodingType() {
    return defaultEncodingType;
  }

  /** Sets the encoding applied to JSON blobs written by this loader. */
  public void setDefaultEncodingType(EncodingType defaultEncodingType) {
    this.defaultEncodingType = defaultEncodingType;
  }
-
- private void uploadFlow(Connection connection, Project project, int version, Flow flow, EncodingType encType) throws ProjectManagerException, IOException {
- QueryRunner runner = new QueryRunner();
- String json = JSONUtils.toJSON(flow.toObject());
- byte[] stringData = json.getBytes("UTF-8");
- byte[] data = stringData;
-
- logger.info("UTF-8 size:" + data.length);
- if (encType == EncodingType.GZIP) {
- data = GZIPUtils.gzipBytes(stringData);
- }
-
- logger.info("Flow upload " + flow.getId() + " is byte size " + data.length);
- final String INSERT_FLOW = "INSERT INTO project_flows (project_id, version, flow_id, modified_time, encoding_type, json) values (?,?,?,?,?,?)";
- try {
- runner.update(connection, INSERT_FLOW, project.getId(), version, flow.getId(), System.currentTimeMillis(), encType.getNumVal(), data );
- } catch (SQLException e) {
- throw new ProjectManagerException("Error inserting flow " + flow.getId(), e);
- }
- }
-
- @Override
- public Flow fetchFlow(Project project, String flowId) throws ProjectManagerException {
- QueryRunner runner = createQueryRunner();
- ProjectFlowsResultHandler handler = new ProjectFlowsResultHandler();
-
- try {
- List<Flow> flows = runner.query(ProjectFlowsResultHandler.SELECT_PROJECT_FLOW, handler, project.getId(), project.getVersion(), flowId);
- if (flows.isEmpty()) {
- return null;
- }
- else {
- return flows.get(0);
- }
- } catch (SQLException e) {
- throw new ProjectManagerException("Error fetching flow " + flowId, e);
- }
- }
-
- @Override
- public List<Flow> fetchAllProjectFlows(Project project) throws ProjectManagerException {
- QueryRunner runner = createQueryRunner();
- ProjectFlowsResultHandler handler = new ProjectFlowsResultHandler();
-
- List<Flow> flows = null;
- try {
- flows = runner.query(ProjectFlowsResultHandler.SELECT_ALL_PROJECT_FLOWS, handler, project.getId(), project.getVersion());
- } catch (SQLException e) {
- throw new ProjectManagerException("Error fetching flows from project " + project.getName() + " version " + project.getVersion(), e);
- }
-
- return flows;
- }
-
- @Override
- public void uploadProjectProperties(Project project, List<Props> properties) throws ProjectManagerException {
- Connection connection = getConnection();
-
- try {
- for (Props props: properties) {
- uploadProjectProperty(connection, project, props.getSource(), props);
- }
- connection.commit();
- }
- catch (SQLException e) {
- throw new ProjectManagerException("Error uploading project property files", e);
- }
- catch (IOException e) {
- throw new ProjectManagerException("Error uploading project property files", e);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
- }
-
- @Override
- public void uploadProjectProperty(Project project, Props props) throws ProjectManagerException {
- Connection connection = getConnection();
- try {
- uploadProjectProperty(connection, project, props.getSource(), props);
- connection.commit();
- }
- catch (SQLException e) {
- throw new ProjectManagerException("Error uploading project property files", e);
- }
- catch (IOException e) {
- throw new ProjectManagerException("Error uploading project property file", e);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
- }
-
- @Override
- public void updateProjectProperty(Project project, Props props) throws ProjectManagerException {
- Connection connection = getConnection();
- try {
- updateProjectProperty(connection, project, props.getSource(), props);
- connection.commit();
- }
- catch (SQLException e) {
- throw new ProjectManagerException("Error uploading project property files", e);
- }
- catch (IOException e) {
- throw new ProjectManagerException("Error uploading project property file", e);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
- }
-
- private void updateProjectProperty(Connection connection, Project project, String name, Props props) throws ProjectManagerException, IOException {
- QueryRunner runner = new QueryRunner();
- final String UPDATE_PROPERTIES = "UPDATE project_properties SET property=? WHERE project_id=? AND version=? AND name=?";
-
- String propertyJSON = PropsUtils.toJSONString(props, true);
- byte[] data = propertyJSON.getBytes("UTF-8");
- logger.info("UTF-8 size:" + data.length);
- if (defaultEncodingType == EncodingType.GZIP) {
- data = GZIPUtils.gzipBytes(data);
- }
-
- try {
- runner.update(connection, UPDATE_PROPERTIES, data, project.getId(), project.getVersion(), name);
- connection.commit();
- } catch (SQLException e) {
- throw new ProjectManagerException("Error updating property " + project.getName() + " version " + project.getVersion(), e);
- }
- }
-
- private void uploadProjectProperty(Connection connection, Project project, String name, Props props) throws ProjectManagerException, IOException {
- QueryRunner runner = new QueryRunner();
- final String INSERT_PROPERTIES = "INSERT INTO project_properties (project_id, version, name, modified_time, encoding_type, property) values (?,?,?,?,?,?)";
-
- String propertyJSON = PropsUtils.toJSONString(props, true);
- byte[] data = propertyJSON.getBytes("UTF-8");
- logger.info("UTF-8 size:" + data.length);
- if (defaultEncodingType == EncodingType.GZIP) {
- data = GZIPUtils.gzipBytes(data);
- }
-
- try {
- runner.update(connection, INSERT_PROPERTIES, project.getId(), project.getVersion(), name, System.currentTimeMillis(), defaultEncodingType.getNumVal(), data);
- connection.commit();
- } catch (SQLException e) {
- throw new ProjectManagerException("Error uploading project properties " + name + " into " + project.getName() + " version " + project.getVersion(), e);
- }
- }
-
- @Override
- public Props fetchProjectProperty(int projectId, int projectVer, String propsName) throws ProjectManagerException {
- QueryRunner runner = createQueryRunner();
-
- ProjectPropertiesResultsHandler handler = new ProjectPropertiesResultsHandler();
- try {
- List<Pair<String, Props>> properties =
- runner.query(ProjectPropertiesResultsHandler.SELECT_PROJECT_PROPERTY, handler, projectId, projectVer, propsName);
-
- if (properties == null || properties.isEmpty()) {
- return null;
- }
-
- return properties.get(0).getSecond();
- } catch (SQLException e) {
- throw new ProjectManagerException("Error fetching property " + propsName, e);
- }
- }
-
- @Override
- public Props fetchProjectProperty(Project project, String propsName) throws ProjectManagerException {
- QueryRunner runner = createQueryRunner();
-
- ProjectPropertiesResultsHandler handler = new ProjectPropertiesResultsHandler();
- try {
- List<Pair<String, Props>> properties =
- runner.query(ProjectPropertiesResultsHandler.SELECT_PROJECT_PROPERTY, handler, project.getId(), project.getVersion(), propsName);
-
- if (properties == null || properties.isEmpty()) {
- return null;
- }
-
- return properties.get(0).getSecond();
- } catch (SQLException e) {
- throw new ProjectManagerException("Error fetching property " + propsName, e);
- }
- }
-
- @Override
- public void cleanOlderProjectVersion(int projectId, int version) throws ProjectManagerException {
- Connection connection = getConnection();
-
- try {
- cleanOlderProjectVersionFlows(connection, projectId, version);
- cleanOlderProjectVersionProperties(connection, projectId, version);
- cleanOlderProjectFiles(connection, projectId, version);
- cleanOlderProjectVersion(connection, projectId, version);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
- }
-
- private void cleanOlderProjectVersionFlows(Connection connection, int projectId, int version) throws ProjectManagerException {
- final String DELETE_FLOW = "DELETE FROM project_flows WHERE project_id=? AND version<?";
- QueryRunner runner = new QueryRunner();
- try {
- runner.update(connection, DELETE_FLOW, projectId, version);
- connection.commit();
- } catch (SQLException e) {
- throw new ProjectManagerException("Error deleting project version flows " + projectId + ":" + version, e);
- }
- }
-
- private void cleanOlderProjectVersionProperties(Connection connection, int projectId, int version) throws ProjectManagerException {
- final String DELETE_PROPERTIES = "DELETE FROM project_properties WHERE project_id=? AND version<?";
- QueryRunner runner = new QueryRunner();
- try {
- runner.update(connection, DELETE_PROPERTIES, projectId, version);
- connection.commit();
- } catch (SQLException e) {
- throw new ProjectManagerException("Error deleting project version properties " + projectId + ":" + version, e);
- }
- }
-
- private void cleanOlderProjectFiles(Connection connection, int projectId, int version) throws ProjectManagerException {
- final String DELETE_PROJECT_FILES = "DELETE FROM project_files WHERE project_id=? AND version<?";
- QueryRunner runner = new QueryRunner();
- try {
- runner.update(connection, DELETE_PROJECT_FILES, projectId, version);
- connection.commit();
- } catch (SQLException e) {
- throw new ProjectManagerException("Error deleting project version files " + projectId + ":" + version, e);
- }
- }
-
- private void cleanOlderProjectVersion(Connection connection, int projectId, int version) throws ProjectManagerException {
- final String UPDATE_PROJECT_VERSIONS = "UPDATE project_versions SET num_chunks=0 WHERE project_id=? AND version<?";
- QueryRunner runner = new QueryRunner();
- try {
- runner.update(connection, UPDATE_PROJECT_VERSIONS, projectId, version);
- connection.commit();
- } catch (SQLException e) {
- throw new ProjectManagerException("Error updating project version chunksize " + projectId + ":" + version, e);
- }
- }
-
- @Override
- public Map<String,Props> fetchProjectProperties(int projectId, int version) throws ProjectManagerException {
- QueryRunner runner = createQueryRunner();
-
- ProjectPropertiesResultsHandler handler = new ProjectPropertiesResultsHandler();
- try {
- List<Pair<String, Props>> properties =
- runner.query(ProjectPropertiesResultsHandler.SELECT_PROJECT_PROPERTIES, handler, projectId, version);
-
- if (properties == null || properties.isEmpty()) {
- return null;
- }
-
- HashMap<String, Props> props = new HashMap<String, Props>();
- for (Pair<String, Props> pair: properties) {
- props.put(pair.getFirst(), pair.getSecond());
- }
- return props;
- } catch (SQLException e) {
- throw new ProjectManagerException("Error fetching properties", e);
- }
- }
-
- private static class ProjectResultHandler implements ResultSetHandler<List<Project>> {
- private static String SELECT_PROJECT_BY_ID =
- "SELECT id, name, active, modified_time, create_time, version, last_modified_by, description, enc_type, settings_blob FROM projects WHERE id=?";
-
- private static String SELECT_ALL_ACTIVE_PROJECTS =
- "SELECT id, name, active, modified_time, create_time, version, last_modified_by, description, enc_type, settings_blob FROM projects WHERE active=true";
-
- private static String SELECT_ACTIVE_PROJECT_BY_NAME =
- "SELECT id, name, active, modified_time, create_time, version, last_modified_by, description, enc_type, settings_blob FROM projects WHERE name=? AND active=true";
-
- @Override
- public List<Project> handle(ResultSet rs) throws SQLException {
- if (!rs.next()) {
- return Collections.<Project>emptyList();
- }
-
- ArrayList<Project> projects = new ArrayList<Project>();
- do {
- int id = rs.getInt(1);
- String name = rs.getString(2);
- boolean active = rs.getBoolean(3);
- long modifiedTime = rs.getLong(4);
- long createTime = rs.getLong(5);
- int version = rs.getInt(6);
- String lastModifiedBy = rs.getString(7);
- String description = rs.getString(8);
- int encodingType = rs.getInt(9);
- byte[] data = rs.getBytes(10);
-
- Project project;
- if (data != null) {
- EncodingType encType = EncodingType.fromInteger(encodingType);
- Object blobObj;
- try {
- // Convoluted way to inflate strings. Should find common package or helper function.
- if (encType == EncodingType.GZIP) {
- // Decompress the sucker.
- String jsonString = GZIPUtils.unGzipString(data, "UTF-8");
- blobObj = JSONUtils.parseJSONFromString(jsonString);
- }
- else {
- String jsonString = new String(data, "UTF-8");
- blobObj = JSONUtils.parseJSONFromString(jsonString);
- }
- project = Project.projectFromObject(blobObj);
- } catch (IOException e) {
- throw new SQLException("Failed to get project.", e);
- }
- }
- else {
- project = new Project(id, name);
- }
-
- // update the fields as they may have changed
-
- project.setActive(active);
- project.setLastModifiedTimestamp(modifiedTime);
- project.setCreateTimestamp(createTime);
- project.setVersion(version);
- project.setLastModifiedUser(lastModifiedBy);
- project.setDescription(description);
-
- projects.add(project);
- } while (rs.next());
-
- return projects;
- }
- }
-
- private static class ProjectPermissionsResultHandler implements ResultSetHandler<List<Triple<String, Boolean, Permission>>> {
- private static String SELECT_PROJECT_PERMISSION =
- "SELECT project_id, modified_time, name, permissions, isGroup FROM project_permissions WHERE project_id=?";
-
- @Override
- public List<Triple<String, Boolean, Permission>> handle(ResultSet rs) throws SQLException {
- if (!rs.next()) {
- return Collections.<Triple<String, Boolean, Permission>>emptyList();
- }
-
- ArrayList<Triple<String, Boolean, Permission>> permissions = new ArrayList<Triple<String, Boolean, Permission>>();
- do {
- //int project_id = rs.getInt(1);
- //long modifiedTime = rs.getLong(2);
- String username = rs.getString(3);
- int permissionFlag = rs.getInt(4);
- boolean val = rs.getBoolean(5);
-
- Permission perm = new Permission(permissionFlag);
- permissions.add(new Triple<String, Boolean, Permission>(username, val, perm));
- } while (rs.next());
-
- return permissions;
- }
- }
-
- private static class ProjectFlowsResultHandler implements ResultSetHandler<List<Flow>> {
- private static String SELECT_PROJECT_FLOW =
- "SELECT project_id, version, flow_id, modified_time, encoding_type, json FROM project_flows WHERE project_id=? AND version=? AND flow_id=?";
-
- private static String SELECT_ALL_PROJECT_FLOWS =
- "SELECT project_id, version, flow_id, modified_time, encoding_type, json FROM project_flows WHERE project_id=? AND version=?";
-
- @Override
- public List<Flow> handle(ResultSet rs) throws SQLException {
- if (!rs.next()) {
- return Collections.<Flow>emptyList();
- }
-
- ArrayList<Flow> flows = new ArrayList<Flow>();
- do {
- //int projectId = rs.getInt(1);
- //int version = rs.getInt(2);
- String flowId = rs.getString(3);
- //long modifiedTime = rs.getLong(4);
- int encodingType = rs.getInt(5);
- byte[] dataBytes = rs.getBytes(6);
-
- if (dataBytes == null) {
- continue;
- }
-
- EncodingType encType = EncodingType.fromInteger(encodingType);
-
- Object flowObj = null;
- try {
- // Convoluted way to inflate strings. Should find common package or helper function.
- if (encType == EncodingType.GZIP) {
- // Decompress the sucker.
- String jsonString = GZIPUtils.unGzipString(dataBytes, "UTF-8");
- flowObj = JSONUtils.parseJSONFromString(jsonString);
- }
- else {
- String jsonString = new String(dataBytes, "UTF-8");
- flowObj = JSONUtils.parseJSONFromString(jsonString);
- }
-
- Flow flow = Flow.flowFromObject(flowObj);
- flows.add(flow);
- } catch (IOException e) {
- throw new SQLException("Error retrieving flow data " + flowId, e);
- }
-
- } while (rs.next());
-
- return flows;
- }
- }
-
- private static class ProjectPropertiesResultsHandler implements ResultSetHandler<List<Pair<String, Props>>> {
- private static String SELECT_PROJECT_PROPERTY =
- "SELECT project_id, version, name, modified_time, encoding_type, property FROM project_properties WHERE project_id=? AND version=? AND name=?";
-
- private static String SELECT_PROJECT_PROPERTIES =
- "SELECT project_id, version, name, modified_time, encoding_type, property FROM project_properties WHERE project_id=? AND version=?";
-
- @Override
- public List<Pair<String, Props>> handle(ResultSet rs) throws SQLException {
- if (!rs.next()) {
- return Collections.<Pair<String, Props>>emptyList();
- }
-
- List<Pair<String, Props>> properties = new ArrayList<Pair<String, Props>>();
- do {
- //int projectId = rs.getInt(1);
- //int version = rs.getInt(2);
- String name = rs.getString(3);
- //long modifiedTime = rs.getLong(4);
- int eventType = rs.getInt(5);
- byte[] dataBytes = rs.getBytes(6);
-
- EncodingType encType = EncodingType.fromInteger(eventType);
- String propertyString = null;
-
- try {
- if (encType == EncodingType.GZIP) {
- // Decompress the sucker.
- propertyString = GZIPUtils.unGzipString(dataBytes, "UTF-8");
- }
- else {
- propertyString = new String(dataBytes, "UTF-8");
- }
-
- Props props = PropsUtils.fromJSONString(propertyString);
- props.setSource(name);
- properties.add(new Pair<String, Props>(name, props));
- } catch (IOException e) {
- throw new SQLException(e);
- }
- } while (rs.next());
-
- return properties;
- }
- }
-
- private static class ProjectLogsResultHandler implements ResultSetHandler<List<ProjectLogEvent>> {
- private static String SELECT_PROJECT_EVENTS_ORDER =
- "SELECT project_id, event_type, event_time, username, message FROM project_events WHERE project_id=? ORDER BY event_time DESC LIMIT ? OFFSET ?";
-
- @Override
- public List<ProjectLogEvent> handle(ResultSet rs) throws SQLException {
- if (!rs.next()) {
- return Collections.<ProjectLogEvent>emptyList();
- }
-
- ArrayList<ProjectLogEvent> events = new ArrayList<ProjectLogEvent>();
- do {
- int projectId = rs.getInt(1);
- int eventType = rs.getInt(2);
- long eventTime = rs.getLong(3);
- String username = rs.getString(4);
- String message = rs.getString(5);
-
- ProjectLogEvent event = new ProjectLogEvent(projectId, EventType.fromInteger(eventType), eventTime, username, message);
- events.add(event);
- } while (rs.next());
-
- return events;
- }
- }
-
- private static class ProjectFileChunkResultHandler implements ResultSetHandler<List<byte[]>> {
- private static String SELECT_PROJECT_CHUNKS_FILE = "SELECT project_id, version, chunk, size, file FROM project_files WHERE project_id=? AND version=? AND chunk >= ? AND chunk < ? ORDER BY chunk ASC";
-
- @Override
- public List<byte[]> handle(ResultSet rs) throws SQLException {
- if (!rs.next()) {
- return Collections.<byte[]>emptyList();
- }
-
- ArrayList<byte[]> data = new ArrayList<byte[]>();
- do {
-// int project_id = rs.getInt(1);
-// int version = rs.getInt(2);
-// int chunk = rs.getInt(3);
-// int size = rs.getInt(4);
- byte[] bytes = rs.getBytes(5);
-
- data.add(bytes);
- } while (rs.next());
-
- return data;
- }
-
- }
-
- private static class ProjectVersionResultHandler implements ResultSetHandler<List<ProjectFileHandler>> {
- private static String SELECT_PROJECT_VERSION = "SELECT project_id, version, upload_time, uploader, file_type, file_name, md5, num_chunks FROM project_versions WHERE project_id=? AND version=?";
- //private static String SELECT_ALL_PER_PROJECT = "SELECT project_id, version, upload_time, uploader, file_type, file_name, md5, num_chunks FROM project_versions WHERE project_id=?";
-
- @Override
- public List<ProjectFileHandler> handle(ResultSet rs) throws SQLException {
- if (!rs.next()) {
- return null;
- }
-
- List<ProjectFileHandler> handlers = new ArrayList<ProjectFileHandler>();
- do {
- int projectId = rs.getInt(1);
- int version = rs.getInt(2);
- long uploadTime = rs.getLong(3);
- String uploader = rs.getString(4);
- String fileType = rs.getString(5);
- String fileName = rs.getString(6);
- byte[] md5 = rs.getBytes(7);
- int numChunks = rs.getInt(8);
-
- ProjectFileHandler handler = new ProjectFileHandler(
- projectId, version, uploadTime, uploader, fileType, fileName, numChunks, md5
- );
-
- handlers.add(handler);
- } while (rs.next());
-
- return handlers;
- }
- }
-
- private static class IntHander implements ResultSetHandler<Integer> {
- private static String SELECT_LATEST_VERSION = "SELECT MAX(version) FROM project_versions WHERE project_id=?";
-
- @Override
- public Integer handle(ResultSet rs) throws SQLException {
- if (!rs.next()) {
- return 0;
- }
-
- return rs.getInt(1);
- }
- }
-
- private Connection getConnection() throws ProjectManagerException {
- Connection connection = null;
- try {
- connection = super.getDBConnection(false);
- } catch (Exception e) {
- DbUtils.closeQuietly(connection);
- throw new ProjectManagerException("Error getting DB connection.", e);
- }
-
- return connection;
- }
/**
 * JDBC-backed implementation of ProjectLoader: persists projects, flows,
 * property files, permissions, audit events and uploaded project archives.
 */
public class JdbcProjectLoader extends AbstractJdbcLoader implements
    ProjectLoader {
  private static final Logger logger = Logger
      .getLogger(JdbcProjectLoader.class);

  // Upload chunk size: 10 MiB. (Name keeps its historical "CHUCK" typo.)
  private static final int CHUCK_SIZE = 1024 * 1024 * 10;
  // Scratch directory used while reassembling uploaded project archives.
  private File tempDir;

  // Encoding applied to JSON blobs written by this loader.
  private EncodingType defaultEncodingType = EncodingType.GZIP;
+
+ public JdbcProjectLoader(Props props) {
+ super(props);
+ tempDir = new File(props.getString("project.temp.dir", "temp"));
+ if (!tempDir.exists()) {
+ tempDir.mkdirs();
+ }
+ }
+
+ @Override
+ public List<Project> fetchAllActiveProjects() throws ProjectManagerException {
+ Connection connection = getConnection();
+
+ List<Project> projects = null;
+ try {
+ projects = fetchAllActiveProjects(connection);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+
+ return projects;
+ }
+
+ private List<Project> fetchAllActiveProjects(Connection connection)
+ throws ProjectManagerException {
+ QueryRunner runner = new QueryRunner();
+
+ ProjectResultHandler handler = new ProjectResultHandler();
+ List<Project> projects = null;
+ try {
+ projects =
+ runner.query(connection,
+ ProjectResultHandler.SELECT_ALL_ACTIVE_PROJECTS, handler);
+
+ for (Project project : projects) {
+ List<Triple<String, Boolean, Permission>> permissions =
+ fetchPermissionsForProject(connection, project);
+
+ for (Triple<String, Boolean, Permission> entry : permissions) {
+ if (entry.getSecond()) {
+ project.setGroupPermission(entry.getFirst(), entry.getThird());
+ } else {
+ project.setUserPermission(entry.getFirst(), entry.getThird());
+ }
+ }
+ }
+ } catch (SQLException e) {
+ throw new ProjectManagerException("Error retrieving all projects", e);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+
+ return projects;
+ }
+
+ @Override
+ public Project fetchProjectById(int id) throws ProjectManagerException {
+ Connection connection = getConnection();
+
+ Project project = null;
+ try {
+ project = fetchProjectById(connection, id);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+
+ return project;
+ }
+
+ private Project fetchProjectById(Connection connection, int id)
+ throws ProjectManagerException {
+ QueryRunner runner = new QueryRunner();
+ // Fetch the project
+ Project project = null;
+ ProjectResultHandler handler = new ProjectResultHandler();
+ try {
+ List<Project> projects =
+ runner.query(connection, ProjectResultHandler.SELECT_PROJECT_BY_ID,
+ handler, id);
+ if (projects.isEmpty()) {
+ throw new ProjectManagerException("No active project with id " + id
+ + " exists in db.");
+ }
+
+ project = projects.get(0);
+ } catch (SQLException e) {
+ logger.error(ProjectResultHandler.SELECT_PROJECT_BY_ID + " failed.");
+ throw new ProjectManagerException(
+ "Query for existing project failed. Project " + id, e);
+ }
+
+ // Fetch the user permissions
+ List<Triple<String, Boolean, Permission>> permissions =
+ fetchPermissionsForProject(connection, project);
+
+ for (Triple<String, Boolean, Permission> perm : permissions) {
+ if (perm.getThird().toFlags() != 0) {
+ if (perm.getSecond()) {
+ project.setGroupPermission(perm.getFirst(), perm.getThird());
+ } else {
+ project.setUserPermission(perm.getFirst(), perm.getThird());
+ }
+ }
+ }
+
+ return project;
+ }
+
+ private List<Triple<String, Boolean, Permission>> fetchPermissionsForProject(
+ Connection connection, Project project) throws ProjectManagerException {
+ ProjectPermissionsResultHandler permHander =
+ new ProjectPermissionsResultHandler();
+
+ QueryRunner runner = new QueryRunner();
+ List<Triple<String, Boolean, Permission>> permissions = null;
+ try {
+ permissions =
+ runner.query(connection,
+ ProjectPermissionsResultHandler.SELECT_PROJECT_PERMISSION,
+ permHander, project.getId());
+ } catch (SQLException e) {
+ throw new ProjectManagerException("Query for permissions for "
+ + project.getName() + " failed.", e);
+ }
+
+ return permissions;
+ }
+
+ /**
+ * Creates a Project in the db.
+ *
+ * It will throw an exception if it finds an active project of the same name,
+ * or the SQL fails
+ */
+ @Override
+ public Project createNewProject(String name, String description, User creator)
+ throws ProjectManagerException {
+ Connection connection = getConnection();
+
+ Project project;
+ try {
+ // No need to commit, since createNewProject should commit.
+ project = createNewProject(connection, name, description, creator);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+
+ return project;
+ }
+
+ private synchronized Project createNewProject(Connection connection,
+ String name, String description, User creator)
+ throws ProjectManagerException {
+ QueryRunner runner = new QueryRunner();
+ ProjectResultHandler handler = new ProjectResultHandler();
+
+ // See if it exists first.
+ try {
+ List<Project> project =
+ runner
+ .query(connection,
+ ProjectResultHandler.SELECT_ACTIVE_PROJECT_BY_NAME, handler,
+ name);
+ if (!project.isEmpty()) {
+ throw new ProjectManagerException("Active project with name " + name
+ + " already exists in db.");
+ }
+ } catch (SQLException e) {
+ logger.error(e);
+ throw new ProjectManagerException(
+ "Checking for existing project failed. " + name, e);
+ }
+
+ final String INSERT_PROJECT =
+ "INSERT INTO projects ( name, active, modified_time, create_time, version, last_modified_by, description, enc_type, settings_blob) values (?,?,?,?,?,?,?,?,?)";
+ // Insert project
+ try {
+ long time = System.currentTimeMillis();
+ int i =
+ runner.update(connection, INSERT_PROJECT, name, true, time, time,
+ null, creator.getUserId(), description,
+ defaultEncodingType.getNumVal(), null);
+ if (i == 0) {
+ throw new ProjectManagerException("No projects have been inserted.");
+ }
+ connection.commit();
+
+ } catch (SQLException e) {
+ logger.error(INSERT_PROJECT + " failed.");
+ try {
+ connection.rollback();
+ } catch (SQLException e1) {
+ e1.printStackTrace();
+ }
+ throw new ProjectManagerException(
+ "Insert project for existing project failed. " + name, e);
+ }
+
+ // Do another query to grab and return the project.
+ Project project = null;
+ try {
+ List<Project> projects =
+ runner
+ .query(connection,
+ ProjectResultHandler.SELECT_ACTIVE_PROJECT_BY_NAME, handler,
+ name);
+ if (projects.isEmpty()) {
+ throw new ProjectManagerException("No active project with name " + name
+ + " exists in db.");
+ } else if (projects.size() > 1) {
+ throw new ProjectManagerException("More than one active project "
+ + name);
+ }
+
+ project = projects.get(0);
+ } catch (SQLException e) {
+ logger.error(e);
+ throw new ProjectManagerException(
+ "Checking for existing project failed. " + name, e);
+ }
+
+ return project;
+ }
+
+ @Override
+ public void uploadProjectFile(Project project, int version, String filetype,
+ String filename, File localFile, String uploader)
+ throws ProjectManagerException {
+ logger.info("Uploading to " + project.getName() + " version:" + version
+ + " file:" + filename);
+ Connection connection = getConnection();
+
+ try {
+ uploadProjectFile(connection, project, version, filetype, filename,
+ localFile, uploader);
+ connection.commit();
+ logger.info("Commiting upload " + localFile.getName());
+ } catch (SQLException e) {
+ logger.error(e);
+ throw new ProjectManagerException("Error getting DB connection.", e);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+ }
+
+ private void uploadProjectFile(Connection connection, Project project,
+ int version, String filetype, String filename, File localFile,
+ String uploader) throws ProjectManagerException {
+ QueryRunner runner = new QueryRunner();
+ long updateTime = System.currentTimeMillis();
+
+ logger.info("Creating message digest for upload " + localFile.getName());
+ byte[] md5 = null;
+ try {
+ md5 = Md5Hasher.md5Hash(localFile);
+ } catch (IOException e) {
+ throw new ProjectManagerException("Error getting md5 hash.", e);
+ }
+
+ logger.info("Md5 hash created");
+ // Really... I doubt we'll get a > 2gig file. So int casting it is!
+ byte[] buffer = new byte[CHUCK_SIZE];
+ final String INSERT_PROJECT_FILES =
+ "INSERT INTO project_files (project_id, version, chunk, size, file) values (?,?,?,?,?)";
+
+ // int numChunks = (localFileSize / buffer.length) + 1;
+ BufferedInputStream bufferedStream = null;
+ int chunk = 0;
+ try {
+ bufferedStream = new BufferedInputStream(new FileInputStream(localFile));
+ int size = bufferedStream.read(buffer);
+ while (size >= 0) {
+ logger.info("Read bytes for " + filename + " size:" + size);
+ byte[] buf = buffer;
+ if (size < buffer.length) {
+ buf = Arrays.copyOfRange(buffer, 0, size);
+ }
+ try {
+ logger.info("Running update for " + filename + " chunk " + chunk);
+ runner.update(connection, INSERT_PROJECT_FILES, project.getId(),
+ version, chunk, size, buf);
+ logger.info("Finished update for " + filename + " chunk " + chunk);
+ } catch (SQLException e) {
+ throw new ProjectManagerException("Error chunking", e);
+ }
+ ++chunk;
+
+ size = bufferedStream.read(buffer);
+ }
+ } catch (IOException e) {
+ throw new ProjectManagerException("Error chunking file " + filename);
+ } finally {
+ IOUtils.closeQuietly(bufferedStream);
+ }
+
+ final String INSERT_PROJECT_VERSION =
+ "INSERT INTO project_versions (project_id, version, upload_time, uploader, file_type, file_name, md5, num_chunks) values (?,?,?,?,?,?,?,?)";
+
+ try {
+ runner.update(connection, INSERT_PROJECT_VERSION, project.getId(),
+ version, updateTime, uploader, filetype, filename, md5, chunk);
+ } catch (SQLException e) {
+ logger.error(e);
+ throw new ProjectManagerException("Error updating project version "
+ + project.getName(), e);
+ }
+ }
+
+ @Override
+ public ProjectFileHandler getUploadedFile(Project project, int version)
+ throws ProjectManagerException {
+ logger.info("Retrieving to " + project.getName() + " version:" + version);
+ Connection connection = getConnection();
+ ProjectFileHandler handler = null;
+ try {
+ handler = getUploadedFile(connection, project.getId(), version);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+
+ return handler;
+ }
+
+ @Override
+ public ProjectFileHandler getUploadedFile(int projectId, int version)
+ throws ProjectManagerException {
+ logger.info("Retrieving to " + projectId + " version:" + version);
+ Connection connection = getConnection();
+ ProjectFileHandler handler = null;
+ try {
+ handler = getUploadedFile(connection, projectId, version);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+
+ return handler;
+ }
+
+ private ProjectFileHandler getUploadedFile(Connection connection,
+ int projectId, int version) throws ProjectManagerException {
+ QueryRunner runner = new QueryRunner();
+ ProjectVersionResultHandler pfHandler = new ProjectVersionResultHandler();
+
+ List<ProjectFileHandler> projectFiles = null;
+ try {
+ projectFiles =
+ runner.query(connection,
+ ProjectVersionResultHandler.SELECT_PROJECT_VERSION, pfHandler,
+ projectId, version);
+ } catch (SQLException e) {
+ logger.error(e);
+ throw new ProjectManagerException(
+ "Query for uploaded file for project id " + projectId + " failed.", e);
+ }
+ if (projectFiles == null || projectFiles.isEmpty()) {
+ return null;
+ }
+
+ ProjectFileHandler projHandler = projectFiles.get(0);
+ int numChunks = projHandler.getNumChunks();
+ BufferedOutputStream bStream = null;
+ File file = null;
+ try {
+ try {
+ file =
+ File.createTempFile(projHandler.getFileName(),
+ String.valueOf(version), tempDir);
+
+ bStream = new BufferedOutputStream(new FileOutputStream(file));
+ } catch (IOException e) {
+ throw new ProjectManagerException(
+ "Error creating temp file for stream.");
+ }
+
+ int collect = 5;
+ int fromChunk = 0;
+ int toChunk = collect;
+ do {
+ ProjectFileChunkResultHandler chunkHandler =
+ new ProjectFileChunkResultHandler();
+ List<byte[]> data = null;
+ try {
+ data =
+ runner.query(connection,
+ ProjectFileChunkResultHandler.SELECT_PROJECT_CHUNKS_FILE,
+ chunkHandler, projectId, version, fromChunk, toChunk);
+ } catch (SQLException e) {
+ logger.error(e);
+ throw new ProjectManagerException("Query for uploaded file for "
+ + projectId + " failed.", e);
+ }
+
+ try {
+ for (byte[] d : data) {
+ bStream.write(d);
+ }
+ } catch (IOException e) {
+ throw new ProjectManagerException("Error writing file", e);
+ }
+
+ // Add all the bytes to the stream.
+ fromChunk += collect;
+ toChunk += collect;
+ } while (fromChunk <= numChunks);
+ } finally {
+ IOUtils.closeQuietly(bStream);
+ }
+
+ // Check md5.
+ byte[] md5 = null;
+ try {
+ md5 = Md5Hasher.md5Hash(file);
+ } catch (IOException e) {
+ throw new ProjectManagerException("Error getting md5 hash.", e);
+ }
+
+ if (Arrays.equals(projHandler.getMd5Hash(), md5)) {
+ logger.info("Md5 Hash is valid");
+ } else {
+ throw new ProjectManagerException("Md5 Hash failed on retrieval of file");
+ }
+
+ projHandler.setLocalFile(file);
+ return projHandler;
+ }
+
+ @Override
+ public void changeProjectVersion(Project project, int version, String user)
+ throws ProjectManagerException {
+ long timestamp = System.currentTimeMillis();
+ QueryRunner runner = createQueryRunner();
+ try {
+ final String UPDATE_PROJECT_VERSION =
+ "UPDATE projects SET version=?,modified_time=?,last_modified_by=? WHERE id=?";
+
+ runner.update(UPDATE_PROJECT_VERSION, version, timestamp, user,
+ project.getId());
+ project.setVersion(version);
+ project.setLastModifiedTimestamp(timestamp);
+ project.setLastModifiedUser(user);
+ } catch (SQLException e) {
+ logger.error(e);
+ throw new ProjectManagerException(
+ "Error updating switching project version " + project.getName(), e);
+ }
+ }
+
+ @Override
+ public void updatePermission(Project project, String name, Permission perm,
+ boolean isGroup) throws ProjectManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ if (this.allowsOnDuplicateKey()) {
+ long updateTime = System.currentTimeMillis();
+ final String INSERT_PROJECT_PERMISSION =
+ "INSERT INTO project_permissions (project_id, modified_time, name, permissions, isGroup) values (?,?,?,?,?)"
+ + "ON DUPLICATE KEY UPDATE modified_time = VALUES(modified_time), permissions = VALUES(permissions)";
+
+ try {
+ runner.update(INSERT_PROJECT_PERMISSION, project.getId(), updateTime,
+ name, perm.toFlags(), isGroup);
+ } catch (SQLException e) {
+ logger.error(e);
+ throw new ProjectManagerException("Error updating project "
+ + project.getName() + " permissions for " + name, e);
+ }
+ } else {
+ long updateTime = System.currentTimeMillis();
+ final String MERGE_PROJECT_PERMISSION =
+ "MERGE INTO project_permissions (project_id, modified_time, name, permissions, isGroup) KEY (project_id, name) values (?,?,?,?,?)";
+
+ try {
+ runner.update(MERGE_PROJECT_PERMISSION, project.getId(), updateTime,
+ name, perm.toFlags(), isGroup);
+ } catch (SQLException e) {
+ logger.error(e);
+ throw new ProjectManagerException("Error updating project "
+ + project.getName() + " permissions for " + name, e);
+ }
+ }
+
+ if (isGroup) {
+ project.setGroupPermission(name, perm);
+ } else {
+ project.setUserPermission(name, perm);
+ }
+ }
+
  /**
   * Persists the project's serialized settings blob using the loader's
   * default encoding, committing the change.
   *
   * @throws ProjectManagerException if the update or commit fails.
   */
  @Override
  public void updateProjectSettings(Project project)
      throws ProjectManagerException {
    Connection connection = getConnection();
    try {
      updateProjectSettings(connection, project, defaultEncodingType);
      connection.commit();
    } catch (SQLException e) {
      throw new ProjectManagerException("Error updating project settings", e);
    } finally {
      DbUtils.closeQuietly(connection);
    }
  }
+
+ private void updateProjectSettings(Connection connection, Project project,
+ EncodingType encType) throws ProjectManagerException {
+ QueryRunner runner = new QueryRunner();
+ final String UPDATE_PROJECT_SETTINGS =
+ "UPDATE projects SET enc_type=?, settings_blob=? WHERE id=?";
+
+ String json = JSONUtils.toJSON(project.toObject());
+ byte[] data = null;
+ try {
+ byte[] stringData = json.getBytes("UTF-8");
+ data = stringData;
+
+ if (encType == EncodingType.GZIP) {
+ data = GZIPUtils.gzipBytes(stringData);
+ }
+ logger.debug("NumChars: " + json.length() + " UTF-8:" + stringData.length
+ + " Gzip:" + data.length);
+ } catch (IOException e) {
+ throw new ProjectManagerException("Failed to encode. ", e);
+ }
+
+ try {
+ runner.update(connection, UPDATE_PROJECT_SETTINGS, encType.getNumVal(),
+ data, project.getId());
+ connection.commit();
+ } catch (SQLException e) {
+ throw new ProjectManagerException("Error updating project "
+ + project.getName() + " version " + project.getVersion(), e);
+ }
+ }
+
+ @Override
+ public void removePermission(Project project, String name, boolean isGroup)
+ throws ProjectManagerException {
+ QueryRunner runner = createQueryRunner();
+ final String DELETE_PROJECT_PERMISSION =
+ "DELETE FROM project_permissions WHERE project_id=? AND name=? AND isGroup=?";
+
+ try {
+ runner.update(DELETE_PROJECT_PERMISSION, project.getId(), name, isGroup);
+ } catch (SQLException e) {
+ logger.error(e);
+ throw new ProjectManagerException("Error deleting project "
+ + project.getName() + " permissions for " + name, e);
+ }
+
+ if (isGroup) {
+ project.removeGroupPermission(name);
+ } else {
+ project.removeUserPermission(name);
+ }
+ }
+
+ @Override
+ public List<Triple<String, Boolean, Permission>> getProjectPermissions(
+ int projectId) throws ProjectManagerException {
+ ProjectPermissionsResultHandler permHander =
+ new ProjectPermissionsResultHandler();
+ QueryRunner runner = createQueryRunner();
+ List<Triple<String, Boolean, Permission>> permissions = null;
+ try {
+ permissions =
+ runner.query(
+ ProjectPermissionsResultHandler.SELECT_PROJECT_PERMISSION,
+ permHander, projectId);
+ } catch (SQLException e) {
+ throw new ProjectManagerException("Query for permissions for "
+ + projectId + " failed.", e);
+ }
+
+ return permissions;
+ }
+
+ @Override
+ public void removeProject(Project project, String user)
+ throws ProjectManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ long updateTime = System.currentTimeMillis();
+ final String UPDATE_INACTIVE_PROJECT =
+ "UPDATE projects SET active=false,modified_time=?,last_modified_by=? WHERE id=?";
+ try {
+ runner.update(UPDATE_INACTIVE_PROJECT, updateTime, user, project.getId());
+ } catch (SQLException e) {
+ logger.error(e);
+ throw new ProjectManagerException("Error marking project "
+ + project.getName() + " as inactive", e);
+ }
+ }
+
+ @Override
+ public boolean postEvent(Project project, EventType type, String user,
+ String message) {
+ QueryRunner runner = createQueryRunner();
+
+ final String INSERT_PROJECT_EVENTS =
+ "INSERT INTO project_events (project_id, event_type, event_time, username, message) values (?,?,?,?,?)";
+ long updateTime = System.currentTimeMillis();
+ try {
+ runner.update(INSERT_PROJECT_EVENTS, project.getId(), type.getNumVal(),
+ updateTime, user, message);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ return false;
+ }
+
+ return true;
+ }
+
+ /**
+ * Get all the logs for a given project
+ *
+ * @param project
+ * @return
+ * @throws ProjectManagerException
+ */
+ public List<ProjectLogEvent> getProjectEvents(Project project, int num,
+ int skip) throws ProjectManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ ProjectLogsResultHandler logHandler = new ProjectLogsResultHandler();
+ List<ProjectLogEvent> events = null;
+ try {
+ events =
+ runner.query(ProjectLogsResultHandler.SELECT_PROJECT_EVENTS_ORDER,
+ logHandler, project.getId(), num, skip);
+ } catch (SQLException e) {
+ logger.error(e);
+ }
+
+ return events;
+ }
+
+ @Override
+ public void updateDescription(Project project, String description, String user)
+ throws ProjectManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ final String UPDATE_PROJECT_DESCRIPTION =
+ "UPDATE projects SET description=?,modified_time=?,last_modified_by=? WHERE id=?";
+ long updateTime = System.currentTimeMillis();
+ try {
+ runner.update(UPDATE_PROJECT_DESCRIPTION, description, updateTime, user,
+ project.getId());
+ project.setDescription(description);
+ project.setLastModifiedTimestamp(updateTime);
+ project.setLastModifiedUser(user);
+ } catch (SQLException e) {
+ logger.error(e);
+ throw new ProjectManagerException("Error marking project "
+ + project.getName() + " as inactive", e);
+ }
+ }
+
+ @Override
+ public int getLatestProjectVersion(Project project)
+ throws ProjectManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ IntHander handler = new IntHander();
+ try {
+ return runner.query(IntHander.SELECT_LATEST_VERSION, handler,
+ project.getId());
+ } catch (SQLException e) {
+ logger.error(e);
+ throw new ProjectManagerException("Error marking project "
+ + project.getName() + " as inactive", e);
+ }
+ }
+
  /**
   * Uploads a collection of flows for the given project version in a single
   * transaction: one insert per flow, one commit at the end.
   *
   * @throws ProjectManagerException if serialization or any insert/commit
   *           fails.
   */
  @Override
  public void uploadFlows(Project project, int version, Collection<Flow> flows)
      throws ProjectManagerException {
    // We do one at a time instead of batch... because well, the batch could be
    // large.
    logger.info("Uploading flows");
    Connection connection = getConnection();

    try {
      for (Flow flow : flows) {
        uploadFlow(connection, project, version, flow, defaultEncodingType);
      }
      connection.commit();
    } catch (IOException e) {
      throw new ProjectManagerException("Flow Upload failed.", e);
    } catch (SQLException e) {
      throw new ProjectManagerException("Flow Upload failed.", e);
    } finally {
      DbUtils.closeQuietly(connection);
    }
  }
+
  /**
   * Uploads a single flow for the given project version and commits it.
   *
   * @throws ProjectManagerException if serialization, the insert, or the
   *           commit fails.
   */
  @Override
  public void uploadFlow(Project project, int version, Flow flow)
      throws ProjectManagerException {
    logger.info("Uploading flows");
    Connection connection = getConnection();

    try {
      uploadFlow(connection, project, version, flow, defaultEncodingType);
      connection.commit();
    } catch (IOException e) {
      throw new ProjectManagerException("Flow Upload failed.", e);
    } catch (SQLException e) {
      throw new ProjectManagerException("Flow Upload failed commit.", e);
    } finally {
      DbUtils.closeQuietly(connection);
    }
  }
+
+ @Override
+ public void updateFlow(Project project, int version, Flow flow)
+ throws ProjectManagerException {
+ logger.info("Uploading flows");
+ Connection connection = getConnection();
+
+ try {
+ QueryRunner runner = new QueryRunner();
+ String json = JSONUtils.toJSON(flow.toObject());
+ byte[] stringData = json.getBytes("UTF-8");
+ byte[] data = stringData;
+
+ logger.info("UTF-8 size:" + data.length);
+ if (defaultEncodingType == EncodingType.GZIP) {
+ data = GZIPUtils.gzipBytes(stringData);
+ }
+
+ logger.info("Flow upload " + flow.getId() + " is byte size "
+ + data.length);
+ final String UPDATE_FLOW =
+ "UPDATE project_flows SET encoding_type=?,json=? WHERE project_id=? AND version=? AND flow_id=?";
+ try {
+ runner.update(connection, UPDATE_FLOW, defaultEncodingType.getNumVal(),
+ data, project.getId(), version, flow.getId());
+ } catch (SQLException e) {
+ e.printStackTrace();
+ throw new ProjectManagerException("Error inserting flow "
+ + flow.getId(), e);
+ }
+ connection.commit();
+ } catch (IOException e) {
+ throw new ProjectManagerException("Flow Upload failed.", e);
+ } catch (SQLException e) {
+ throw new ProjectManagerException("Flow Upload failed commit.", e);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+ }
+
  /** Returns the encoding type used when persisting flow/property/settings blobs. */
  public EncodingType getDefaultEncodingType() {
    return defaultEncodingType;
  }
+
  /** Sets the encoding type used for subsequent blob writes (e.g. PLAIN or GZIP). */
  public void setDefaultEncodingType(EncodingType defaultEncodingType) {
    this.defaultEncodingType = defaultEncodingType;
  }
+
+ private void uploadFlow(Connection connection, Project project, int version,
+ Flow flow, EncodingType encType) throws ProjectManagerException,
+ IOException {
+ QueryRunner runner = new QueryRunner();
+ String json = JSONUtils.toJSON(flow.toObject());
+ byte[] stringData = json.getBytes("UTF-8");
+ byte[] data = stringData;
+
+ logger.info("UTF-8 size:" + data.length);
+ if (encType == EncodingType.GZIP) {
+ data = GZIPUtils.gzipBytes(stringData);
+ }
+
+ logger.info("Flow upload " + flow.getId() + " is byte size " + data.length);
+ final String INSERT_FLOW =
+ "INSERT INTO project_flows (project_id, version, flow_id, modified_time, encoding_type, json) values (?,?,?,?,?,?)";
+ try {
+ runner.update(connection, INSERT_FLOW, project.getId(), version,
+ flow.getId(), System.currentTimeMillis(), encType.getNumVal(), data);
+ } catch (SQLException e) {
+ throw new ProjectManagerException("Error inserting flow " + flow.getId(),
+ e);
+ }
+ }
+
+ @Override
+ public Flow fetchFlow(Project project, String flowId)
+ throws ProjectManagerException {
+ QueryRunner runner = createQueryRunner();
+ ProjectFlowsResultHandler handler = new ProjectFlowsResultHandler();
+
+ try {
+ List<Flow> flows =
+ runner.query(ProjectFlowsResultHandler.SELECT_PROJECT_FLOW, handler,
+ project.getId(), project.getVersion(), flowId);
+ if (flows.isEmpty()) {
+ return null;
+ } else {
+ return flows.get(0);
+ }
+ } catch (SQLException e) {
+ throw new ProjectManagerException("Error fetching flow " + flowId, e);
+ }
+ }
+
+ @Override
+ public List<Flow> fetchAllProjectFlows(Project project)
+ throws ProjectManagerException {
+ QueryRunner runner = createQueryRunner();
+ ProjectFlowsResultHandler handler = new ProjectFlowsResultHandler();
+
+ List<Flow> flows = null;
+ try {
+ flows =
+ runner.query(ProjectFlowsResultHandler.SELECT_ALL_PROJECT_FLOWS,
+ handler, project.getId(), project.getVersion());
+ } catch (SQLException e) {
+ throw new ProjectManagerException("Error fetching flows from project "
+ + project.getName() + " version " + project.getVersion(), e);
+ }
+
+ return flows;
+ }
+
  /**
   * Inserts each property file (keyed by its source name) for the project's
   * current version, committing once at the end.
   *
   * @throws ProjectManagerException if encoding or any insert/commit fails.
   */
  @Override
  public void uploadProjectProperties(Project project, List<Props> properties)
      throws ProjectManagerException {
    Connection connection = getConnection();

    try {
      for (Props props : properties) {
        uploadProjectProperty(connection, project, props.getSource(), props);
      }
      connection.commit();
    } catch (SQLException e) {
      throw new ProjectManagerException(
          "Error uploading project property files", e);
    } catch (IOException e) {
      throw new ProjectManagerException(
          "Error uploading project property files", e);
    } finally {
      DbUtils.closeQuietly(connection);
    }
  }
+
  /**
   * Inserts a single property file (keyed by its source name) for the
   * project's current version and commits.
   *
   * @throws ProjectManagerException if encoding or the insert/commit fails.
   */
  @Override
  public void uploadProjectProperty(Project project, Props props)
      throws ProjectManagerException {
    Connection connection = getConnection();
    try {
      uploadProjectProperty(connection, project, props.getSource(), props);
      connection.commit();
    } catch (SQLException e) {
      throw new ProjectManagerException(
          "Error uploading project property files", e);
    } catch (IOException e) {
      throw new ProjectManagerException(
          "Error uploading project property file", e);
    } finally {
      DbUtils.closeQuietly(connection);
    }
  }
+
  /**
   * Overwrites an existing property blob (keyed by its source name) for the
   * project's current version and commits.
   *
   * @throws ProjectManagerException if encoding or the update/commit fails.
   */
  @Override
  public void updateProjectProperty(Project project, Props props)
      throws ProjectManagerException {
    Connection connection = getConnection();
    try {
      updateProjectProperty(connection, project, props.getSource(), props);
      connection.commit();
    } catch (SQLException e) {
      throw new ProjectManagerException(
          "Error uploading project property files", e);
    } catch (IOException e) {
      throw new ProjectManagerException(
          "Error uploading project property file", e);
    } finally {
      DbUtils.closeQuietly(connection);
    }
  }
+
+ private void updateProjectProperty(Connection connection, Project project,
+ String name, Props props) throws ProjectManagerException, IOException {
+ QueryRunner runner = new QueryRunner();
+ final String UPDATE_PROPERTIES =
+ "UPDATE project_properties SET property=? WHERE project_id=? AND version=? AND name=?";
+
+ String propertyJSON = PropsUtils.toJSONString(props, true);
+ byte[] data = propertyJSON.getBytes("UTF-8");
+ logger.info("UTF-8 size:" + data.length);
+ if (defaultEncodingType == EncodingType.GZIP) {
+ data = GZIPUtils.gzipBytes(data);
+ }
+
+ try {
+ runner.update(connection, UPDATE_PROPERTIES, data, project.getId(),
+ project.getVersion(), name);
+ connection.commit();
+ } catch (SQLException e) {
+ throw new ProjectManagerException("Error updating property "
+ + project.getName() + " version " + project.getVersion(), e);
+ }
+ }
+
+ private void uploadProjectProperty(Connection connection, Project project,
+ String name, Props props) throws ProjectManagerException, IOException {
+ QueryRunner runner = new QueryRunner();
+ final String INSERT_PROPERTIES =
+ "INSERT INTO project_properties (project_id, version, name, modified_time, encoding_type, property) values (?,?,?,?,?,?)";
+
+ String propertyJSON = PropsUtils.toJSONString(props, true);
+ byte[] data = propertyJSON.getBytes("UTF-8");
+ logger.info("UTF-8 size:" + data.length);
+ if (defaultEncodingType == EncodingType.GZIP) {
+ data = GZIPUtils.gzipBytes(data);
+ }
+
+ try {
+ runner.update(connection, INSERT_PROPERTIES, project.getId(),
+ project.getVersion(), name, System.currentTimeMillis(),
+ defaultEncodingType.getNumVal(), data);
+ connection.commit();
+ } catch (SQLException e) {
+ throw new ProjectManagerException("Error uploading project properties "
+ + name + " into " + project.getName() + " version "
+ + project.getVersion(), e);
+ }
+ }
+
+ @Override
+ public Props fetchProjectProperty(int projectId, int projectVer,
+ String propsName) throws ProjectManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ ProjectPropertiesResultsHandler handler =
+ new ProjectPropertiesResultsHandler();
+ try {
+ List<Pair<String, Props>> properties =
+ runner.query(ProjectPropertiesResultsHandler.SELECT_PROJECT_PROPERTY,
+ handler, projectId, projectVer, propsName);
+
+ if (properties == null || properties.isEmpty()) {
+ return null;
+ }
+
+ return properties.get(0).getSecond();
+ } catch (SQLException e) {
+ throw new ProjectManagerException("Error fetching property " + propsName,
+ e);
+ }
+ }
+
+ @Override
+ public Props fetchProjectProperty(Project project, String propsName)
+ throws ProjectManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ ProjectPropertiesResultsHandler handler =
+ new ProjectPropertiesResultsHandler();
+ try {
+ List<Pair<String, Props>> properties =
+ runner.query(ProjectPropertiesResultsHandler.SELECT_PROJECT_PROPERTY,
+ handler, project.getId(), project.getVersion(), propsName);
+
+ if (properties == null || properties.isEmpty()) {
+ return null;
+ }
+
+ return properties.get(0).getSecond();
+ } catch (SQLException e) {
+ throw new ProjectManagerException("Error fetching property " + propsName,
+ e);
+ }
+ }
+
  /**
   * Removes flow, property, and file data for all versions of the project
   * older than the given one, then zeroes the chunk count on the affected
   * version rows. Each step commits independently.
   *
   * @throws ProjectManagerException if any of the cleanup steps fails.
   */
  @Override
  public void cleanOlderProjectVersion(int projectId, int version)
      throws ProjectManagerException {
    Connection connection = getConnection();

    try {
      cleanOlderProjectVersionFlows(connection, projectId, version);
      cleanOlderProjectVersionProperties(connection, projectId, version);
      cleanOlderProjectFiles(connection, projectId, version);
      cleanOlderProjectVersion(connection, projectId, version);
    } finally {
      DbUtils.closeQuietly(connection);
    }
  }
+
+ private void cleanOlderProjectVersionFlows(Connection connection,
+ int projectId, int version) throws ProjectManagerException {
+ final String DELETE_FLOW =
+ "DELETE FROM project_flows WHERE project_id=? AND version<?";
+ QueryRunner runner = new QueryRunner();
+ try {
+ runner.update(connection, DELETE_FLOW, projectId, version);
+ connection.commit();
+ } catch (SQLException e) {
+ throw new ProjectManagerException("Error deleting project version flows "
+ + projectId + ":" + version, e);
+ }
+ }
+
+ private void cleanOlderProjectVersionProperties(Connection connection,
+ int projectId, int version) throws ProjectManagerException {
+ final String DELETE_PROPERTIES =
+ "DELETE FROM project_properties WHERE project_id=? AND version<?";
+ QueryRunner runner = new QueryRunner();
+ try {
+ runner.update(connection, DELETE_PROPERTIES, projectId, version);
+ connection.commit();
+ } catch (SQLException e) {
+ throw new ProjectManagerException(
+ "Error deleting project version properties " + projectId + ":"
+ + version, e);
+ }
+ }
+
+ private void cleanOlderProjectFiles(Connection connection, int projectId,
+ int version) throws ProjectManagerException {
+ final String DELETE_PROJECT_FILES =
+ "DELETE FROM project_files WHERE project_id=? AND version<?";
+ QueryRunner runner = new QueryRunner();
+ try {
+ runner.update(connection, DELETE_PROJECT_FILES, projectId, version);
+ connection.commit();
+ } catch (SQLException e) {
+ throw new ProjectManagerException("Error deleting project version files "
+ + projectId + ":" + version, e);
+ }
+ }
+
+ private void cleanOlderProjectVersion(Connection connection, int projectId,
+ int version) throws ProjectManagerException {
+ final String UPDATE_PROJECT_VERSIONS =
+ "UPDATE project_versions SET num_chunks=0 WHERE project_id=? AND version<?";
+ QueryRunner runner = new QueryRunner();
+ try {
+ runner.update(connection, UPDATE_PROJECT_VERSIONS, projectId, version);
+ connection.commit();
+ } catch (SQLException e) {
+ throw new ProjectManagerException(
+ "Error updating project version chunksize " + projectId + ":"
+ + version, e);
+ }
+ }
+
+ @Override
+ public Map<String, Props> fetchProjectProperties(int projectId, int version)
+ throws ProjectManagerException {
+ QueryRunner runner = createQueryRunner();
+
+ ProjectPropertiesResultsHandler handler =
+ new ProjectPropertiesResultsHandler();
+ try {
+ List<Pair<String, Props>> properties =
+ runner.query(
+ ProjectPropertiesResultsHandler.SELECT_PROJECT_PROPERTIES,
+ handler, projectId, version);
+
+ if (properties == null || properties.isEmpty()) {
+ return null;
+ }
+
+ HashMap<String, Props> props = new HashMap<String, Props>();
+ for (Pair<String, Props> pair : properties) {
+ props.put(pair.getFirst(), pair.getSecond());
+ }
+ return props;
+ } catch (SQLException e) {
+ throw new ProjectManagerException("Error fetching properties", e);
+ }
+ }
+
  /**
   * Maps rows of the projects table to Project objects. If a row carries a
   * settings_blob, the Project is rebuilt from that (possibly gzipped) JSON
   * blob; otherwise a bare Project(id, name) is created. In both cases the
   * scalar row columns then overwrite the corresponding Project fields.
   */
  private static class ProjectResultHandler implements
      ResultSetHandler<List<Project>> {
    private static String SELECT_PROJECT_BY_ID =
        "SELECT id, name, active, modified_time, create_time, version, last_modified_by, description, enc_type, settings_blob FROM projects WHERE id=?";

    private static String SELECT_ALL_ACTIVE_PROJECTS =
        "SELECT id, name, active, modified_time, create_time, version, last_modified_by, description, enc_type, settings_blob FROM projects WHERE active=true";

    private static String SELECT_ACTIVE_PROJECT_BY_NAME =
        "SELECT id, name, active, modified_time, create_time, version, last_modified_by, description, enc_type, settings_blob FROM projects WHERE name=? AND active=true";

    @Override
    public List<Project> handle(ResultSet rs) throws SQLException {
      // Empty result set -> empty list, never null.
      if (!rs.next()) {
        return Collections.<Project> emptyList();
      }

      ArrayList<Project> projects = new ArrayList<Project>();
      do {
        int id = rs.getInt(1);
        String name = rs.getString(2);
        boolean active = rs.getBoolean(3);
        long modifiedTime = rs.getLong(4);
        long createTime = rs.getLong(5);
        int version = rs.getInt(6);
        String lastModifiedBy = rs.getString(7);
        String description = rs.getString(8);
        int encodingType = rs.getInt(9);
        byte[] data = rs.getBytes(10);

        Project project;
        if (data != null) {
          EncodingType encType = EncodingType.fromInteger(encodingType);
          Object blobObj;
          try {
            // Convoluted way to inflate strings. Should find common package or
            // helper function.
            if (encType == EncodingType.GZIP) {
              // Decompress the sucker.
              String jsonString = GZIPUtils.unGzipString(data, "UTF-8");
              blobObj = JSONUtils.parseJSONFromString(jsonString);
            } else {
              String jsonString = new String(data, "UTF-8");
              blobObj = JSONUtils.parseJSONFromString(jsonString);
            }
            project = Project.projectFromObject(blobObj);
          } catch (IOException e) {
            // Surface decode failures as SQLException so dbutils propagates
            // them to the caller of query().
            throw new SQLException("Failed to get project.", e);
          }
        } else {
          project = new Project(id, name);
        }

        // update the fields as they may have changed

        project.setActive(active);
        project.setLastModifiedTimestamp(modifiedTime);
        project.setCreateTimestamp(createTime);
        project.setVersion(version);
        project.setLastModifiedUser(lastModifiedBy);
        project.setDescription(description);

        projects.add(project);
      } while (rs.next());

      return projects;
    }
  }
+
+ private static class ProjectPermissionsResultHandler implements
+ ResultSetHandler<List<Triple<String, Boolean, Permission>>> {
+ private static String SELECT_PROJECT_PERMISSION =
+ "SELECT project_id, modified_time, name, permissions, isGroup FROM project_permissions WHERE project_id=?";
+
+ @Override
+ public List<Triple<String, Boolean, Permission>> handle(ResultSet rs)
+ throws SQLException {
+ if (!rs.next()) {
+ return Collections.<Triple<String, Boolean, Permission>> emptyList();
+ }
+
+ ArrayList<Triple<String, Boolean, Permission>> permissions =
+ new ArrayList<Triple<String, Boolean, Permission>>();
+ do {
+ // int project_id = rs.getInt(1);
+ // long modifiedTime = rs.getLong(2);
+ String username = rs.getString(3);
+ int permissionFlag = rs.getInt(4);
+ boolean val = rs.getBoolean(5);
+
+ Permission perm = new Permission(permissionFlag);
+ permissions.add(new Triple<String, Boolean, Permission>(username, val,
+ perm));
+ } while (rs.next());
+
+ return permissions;
+ }
+ }
+
+ private static class ProjectFlowsResultHandler implements
+ ResultSetHandler<List<Flow>> {
+ private static String SELECT_PROJECT_FLOW =
+ "SELECT project_id, version, flow_id, modified_time, encoding_type, json FROM project_flows WHERE project_id=? AND version=? AND flow_id=?";
+
+ private static String SELECT_ALL_PROJECT_FLOWS =
+ "SELECT project_id, version, flow_id, modified_time, encoding_type, json FROM project_flows WHERE project_id=? AND version=?";
+
+ @Override
+ public List<Flow> handle(ResultSet rs) throws SQLException {
+ if (!rs.next()) {
+ return Collections.<Flow> emptyList();
+ }
+
+ ArrayList<Flow> flows = new ArrayList<Flow>();
+ do {
+ // int projectId = rs.getInt(1);
+ // int version = rs.getInt(2);
+ String flowId = rs.getString(3);
+ // long modifiedTime = rs.getLong(4);
+ int encodingType = rs.getInt(5);
+ byte[] dataBytes = rs.getBytes(6);
+
+ if (dataBytes == null) {
+ continue;
+ }
+
+ EncodingType encType = EncodingType.fromInteger(encodingType);
+
+ Object flowObj = null;
+ try {
+ // Convoluted way to inflate strings. Should find common package or
+ // helper function.
+ if (encType == EncodingType.GZIP) {
+ // Decompress the sucker.
+ String jsonString = GZIPUtils.unGzipString(dataBytes, "UTF-8");
+ flowObj = JSONUtils.parseJSONFromString(jsonString);
+ } else {
+ String jsonString = new String(dataBytes, "UTF-8");
+ flowObj = JSONUtils.parseJSONFromString(jsonString);
+ }
+
+ Flow flow = Flow.flowFromObject(flowObj);
+ flows.add(flow);
+ } catch (IOException e) {
+ throw new SQLException("Error retrieving flow data " + flowId, e);
+ }
+
+ } while (rs.next());
+
+ return flows;
+ }
+ }
+
+ private static class ProjectPropertiesResultsHandler implements
+ ResultSetHandler<List<Pair<String, Props>>> {
+ private static String SELECT_PROJECT_PROPERTY =
+ "SELECT project_id, version, name, modified_time, encoding_type, property FROM project_properties WHERE project_id=? AND version=? AND name=?";
+
+ private static String SELECT_PROJECT_PROPERTIES =
+ "SELECT project_id, version, name, modified_time, encoding_type, property FROM project_properties WHERE project_id=? AND version=?";
+
+ @Override
+ public List<Pair<String, Props>> handle(ResultSet rs) throws SQLException {
+ if (!rs.next()) {
+ return Collections.<Pair<String, Props>> emptyList();
+ }
+
+ List<Pair<String, Props>> properties =
+ new ArrayList<Pair<String, Props>>();
+ do {
+ // int projectId = rs.getInt(1);
+ // int version = rs.getInt(2);
+ String name = rs.getString(3);
+ // long modifiedTime = rs.getLong(4);
+ int eventType = rs.getInt(5);
+ byte[] dataBytes = rs.getBytes(6);
+
+ EncodingType encType = EncodingType.fromInteger(eventType);
+ String propertyString = null;
+
+ try {
+ if (encType == EncodingType.GZIP) {
+ // Decompress the sucker.
+ propertyString = GZIPUtils.unGzipString(dataBytes, "UTF-8");
+ } else {
+ propertyString = new String(dataBytes, "UTF-8");
+ }
+
+ Props props = PropsUtils.fromJSONString(propertyString);
+ props.setSource(name);
+ properties.add(new Pair<String, Props>(name, props));
+ } catch (IOException e) {
+ throw new SQLException(e);
+ }
+ } while (rs.next());
+
+ return properties;
+ }
+ }
+
+ private static class ProjectLogsResultHandler implements
+ ResultSetHandler<List<ProjectLogEvent>> {
+ private static String SELECT_PROJECT_EVENTS_ORDER =
+ "SELECT project_id, event_type, event_time, username, message FROM project_events WHERE project_id=? ORDER BY event_time DESC LIMIT ? OFFSET ?";
+
+ @Override
+ public List<ProjectLogEvent> handle(ResultSet rs) throws SQLException {
+ if (!rs.next()) {
+ return Collections.<ProjectLogEvent> emptyList();
+ }
+
+ ArrayList<ProjectLogEvent> events = new ArrayList<ProjectLogEvent>();
+ do {
+ int projectId = rs.getInt(1);
+ int eventType = rs.getInt(2);
+ long eventTime = rs.getLong(3);
+ String username = rs.getString(4);
+ String message = rs.getString(5);
+
+ ProjectLogEvent event =
+ new ProjectLogEvent(projectId, EventType.fromInteger(eventType),
+ eventTime, username, message);
+ events.add(event);
+ } while (rs.next());
+
+ return events;
+ }
+ }
+
+ private static class ProjectFileChunkResultHandler implements
+ ResultSetHandler<List<byte[]>> {
+ private static String SELECT_PROJECT_CHUNKS_FILE =
+ "SELECT project_id, version, chunk, size, file FROM project_files WHERE project_id=? AND version=? AND chunk >= ? AND chunk < ? ORDER BY chunk ASC";
+
+ @Override
+ public List<byte[]> handle(ResultSet rs) throws SQLException {
+ if (!rs.next()) {
+ return Collections.<byte[]> emptyList();
+ }
+
+ ArrayList<byte[]> data = new ArrayList<byte[]>();
+ do {
+ // int project_id = rs.getInt(1);
+ // int version = rs.getInt(2);
+ // int chunk = rs.getInt(3);
+ // int size = rs.getInt(4);
+ byte[] bytes = rs.getBytes(5);
+
+ data.add(bytes);
+ } while (rs.next());
+
+ return data;
+ }
+
+ }
+
+ private static class ProjectVersionResultHandler implements
+ ResultSetHandler<List<ProjectFileHandler>> {
+ private static String SELECT_PROJECT_VERSION =
+ "SELECT project_id, version, upload_time, uploader, file_type, file_name, md5, num_chunks FROM project_versions WHERE project_id=? AND version=?";
+
+ // private static String SELECT_ALL_PER_PROJECT =
+ // "SELECT project_id, version, upload_time, uploader, file_type, file_name, md5, num_chunks FROM project_versions WHERE project_id=?";
+
+ @Override
+ public List<ProjectFileHandler> handle(ResultSet rs) throws SQLException {
+ if (!rs.next()) {
+ return null;
+ }
+
+ List<ProjectFileHandler> handlers = new ArrayList<ProjectFileHandler>();
+ do {
+ int projectId = rs.getInt(1);
+ int version = rs.getInt(2);
+ long uploadTime = rs.getLong(3);
+ String uploader = rs.getString(4);
+ String fileType = rs.getString(5);
+ String fileName = rs.getString(6);
+ byte[] md5 = rs.getBytes(7);
+ int numChunks = rs.getInt(8);
+
+ ProjectFileHandler handler =
+ new ProjectFileHandler(projectId, version, uploadTime, uploader,
+ fileType, fileName, numChunks, md5);
+
+ handlers.add(handler);
+ } while (rs.next());
+
+ return handlers;
+ }
+ }
+
+ private static class IntHander implements ResultSetHandler<Integer> {
+ private static String SELECT_LATEST_VERSION =
+ "SELECT MAX(version) FROM project_versions WHERE project_id=?";
+
+ @Override
+ public Integer handle(ResultSet rs) throws SQLException {
+ if (!rs.next()) {
+ return 0;
+ }
+
+ return rs.getInt(1);
+ }
+ }
+
+ private Connection getConnection() throws ProjectManagerException {
+ Connection connection = null;
+ try {
+ connection = super.getDBConnection(false);
+ } catch (Exception e) {
+ DbUtils.closeQuietly(connection);
+ throw new ProjectManagerException("Error getting DB connection.", e);
+ }
+
+ return connection;
+ }
}
-
src/main/java/azkaban/project/Project.java 804(+406 -398)
diff --git a/src/main/java/azkaban/project/Project.java b/src/main/java/azkaban/project/Project.java
index e3e0cde..0a39709 100644
--- a/src/main/java/azkaban/project/Project.java
+++ b/src/main/java/azkaban/project/Project.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -32,399 +32,407 @@ import azkaban.user.User;
import azkaban.utils.Pair;
public class Project {
- private final int id;
- private final String name;
- private boolean active = true;
- private String description;
- private int version = -1;
- private long createTimestamp;
- private long lastModifiedTimestamp;
- private String lastModifiedUser;
- private String source;
- private LinkedHashMap<String, Permission> userPermissionMap = new LinkedHashMap<String, Permission>();
- private LinkedHashMap<String, Permission> groupPermissionMap = new LinkedHashMap<String, Permission>();
- private Map<String, Flow> flows = null;
- private HashSet<String> proxyUsers = new HashSet<String>();
- private Map<String, Object> metadata = new HashMap<String, Object>();
-
- public Project(int id, String name) {
- this.id = id;
- this.name = name;
- }
-
- public String getName() {
- return name;
- }
-
- public void setFlows(Map<String, Flow> flows) {
- this.flows = flows;
- }
-
- public Flow getFlow(String flowId) {
- if (flows == null) {
- return null;
- }
-
- return flows.get(flowId);
- }
-
- public Map<String, Flow> getFlowMap() {
- return flows;
- }
-
- public List<Flow> getFlows() {
- List<Flow> retFlow = null;
- if (flows != null) {
- retFlow = new ArrayList<Flow>(flows.values());
- }
- else {
- retFlow = new ArrayList<Flow>();
- }
- return retFlow;
- }
-
- public Permission getCollectivePermission(User user) {
- Permission permissions = new Permission();
- Permission perm = userPermissionMap.get(user.getUserId());
- if (perm != null) {
- permissions.addPermissions(perm);
- }
-
- for(String group: user.getGroups()) {
- perm = groupPermissionMap.get(group);
- if (perm != null) {
- permissions.addPermissions(perm);
- }
- }
-
- return permissions;
- }
-
- public Set<String> getProxyUsers() {
- return new HashSet<String>(proxyUsers);
- }
-
- public void addAllProxyUsers(Collection<String> proxyUsers) {
- this.proxyUsers.addAll(proxyUsers);
- }
-
- public boolean hasProxyUser(String proxy) {
- return this.proxyUsers.contains(proxy);
- }
-
- public void addProxyUser(String user) {
- this.proxyUsers.add(user);
- }
-
- public void removeProxyUser(String user) {
- this.proxyUsers.remove(user);
- }
-
- public boolean hasPermission(User user, Type type) {
- Permission perm = userPermissionMap.get(user.getUserId());
- if (perm != null && (perm.isPermissionSet(Type.ADMIN) || perm.isPermissionSet(type))) {
- return true;
- }
-
- return hasGroupPermission(user, type);
- }
-
- public boolean hasUserPermission(User user, Type type) {
- Permission perm = userPermissionMap.get(user.getUserId());
- if (perm == null) {
- // Check group
- return false;
- }
-
- if (perm.isPermissionSet(Type.ADMIN) || perm.isPermissionSet(type)) {
- return true;
- }
-
- return false;
- }
-
- public boolean hasGroupPermission(User user, Type type) {
- for(String group: user.getGroups()) {
- Permission perm = groupPermissionMap.get(group);
- if (perm != null) {
- if (perm.isPermissionSet(Type.ADMIN) || perm.isPermissionSet(type)) {
- return true;
- }
- }
- }
-
- return false;
- }
-
- public List<String> getUsersWithPermission(Type type) {
- ArrayList<String> users = new ArrayList<String>();
- for (Map.Entry<String, Permission> entry : userPermissionMap.entrySet()) {
- Permission perm = entry.getValue();
- if (perm.isPermissionSet(type)) {
- users.add(entry.getKey());
- }
- }
- return users;
- }
-
- public List<Pair<String, Permission>> getUserPermissions() {
- ArrayList<Pair<String, Permission>> permissions = new ArrayList<Pair<String, Permission>>();
-
- for (Map.Entry<String, Permission> entry : userPermissionMap.entrySet()) {
- permissions.add(new Pair<String, Permission>(entry.getKey(), entry.getValue()));
- }
-
- return permissions;
- }
-
- public List<Pair<String, Permission>> getGroupPermissions() {
- ArrayList<Pair<String, Permission>> permissions = new ArrayList<Pair<String, Permission>>();
-
- for (Map.Entry<String, Permission> entry : groupPermissionMap.entrySet()) {
- permissions.add(new Pair<String, Permission>(entry.getKey(), entry.getValue()));
- }
-
- return permissions;
- }
-
- public void setDescription(String description) {
- this.description = description;
- }
-
- public String getDescription() {
- return description;
- }
-
- public void setUserPermission(String userid, Permission perm) {
- userPermissionMap.put(userid, perm);
- }
-
- public void setGroupPermission(String group, Permission perm) {
- groupPermissionMap.put(group, perm);
- }
-
- public Permission getUserPermission(User user) {
- return userPermissionMap.get(user.getUserId());
- }
-
- public Permission getGroupPermission(String group) {
- return groupPermissionMap.get(group);
- }
-
- public Permission getUserPermission(String userID) {
- return userPermissionMap.get(userID);
- }
-
- public void removeGroupPermission(String group) {
- groupPermissionMap.remove(group);
- }
-
- public void removeUserPermission(String userId) {
- userPermissionMap.remove(userId);
- }
-
- public void clearUserPermission() {
- userPermissionMap.clear();
- }
-
- public long getCreateTimestamp() {
- return createTimestamp;
- }
-
- public void setCreateTimestamp(long createTimestamp) {
- this.createTimestamp = createTimestamp;
- }
-
- public long getLastModifiedTimestamp() {
- return lastModifiedTimestamp;
- }
-
- public void setLastModifiedTimestamp(long lastModifiedTimestamp) {
- this.lastModifiedTimestamp = lastModifiedTimestamp;
- }
-
- public Object toObject() {
- HashMap<String, Object> projectObject = new HashMap<String, Object>();
- projectObject.put("id", id);
- projectObject.put("name", name);
- projectObject.put("description", description);
- projectObject.put("createTimestamp", createTimestamp);
- projectObject.put("lastModifiedTimestamp", lastModifiedTimestamp);
- projectObject.put("lastModifiedUser", lastModifiedUser);
- projectObject.put("version", version);
-
- if (!active) {
- projectObject.put("active", false);
- }
-
- if (source != null) {
- projectObject.put("source", source);
- }
-
- if (metadata != null) {
- projectObject.put("metadata", metadata);
- }
-
- ArrayList<String> proxyUserList = new ArrayList<String>(proxyUsers);
- projectObject.put("proxyUsers", proxyUserList);
-
- return projectObject;
- }
-
- @SuppressWarnings("unchecked")
- public static Project projectFromObject(Object object) {
- Map<String, Object> projectObject = (Map<String, Object>) object;
- int id = (Integer) projectObject.get("id");
- String name = (String) projectObject.get("name");
- String description = (String) projectObject.get("description");
- String lastModifiedUser = (String) projectObject.get("lastModifiedUser");
- long createTimestamp = coerceToLong(projectObject.get("createTimestamp"));
- long lastModifiedTimestamp = coerceToLong(projectObject.get("lastModifiedTimestamp"));
- String source = (String)projectObject.get("source");
- Boolean active = (Boolean)projectObject.get("active");
- active = active == null ? true : active;
- int version = (Integer)projectObject.get("version");
- Map<String, Object> metadata = (Map<String, Object>)projectObject.get("metadata");
-
- Project project = new Project(id, name);
- project.setVersion(version);
- project.setDescription(description);
- project.setCreateTimestamp(createTimestamp);
- project.setLastModifiedTimestamp(lastModifiedTimestamp);
- project.setLastModifiedUser(lastModifiedUser);
- project.setActive(active);
-
- if (source != null) {
- project.setSource(source);
- }
- if (metadata != null) {
- project.setMetadata(metadata);
- }
-
- List<String> proxyUserList = (List<String>) projectObject.get("proxyUsers");
- project.addAllProxyUsers(proxyUserList);
-
- return project;
- }
-
- private static long coerceToLong(Object obj) {
- if (obj == null) {
- return 0;
- } else if (obj instanceof Integer) {
- return (Integer) obj;
- }
-
- return (Long) obj;
- }
-
- public String getLastModifiedUser() {
- return lastModifiedUser;
- }
-
- public void setLastModifiedUser(String lastModifiedUser) {
- this.lastModifiedUser = lastModifiedUser;
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + (active ? 1231 : 1237);
- result = prime * result
- + (int) (createTimestamp ^ (createTimestamp >>> 32));
- result = prime * result
- + ((description == null) ? 0 : description.hashCode());
- result = prime * result + id;
- result = prime
- * result
- + (int) (lastModifiedTimestamp ^ (lastModifiedTimestamp >>> 32));
- result = prime
- * result
- + ((lastModifiedUser == null) ? 0 : lastModifiedUser.hashCode());
- result = prime * result + ((name == null) ? 0 : name.hashCode());
- result = prime * result + ((source == null) ? 0 : source.hashCode());
- result = prime * result + version;
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- Project other = (Project) obj;
- if (active != other.active)
- return false;
- if (createTimestamp != other.createTimestamp)
- return false;
- if (description == null) {
- if (other.description != null)
- return false;
- } else if (!description.equals(other.description))
- return false;
- if (id != other.id)
- return false;
- if (lastModifiedTimestamp != other.lastModifiedTimestamp)
- return false;
- if (lastModifiedUser == null) {
- if (other.lastModifiedUser != null)
- return false;
- } else if (!lastModifiedUser.equals(other.lastModifiedUser))
- return false;
- if (name == null) {
- if (other.name != null)
- return false;
- } else if (!name.equals(other.name))
- return false;
- if (source == null) {
- if (other.source != null)
- return false;
- } else if (!source.equals(other.source))
- return false;
- if (version != other.version)
- return false;
- return true;
- }
-
- public String getSource() {
- return source;
- }
-
- public void setSource(String source) {
- this.source = source;
- }
-
- public Map<String, Object> getMetadata() {
- if(metadata == null){
- metadata = new HashMap<String, Object>();
- }
- return metadata;
- }
-
- protected void setMetadata(Map<String, Object> metadata) {
- this.metadata = metadata;
- }
-
- public int getId() {
- return id;
- }
-
- public boolean isActive() {
- return active;
- }
-
- public void setActive(boolean active) {
- this.active = active;
- }
-
- public int getVersion() {
- return version;
- }
-
- public void setVersion(int version) {
- this.version = version;
- }
+ private final int id;
+ private final String name;
+ private boolean active = true;
+ private String description;
+ private int version = -1;
+ private long createTimestamp;
+ private long lastModifiedTimestamp;
+ private String lastModifiedUser;
+ private String source;
+ private LinkedHashMap<String, Permission> userPermissionMap =
+ new LinkedHashMap<String, Permission>();
+ private LinkedHashMap<String, Permission> groupPermissionMap =
+ new LinkedHashMap<String, Permission>();
+ private Map<String, Flow> flows = null;
+ private HashSet<String> proxyUsers = new HashSet<String>();
+ private Map<String, Object> metadata = new HashMap<String, Object>();
+
+ public Project(int id, String name) {
+ this.id = id;
+ this.name = name;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setFlows(Map<String, Flow> flows) {
+ this.flows = flows;
+ }
+
+ public Flow getFlow(String flowId) {
+ if (flows == null) {
+ return null;
+ }
+
+ return flows.get(flowId);
+ }
+
+ public Map<String, Flow> getFlowMap() {
+ return flows;
+ }
+
+ public List<Flow> getFlows() {
+ List<Flow> retFlow = null;
+ if (flows != null) {
+ retFlow = new ArrayList<Flow>(flows.values());
+ } else {
+ retFlow = new ArrayList<Flow>();
+ }
+ return retFlow;
+ }
+
+ public Permission getCollectivePermission(User user) {
+ Permission permissions = new Permission();
+ Permission perm = userPermissionMap.get(user.getUserId());
+ if (perm != null) {
+ permissions.addPermissions(perm);
+ }
+
+ for (String group : user.getGroups()) {
+ perm = groupPermissionMap.get(group);
+ if (perm != null) {
+ permissions.addPermissions(perm);
+ }
+ }
+
+ return permissions;
+ }
+
+ public Set<String> getProxyUsers() {
+ return new HashSet<String>(proxyUsers);
+ }
+
+ public void addAllProxyUsers(Collection<String> proxyUsers) {
+ this.proxyUsers.addAll(proxyUsers);
+ }
+
+ public boolean hasProxyUser(String proxy) {
+ return this.proxyUsers.contains(proxy);
+ }
+
+ public void addProxyUser(String user) {
+ this.proxyUsers.add(user);
+ }
+
+ public void removeProxyUser(String user) {
+ this.proxyUsers.remove(user);
+ }
+
+ public boolean hasPermission(User user, Type type) {
+ Permission perm = userPermissionMap.get(user.getUserId());
+ if (perm != null
+ && (perm.isPermissionSet(Type.ADMIN) || perm.isPermissionSet(type))) {
+ return true;
+ }
+
+ return hasGroupPermission(user, type);
+ }
+
+ public boolean hasUserPermission(User user, Type type) {
+ Permission perm = userPermissionMap.get(user.getUserId());
+ if (perm == null) {
+ // Check group
+ return false;
+ }
+
+ if (perm.isPermissionSet(Type.ADMIN) || perm.isPermissionSet(type)) {
+ return true;
+ }
+
+ return false;
+ }
+
+ public boolean hasGroupPermission(User user, Type type) {
+ for (String group : user.getGroups()) {
+ Permission perm = groupPermissionMap.get(group);
+ if (perm != null) {
+ if (perm.isPermissionSet(Type.ADMIN) || perm.isPermissionSet(type)) {
+ return true;
+ }
+ }
+ }
+
+ return false;
+ }
+
+ public List<String> getUsersWithPermission(Type type) {
+ ArrayList<String> users = new ArrayList<String>();
+ for (Map.Entry<String, Permission> entry : userPermissionMap.entrySet()) {
+ Permission perm = entry.getValue();
+ if (perm.isPermissionSet(type)) {
+ users.add(entry.getKey());
+ }
+ }
+ return users;
+ }
+
+ public List<Pair<String, Permission>> getUserPermissions() {
+ ArrayList<Pair<String, Permission>> permissions =
+ new ArrayList<Pair<String, Permission>>();
+
+ for (Map.Entry<String, Permission> entry : userPermissionMap.entrySet()) {
+ permissions.add(new Pair<String, Permission>(entry.getKey(), entry
+ .getValue()));
+ }
+
+ return permissions;
+ }
+
+ public List<Pair<String, Permission>> getGroupPermissions() {
+ ArrayList<Pair<String, Permission>> permissions =
+ new ArrayList<Pair<String, Permission>>();
+
+ for (Map.Entry<String, Permission> entry : groupPermissionMap.entrySet()) {
+ permissions.add(new Pair<String, Permission>(entry.getKey(), entry
+ .getValue()));
+ }
+
+ return permissions;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public void setUserPermission(String userid, Permission perm) {
+ userPermissionMap.put(userid, perm);
+ }
+
+ public void setGroupPermission(String group, Permission perm) {
+ groupPermissionMap.put(group, perm);
+ }
+
+ public Permission getUserPermission(User user) {
+ return userPermissionMap.get(user.getUserId());
+ }
+
+ public Permission getGroupPermission(String group) {
+ return groupPermissionMap.get(group);
+ }
+
+ public Permission getUserPermission(String userID) {
+ return userPermissionMap.get(userID);
+ }
+
+ public void removeGroupPermission(String group) {
+ groupPermissionMap.remove(group);
+ }
+
+ public void removeUserPermission(String userId) {
+ userPermissionMap.remove(userId);
+ }
+
+ public void clearUserPermission() {
+ userPermissionMap.clear();
+ }
+
+ public long getCreateTimestamp() {
+ return createTimestamp;
+ }
+
+ public void setCreateTimestamp(long createTimestamp) {
+ this.createTimestamp = createTimestamp;
+ }
+
+ public long getLastModifiedTimestamp() {
+ return lastModifiedTimestamp;
+ }
+
+ public void setLastModifiedTimestamp(long lastModifiedTimestamp) {
+ this.lastModifiedTimestamp = lastModifiedTimestamp;
+ }
+
+ public Object toObject() {
+ HashMap<String, Object> projectObject = new HashMap<String, Object>();
+ projectObject.put("id", id);
+ projectObject.put("name", name);
+ projectObject.put("description", description);
+ projectObject.put("createTimestamp", createTimestamp);
+ projectObject.put("lastModifiedTimestamp", lastModifiedTimestamp);
+ projectObject.put("lastModifiedUser", lastModifiedUser);
+ projectObject.put("version", version);
+
+ if (!active) {
+ projectObject.put("active", false);
+ }
+
+ if (source != null) {
+ projectObject.put("source", source);
+ }
+
+ if (metadata != null) {
+ projectObject.put("metadata", metadata);
+ }
+
+ ArrayList<String> proxyUserList = new ArrayList<String>(proxyUsers);
+ projectObject.put("proxyUsers", proxyUserList);
+
+ return projectObject;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static Project projectFromObject(Object object) {
+ Map<String, Object> projectObject = (Map<String, Object>) object;
+ int id = (Integer) projectObject.get("id");
+ String name = (String) projectObject.get("name");
+ String description = (String) projectObject.get("description");
+ String lastModifiedUser = (String) projectObject.get("lastModifiedUser");
+ long createTimestamp = coerceToLong(projectObject.get("createTimestamp"));
+ long lastModifiedTimestamp =
+ coerceToLong(projectObject.get("lastModifiedTimestamp"));
+ String source = (String) projectObject.get("source");
+ Boolean active = (Boolean) projectObject.get("active");
+ active = active == null ? true : active;
+ int version = (Integer) projectObject.get("version");
+ Map<String, Object> metadata =
+ (Map<String, Object>) projectObject.get("metadata");
+
+ Project project = new Project(id, name);
+ project.setVersion(version);
+ project.setDescription(description);
+ project.setCreateTimestamp(createTimestamp);
+ project.setLastModifiedTimestamp(lastModifiedTimestamp);
+ project.setLastModifiedUser(lastModifiedUser);
+ project.setActive(active);
+
+ if (source != null) {
+ project.setSource(source);
+ }
+ if (metadata != null) {
+ project.setMetadata(metadata);
+ }
+
+ List<String> proxyUserList = (List<String>) projectObject.get("proxyUsers");
+ project.addAllProxyUsers(proxyUserList);
+
+ return project;
+ }
+
+ private static long coerceToLong(Object obj) {
+ if (obj == null) {
+ return 0;
+ } else if (obj instanceof Integer) {
+ return (Integer) obj;
+ }
+
+ return (Long) obj;
+ }
+
+ public String getLastModifiedUser() {
+ return lastModifiedUser;
+ }
+
+ public void setLastModifiedUser(String lastModifiedUser) {
+ this.lastModifiedUser = lastModifiedUser;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + (active ? 1231 : 1237);
+ result =
+ prime * result + (int) (createTimestamp ^ (createTimestamp >>> 32));
+ result =
+ prime * result + ((description == null) ? 0 : description.hashCode());
+ result = prime * result + id;
+ result =
+ prime * result
+ + (int) (lastModifiedTimestamp ^ (lastModifiedTimestamp >>> 32));
+ result =
+ prime * result
+ + ((lastModifiedUser == null) ? 0 : lastModifiedUser.hashCode());
+ result = prime * result + ((name == null) ? 0 : name.hashCode());
+ result = prime * result + ((source == null) ? 0 : source.hashCode());
+ result = prime * result + version;
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ Project other = (Project) obj;
+ if (active != other.active)
+ return false;
+ if (createTimestamp != other.createTimestamp)
+ return false;
+ if (description == null) {
+ if (other.description != null)
+ return false;
+ } else if (!description.equals(other.description))
+ return false;
+ if (id != other.id)
+ return false;
+ if (lastModifiedTimestamp != other.lastModifiedTimestamp)
+ return false;
+ if (lastModifiedUser == null) {
+ if (other.lastModifiedUser != null)
+ return false;
+ } else if (!lastModifiedUser.equals(other.lastModifiedUser))
+ return false;
+ if (name == null) {
+ if (other.name != null)
+ return false;
+ } else if (!name.equals(other.name))
+ return false;
+ if (source == null) {
+ if (other.source != null)
+ return false;
+ } else if (!source.equals(other.source))
+ return false;
+ if (version != other.version)
+ return false;
+ return true;
+ }
+
+ public String getSource() {
+ return source;
+ }
+
+ public void setSource(String source) {
+ this.source = source;
+ }
+
+ public Map<String, Object> getMetadata() {
+ if (metadata == null) {
+ metadata = new HashMap<String, Object>();
+ }
+ return metadata;
+ }
+
+ protected void setMetadata(Map<String, Object> metadata) {
+ this.metadata = metadata;
+ }
+
+ public int getId() {
+ return id;
+ }
+
+ public boolean isActive() {
+ return active;
+ }
+
+ public void setActive(boolean active) {
+ this.active = active;
+ }
+
+ public int getVersion() {
+ return version;
+ }
+
+ public void setVersion(int version) {
+ this.version = version;
+ }
}
src/main/java/azkaban/project/ProjectFileHandler.java 196(+94 -102)
diff --git a/src/main/java/azkaban/project/ProjectFileHandler.java b/src/main/java/azkaban/project/ProjectFileHandler.java
index 648c6f0..9412bee 100644
--- a/src/main/java/azkaban/project/ProjectFileHandler.java
+++ b/src/main/java/azkaban/project/ProjectFileHandler.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -19,104 +19,96 @@ package azkaban.project;
import java.io.File;
public class ProjectFileHandler {
- private final int projectId;
- private final int version;
- private final long uploadTime;
- private String fileType;
- private String fileName;
- private String uploader;
- private byte[] md5Hash;
- private int numChunks;
- private File localFile = null;
-
- public ProjectFileHandler(
- int projectId,
- int version,
- long uploadTime,
- String uploader,
- String fileType,
- String fileName,
- int numChunks,
- byte[] md5Hash)
- {
- this.projectId = projectId;
- this.version = version;
- this.uploadTime = uploadTime;
- this.setUploader(uploader);
- this.setFileType(fileType);
- this.setFileName(fileName);
- this.setMd5Hash(md5Hash);
- this.setNumChunks(numChunks);
- }
-
- public int getProjectId() {
- return projectId;
- }
-
- public int getVersion() {
- return version;
- }
-
- public long getUploadTime() {
- return uploadTime;
- }
-
- public String getFileType() {
- return fileType;
- }
-
- public void setFileType(String fileType) {
- this.fileType = fileType;
- }
-
- public String getFileName() {
- return fileName;
- }
-
- public void setFileName(String fileName) {
- this.fileName = fileName;
- }
-
- public byte[] getMd5Hash() {
- return md5Hash;
- }
-
- public void setMd5Hash(byte[] md5Hash) {
- this.md5Hash = md5Hash;
- }
-
- public File getLocalFile() {
- return localFile;
- }
-
- public synchronized void setLocalFile(File localFile) {
- this.localFile = localFile;
- }
-
- public synchronized void deleteLocalFile() {
- if (localFile == null) {
- return;
- }
- else {
- localFile.delete();
- localFile = null;
- }
- }
-
- public String getUploader() {
- return uploader;
- }
-
- public void setUploader(String uploader) {
- this.uploader = uploader;
- }
-
- public int getNumChunks() {
- return numChunks;
- }
-
- public void setNumChunks(int numChunks) {
- this.numChunks = numChunks;
- }
+ private final int projectId;
+ private final int version;
+ private final long uploadTime;
+ private String fileType;
+ private String fileName;
+ private String uploader;
+ private byte[] md5Hash;
+ private int numChunks;
+ private File localFile = null;
+
+ public ProjectFileHandler(int projectId, int version, long uploadTime,
+ String uploader, String fileType, String fileName, int numChunks,
+ byte[] md5Hash) {
+ this.projectId = projectId;
+ this.version = version;
+ this.uploadTime = uploadTime;
+ this.setUploader(uploader);
+ this.setFileType(fileType);
+ this.setFileName(fileName);
+ this.setMd5Hash(md5Hash);
+ this.setNumChunks(numChunks);
+ }
+
+ public int getProjectId() {
+ return projectId;
+ }
+
+ public int getVersion() {
+ return version;
+ }
+
+ public long getUploadTime() {
+ return uploadTime;
+ }
+
+ public String getFileType() {
+ return fileType;
+ }
+
+ public void setFileType(String fileType) {
+ this.fileType = fileType;
+ }
+
+ public String getFileName() {
+ return fileName;
+ }
+
+ public void setFileName(String fileName) {
+ this.fileName = fileName;
+ }
+
+ public byte[] getMd5Hash() {
+ return md5Hash;
+ }
+
+ public void setMd5Hash(byte[] md5Hash) {
+ this.md5Hash = md5Hash;
+ }
+
+ public File getLocalFile() {
+ return localFile;
+ }
+
+ public synchronized void setLocalFile(File localFile) {
+ this.localFile = localFile;
+ }
+
+ public synchronized void deleteLocalFile() {
+ if (localFile == null) {
+ return;
+ } else {
+ localFile.delete();
+ localFile = null;
+ }
+ }
+
+ public String getUploader() {
+ return uploader;
+ }
+
+ public void setUploader(String uploader) {
+ this.uploader = uploader;
+ }
+
+ public int getNumChunks() {
+ return numChunks;
+ }
+
+ public void setNumChunks(int numChunks) {
+ this.numChunks = numChunks;
+ }
}
src/main/java/azkaban/project/ProjectLoader.java 459(+243 -216)
diff --git a/src/main/java/azkaban/project/ProjectLoader.java b/src/main/java/azkaban/project/ProjectLoader.java
index 986ba55..1251903 100644
--- a/src/main/java/azkaban/project/ProjectLoader.java
+++ b/src/main/java/azkaban/project/ProjectLoader.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -30,217 +30,244 @@ import azkaban.utils.Triple;
public interface ProjectLoader {
- /**
- * Returns all projects which are active
- *
- * @return
- * @throws ProjectManagerException
- */
- public List<Project> fetchAllActiveProjects() throws ProjectManagerException;
-
- /**
- * Loads whole project, including permissions, by the project id.
- *
- * @param id
- * @return
- * @throws ProjectManagerException
- */
- public Project fetchProjectById(int id) throws ProjectManagerException;
-
- /**
- * Should create an empty project with the given name and user and adds it
- * to the data store. It will auto assign a unique id for this project if
- * successful.
- *
- * If an active project of the same name exists, it will throw an exception.
- * If the name and description of the project exceeds the store's
- * constraints, it will throw an exception.
- *
- * @param name
- * @return
- * @throws ProjectManagerException if an active project of the same name exists.
- */
- public Project createNewProject(String name, String description, User creator) throws ProjectManagerException;
-
- /**
- * Removes the project by marking it inactive.
- *
- * @param project
- * @throws ProjectManagerException
- */
- public void removeProject(Project project, String user) throws ProjectManagerException;
-
- /**
- * Adds and updates the user permissions. Does not check if the user is valid.
- * If the permission doesn't exist, it adds. If the permission exists, it updates.
- *
- * @param project
- * @param name
- * @param perm
- * @param isGroup
- * @throws ProjectManagerException
- */
- public void updatePermission(Project project, String name, Permission perm, boolean isGroup) throws ProjectManagerException;
-
- public void removePermission(Project project, String name, boolean isGroup) throws ProjectManagerException;
-
- /**
- * Modifies and commits the project description.
- *
- * @param project
- * @param description
- * @throws ProjectManagerException
- */
- public void updateDescription(Project project, String description, String user) throws ProjectManagerException;
-
- /**
- * Stores logs for a particular project. Will soft fail rather than throw exception.
- *
- * @param project
- * @param type
- * @param message
- * return true if the posting was success.
- */
- public boolean postEvent(Project project, EventType type, String user, String message);
-
- /**
- * Returns all the events for a project sorted
- *
- * @param project
- * @return
- */
- public List<ProjectLogEvent> getProjectEvents(Project project, int num, int skip) throws ProjectManagerException;
-
- /**
- * Will upload the files and return the version number of the file uploaded.
- */
- public void uploadProjectFile(Project project, int version, String filetype, String filename, File localFile, String user) throws ProjectManagerException;
-
- /**
- * Get file that's uploaded.
- *
- * @return
- */
- public ProjectFileHandler getUploadedFile(Project project, int version) throws ProjectManagerException;
-
- /**
- * Get file that's uploaded.
- *
- * @return
- */
- public ProjectFileHandler getUploadedFile(int projectId, int version) throws ProjectManagerException;
-
- /**
- * Changes and commits different project version.
- *
- * @param project
- * @param version
- * @throws ProjectManagerException
- */
- public void changeProjectVersion(Project project, int version, String user) throws ProjectManagerException;
-
-
- public void updateFlow(Project project, int version, Flow flow) throws ProjectManagerException;
-
- /**
- * Uploads all computed flows
- *
- * @param project
- * @param version
- * @param flows
- * @throws ProjectManagerException
- */
- public void uploadFlows(Project project, int version, Collection<Flow> flows) throws ProjectManagerException;
-
- /**
- * Upload just one flow.
- *
- * @param project
- * @param version
- * @param flow
- * @throws ProjectManagerException
- */
- public void uploadFlow(Project project, int version, Flow flow) throws ProjectManagerException;
-
- /**
- * Fetches one particular flow.
- *
- * @param project
- * @param version
- * @param flowId
- * @throws ProjectManagerException
- */
- public Flow fetchFlow(Project project, String flowId) throws ProjectManagerException;
-
- /**
- * Fetches all flows.
- *
- * @param project
- * @param version
- * @param flowId
- * @throws ProjectManagerException
- */
- public List<Flow> fetchAllProjectFlows(Project project) throws ProjectManagerException;
-
- /**
- * Gets the latest upload version.
- */
- public int getLatestProjectVersion(Project project) throws ProjectManagerException;
-
- /**
- * Upload Project properties
- *
- * @param project
- * @param path
- * @param properties
- * @throws ProjectManagerException
- */
- public void uploadProjectProperty(Project project, Props props) throws ProjectManagerException;
-
- /**
- * Upload Project properties. Map contains key value of path and properties
- *
- * @param project
- * @param path
- * @param properties
- * @throws ProjectManagerException
- */
- public void uploadProjectProperties(Project project, List<Props> properties) throws ProjectManagerException;
-
- /**
- * Fetch project properties
- *
- * @param project
- * @param propsName
- * @return
- * @throws ProjectManagerException
- */
- public Props fetchProjectProperty(Project project, String propsName) throws ProjectManagerException;
-
- /**
- * Fetch all project properties
- *
- * @param project
- * @return
- * @throws ProjectManagerException
- */
- public Map<String,Props> fetchProjectProperties(int projectId, int version) throws ProjectManagerException;
-
- /**
- * Cleans all project versions less tha
- *
- * @param projectId
- * @param version
- * @throws ProjectManagerException
- */
- public void cleanOlderProjectVersion(int projectId, int version) throws ProjectManagerException;
-
- public void updateProjectProperty(Project project, Props props) throws ProjectManagerException;
-
- Props fetchProjectProperty(int projectId, int projectVer, String propsName) throws ProjectManagerException;
-
- List<Triple<String, Boolean, Permission>> getProjectPermissions(int projectId) throws ProjectManagerException;
-
- void updateProjectSettings(Project project) throws ProjectManagerException;
-
+ /**
+ * Returns all projects which are active
+ *
+ * @return
+ * @throws ProjectManagerException
+ */
+ public List<Project> fetchAllActiveProjects() throws ProjectManagerException;
+
+ /**
+ * Loads whole project, including permissions, by the project id.
+ *
+ * @param id
+ * @return
+ * @throws ProjectManagerException
+ */
+ public Project fetchProjectById(int id) throws ProjectManagerException;
+
+ /**
+ * Should create an empty project with the given name and user and adds it to
+ * the data store. It will auto assign a unique id for this project if
+ * successful.
+ *
+ * If an active project of the same name exists, it will throw an exception.
+ * If the name and description of the project exceeds the store's constraints,
+ * it will throw an exception.
+ *
+ * @param name
+ * @return
+ * @throws ProjectManagerException if an active project of the same name
+ * exists.
+ */
+ public Project createNewProject(String name, String description, User creator)
+ throws ProjectManagerException;
+
+ /**
+ * Removes the project by marking it inactive.
+ *
+ * @param project
+ * @throws ProjectManagerException
+ */
+ public void removeProject(Project project, String user)
+ throws ProjectManagerException;
+
+ /**
+ * Adds and updates the user permissions. Does not check if the user is valid.
+ * If the permission doesn't exist, it adds. If the permission exists, it
+ * updates.
+ *
+ * @param project
+ * @param name
+ * @param perm
+ * @param isGroup
+ * @throws ProjectManagerException
+ */
+ public void updatePermission(Project project, String name, Permission perm,
+ boolean isGroup) throws ProjectManagerException;
+
+ public void removePermission(Project project, String name, boolean isGroup)
+ throws ProjectManagerException;
+
+ /**
+ * Modifies and commits the project description.
+ *
+ * @param project
+ * @param description
+ * @throws ProjectManagerException
+ */
+ public void updateDescription(Project project, String description, String user)
+ throws ProjectManagerException;
+
+ /**
+ * Stores logs for a particular project. Will soft fail rather than throw
+ * exception.
+ *
+ * @param project
+ * @param type
+ * @param message return true if the posting was success.
+ */
+ public boolean postEvent(Project project, EventType type, String user,
+ String message);
+
+ /**
+ * Returns all the events for a project sorted
+ *
+ * @param project
+ * @return
+ */
+ public List<ProjectLogEvent> getProjectEvents(Project project, int num,
+ int skip) throws ProjectManagerException;
+
+ /**
+ * Will upload the files and return the version number of the file uploaded.
+ */
+ public void uploadProjectFile(Project project, int version, String filetype,
+ String filename, File localFile, String user)
+ throws ProjectManagerException;
+
+ /**
+ * Get file that's uploaded.
+ *
+ * @return
+ */
+ public ProjectFileHandler getUploadedFile(Project project, int version)
+ throws ProjectManagerException;
+
+ /**
+ * Get file that's uploaded.
+ *
+ * @return
+ */
+ public ProjectFileHandler getUploadedFile(int projectId, int version)
+ throws ProjectManagerException;
+
+ /**
+ * Changes and commits different project version.
+ *
+ * @param project
+ * @param version
+ * @throws ProjectManagerException
+ */
+ public void changeProjectVersion(Project project, int version, String user)
+ throws ProjectManagerException;
+
+ public void updateFlow(Project project, int version, Flow flow)
+ throws ProjectManagerException;
+
+ /**
+ * Uploads all computed flows
+ *
+ * @param project
+ * @param version
+ * @param flows
+ * @throws ProjectManagerException
+ */
+ public void uploadFlows(Project project, int version, Collection<Flow> flows)
+ throws ProjectManagerException;
+
+ /**
+ * Upload just one flow.
+ *
+ * @param project
+ * @param version
+ * @param flow
+ * @throws ProjectManagerException
+ */
+ public void uploadFlow(Project project, int version, Flow flow)
+ throws ProjectManagerException;
+
+ /**
+ * Fetches one particular flow.
+ *
+ * @param project
+ * @param version
+ * @param flowId
+ * @throws ProjectManagerException
+ */
+ public Flow fetchFlow(Project project, String flowId)
+ throws ProjectManagerException;
+
+ /**
+ * Fetches all flows.
+ *
+ * @param project
+ * @param version
+ * @param flowId
+ * @throws ProjectManagerException
+ */
+ public List<Flow> fetchAllProjectFlows(Project project)
+ throws ProjectManagerException;
+
+ /**
+ * Gets the latest upload version.
+ */
+ public int getLatestProjectVersion(Project project)
+ throws ProjectManagerException;
+
+ /**
+ * Upload Project properties
+ *
+ * @param project
+ * @param path
+ * @param properties
+ * @throws ProjectManagerException
+ */
+ public void uploadProjectProperty(Project project, Props props)
+ throws ProjectManagerException;
+
+ /**
+ * Upload Project properties. Map contains key value of path and properties
+ *
+ * @param project
+ * @param path
+ * @param properties
+ * @throws ProjectManagerException
+ */
+ public void uploadProjectProperties(Project project, List<Props> properties)
+ throws ProjectManagerException;
+
+ /**
+ * Fetch project properties
+ *
+ * @param project
+ * @param propsName
+ * @return
+ * @throws ProjectManagerException
+ */
+ public Props fetchProjectProperty(Project project, String propsName)
+ throws ProjectManagerException;
+
+ /**
+ * Fetch all project properties
+ *
+ * @param project
+ * @return
+ * @throws ProjectManagerException
+ */
+ public Map<String, Props> fetchProjectProperties(int projectId, int version)
+ throws ProjectManagerException;
+
+ /**
+ * Cleans all project versions less tha
+ *
+ * @param projectId
+ * @param version
+ * @throws ProjectManagerException
+ */
+ public void cleanOlderProjectVersion(int projectId, int version)
+ throws ProjectManagerException;
+
+ public void updateProjectProperty(Project project, Props props)
+ throws ProjectManagerException;
+
+ Props fetchProjectProperty(int projectId, int projectVer, String propsName)
+ throws ProjectManagerException;
+
+ List<Triple<String, Boolean, Permission>> getProjectPermissions(int projectId)
+ throws ProjectManagerException;
+
+ void updateProjectSettings(Project project) throws ProjectManagerException;
+
}
src/main/java/azkaban/project/ProjectLogEvent.java 163(+82 -81)
diff --git a/src/main/java/azkaban/project/ProjectLogEvent.java b/src/main/java/azkaban/project/ProjectLogEvent.java
index e6c3399..a19d013 100644
--- a/src/main/java/azkaban/project/ProjectLogEvent.java
+++ b/src/main/java/azkaban/project/ProjectLogEvent.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -17,93 +17,94 @@
package azkaban.project;
public class ProjectLogEvent {
- /**
- * Log event type messages. Do not change the numeric representation of each enum.
- *
- * Only represent from 0 to 255 different codes.
- */
- public static enum EventType {
- ERROR(128),
- CREATED(1),
- DELETED(2),
- USER_PERMISSION(3),
- GROUP_PERMISSION(4),
- DESCRIPTION(5),
- UPLOADED(6),
- SCHEDULE(7),
- SLA(8),
- PROXY_USER(9);
+ /**
+ * Log event type messages. Do not change the numeric representation of each enum.
+ *
+ * Only represent from 0 to 255 different codes.
+ */
+ public static enum EventType {
+ ERROR(128),
+ CREATED(1),
+ DELETED(2),
+ USER_PERMISSION(3),
+ GROUP_PERMISSION(4),
+ DESCRIPTION(5),
+ UPLOADED(6),
+ SCHEDULE(7),
+ SLA(8),
+ PROXY_USER(9);
- private int numVal;
+ private int numVal;
- EventType(int numVal) {
- this.numVal = numVal;
- }
+ EventType(int numVal) {
+ this.numVal = numVal;
+ }
- public int getNumVal() {
- return numVal;
- }
+ public int getNumVal() {
+ return numVal;
+ }
- public static EventType fromInteger(int x) {
- switch (x) {
- case 1:
- return CREATED;
- case 2:
- return DELETED;
- case 3:
- return USER_PERMISSION;
- case 4:
- return GROUP_PERMISSION;
- case 5:
- return DESCRIPTION;
- case 6:
- return UPLOADED;
- case 7:
- return SCHEDULE;
- case 8:
- return SLA;
- case 9:
- return PROXY_USER;
- case 128:
- return ERROR;
- default:
- return ERROR;
- }
- }
- }
-
- private final int projectId;
- private final String user;
- private final long time;
- private final EventType type;
- private final String message;
+ public static EventType fromInteger(int x) {
+ switch (x) {
+ case 1:
+ return CREATED;
+ case 2:
+ return DELETED;
+ case 3:
+ return USER_PERMISSION;
+ case 4:
+ return GROUP_PERMISSION;
+ case 5:
+ return DESCRIPTION;
+ case 6:
+ return UPLOADED;
+ case 7:
+ return SCHEDULE;
+ case 8:
+ return SLA;
+ case 9:
+ return PROXY_USER;
+ case 128:
+ return ERROR;
+ default:
+ return ERROR;
+ }
+ }
+ }
- public ProjectLogEvent(int projectId, EventType type, long time, String user, String message) {
- this.projectId = projectId;
- this.user = user;
- this.time = time;
- this.type = type;
- this.message = message;
- }
+ private final int projectId;
+ private final String user;
+ private final long time;
+ private final EventType type;
+ private final String message;
- public int getProjectId() {
- return projectId;
- }
+ public ProjectLogEvent(int projectId, EventType type, long time, String user,
+ String message) {
+ this.projectId = projectId;
+ this.user = user;
+ this.time = time;
+ this.type = type;
+ this.message = message;
+ }
- public String getUser() {
- return user;
- }
+ public int getProjectId() {
+ return projectId;
+ }
- public long getTime() {
- return time;
- }
+ public String getUser() {
+ return user;
+ }
- public EventType getType() {
- return type;
- }
+ public long getTime() {
+ return time;
+ }
- public String getMessage() {
- return message;
- }
+ public EventType getType() {
+ return type;
+ }
+
+ public String getMessage() {
+ return message;
+ }
}
src/main/java/azkaban/project/ProjectManager.java 769(+408 -361)
diff --git a/src/main/java/azkaban/project/ProjectManager.java b/src/main/java/azkaban/project/ProjectManager.java
index be494e4..104228a 100644
--- a/src/main/java/azkaban/project/ProjectManager.java
+++ b/src/main/java/azkaban/project/ProjectManager.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -40,79 +40,85 @@ import azkaban.utils.Props;
import azkaban.utils.Utils;
public class ProjectManager {
- private static final Logger logger = Logger.getLogger(ProjectManager.class);
-
- private ConcurrentHashMap<Integer, Project> projectsById = new ConcurrentHashMap<Integer, Project>();
- private ConcurrentHashMap<String, Project> projectsByName = new ConcurrentHashMap<String, Project>();
- private final ProjectLoader projectLoader;
- private final Props props;
- private final File tempDir;
- private final int projectVersionRetention;
- private final boolean creatorDefaultPermissions;
-
- public ProjectManager(ProjectLoader loader, Props props) {
- this.projectLoader = loader;
- this.props = props;
- this.tempDir = new File(this.props.getString("project.temp.dir", "temp"));
- this.projectVersionRetention = (props.getInt("project.version.retention", 3));
- logger.info("Project version retention is set to " + projectVersionRetention);
-
- this.creatorDefaultPermissions = props.getBoolean("creator.default.proxy", true);
-
- if (!tempDir.exists()) {
- tempDir.mkdirs();
- }
-
- loadAllProjects();
- }
-
- private void loadAllProjects() {
- List<Project> projects;
- try {
- projects = projectLoader.fetchAllActiveProjects();
- } catch (ProjectManagerException e) {
- throw new RuntimeException("Could not load projects from store.", e);
- }
- for (Project proj: projects) {
- projectsByName.put(proj.getName(), proj);
- projectsById.put(proj.getId(), proj);
- }
-
- for (Project proj: projects) {
- loadAllProjectFlows(proj);
- }
- }
-
- private void loadAllProjectFlows(Project project) {
- try {
- List<Flow> flows = projectLoader.fetchAllProjectFlows(project);
- Map<String, Flow> flowMap = new HashMap<String, Flow>();
- for (Flow flow: flows) {
- flowMap.put(flow.getId(), flow);
- }
-
- project.setFlows(flowMap);
- }
- catch (ProjectManagerException e) {
- throw new RuntimeException("Could not load projects flows from store.", e);
- }
- }
-
- public List<String> getProjectNames() {
- return new ArrayList<String>(projectsByName.keySet());
- }
-
- public List<Project> getUserProjects(User user) {
- ArrayList<Project> array = new ArrayList<Project>();
- for (Project project : projectsById.values()) {
- Permission perm = project.getUserPermission(user);
-
- if (perm != null && (perm.isPermissionSet(Type.ADMIN) || perm.isPermissionSet(Type.READ))) {
- array.add(project);
- }
- }
- return array;
- }
+ private static final Logger logger = Logger.getLogger(ProjectManager.class);
+
+ private ConcurrentHashMap<Integer, Project> projectsById =
+ new ConcurrentHashMap<Integer, Project>();
+ private ConcurrentHashMap<String, Project> projectsByName =
+ new ConcurrentHashMap<String, Project>();
+ private final ProjectLoader projectLoader;
+ private final Props props;
+ private final File tempDir;
+ private final int projectVersionRetention;
+ private final boolean creatorDefaultPermissions;
+
+ public ProjectManager(ProjectLoader loader, Props props) {
+ this.projectLoader = loader;
+ this.props = props;
+ this.tempDir = new File(this.props.getString("project.temp.dir", "temp"));
+ this.projectVersionRetention =
+ (props.getInt("project.version.retention", 3));
+ logger.info("Project version retention is set to "
+ + projectVersionRetention);
+
+ this.creatorDefaultPermissions =
+ props.getBoolean("creator.default.proxy", true);
+
+ if (!tempDir.exists()) {
+ tempDir.mkdirs();
+ }
+
+ loadAllProjects();
+ }
+
+ private void loadAllProjects() {
+ List<Project> projects;
+ try {
+ projects = projectLoader.fetchAllActiveProjects();
+ } catch (ProjectManagerException e) {
+ throw new RuntimeException("Could not load projects from store.", e);
+ }
+ for (Project proj : projects) {
+ projectsByName.put(proj.getName(), proj);
+ projectsById.put(proj.getId(), proj);
+ }
+
+ for (Project proj : projects) {
+ loadAllProjectFlows(proj);
+ }
+ }
+
+ private void loadAllProjectFlows(Project project) {
+ try {
+ List<Flow> flows = projectLoader.fetchAllProjectFlows(project);
+ Map<String, Flow> flowMap = new HashMap<String, Flow>();
+ for (Flow flow : flows) {
+ flowMap.put(flow.getId(), flow);
+ }
+
+ project.setFlows(flowMap);
+ } catch (ProjectManagerException e) {
+ throw new RuntimeException("Could not load projects flows from store.", e);
+ }
+ }
+
+ public List<String> getProjectNames() {
+ return new ArrayList<String>(projectsByName.keySet());
+ }
+
+ public List<Project> getUserProjects(User user) {
+ ArrayList<Project> array = new ArrayList<Project>();
+ for (Project project : projectsById.values()) {
+ Permission perm = project.getUserPermission(user);
+
+ if (perm != null
+ && (perm.isPermissionSet(Type.ADMIN) || perm
+ .isPermissionSet(Type.READ))) {
+ array.add(project);
+ }
+ }
+ return array;
+ }
public List<Project> getGroupProjects(User user) {
List<Project> array = new ArrayList<Project>();
@@ -124,291 +130,332 @@ public class ProjectManager {
return array;
}
- public List<Project> getUserProjectsByRegex(User user, String regexPattern) {
- List<Project> array = new ArrayList<Project>();
- Pattern pattern;
- try {
- pattern = Pattern.compile(regexPattern, Pattern.CASE_INSENSITIVE);
- } catch (PatternSyntaxException e) {
- logger.error("Bad regex pattern " + regexPattern);
- return array;
- }
-
-
- for (Project project : projectsById.values()) {
- Permission perm = project.getUserPermission(user);
-
- if (perm != null && (perm.isPermissionSet(Type.ADMIN) || perm.isPermissionSet(Type.READ))) {
- if(pattern.matcher(project.getName()).find() ) {
- array.add(project);
- }
- }
- }
- return array;
- }
-
- public List<Project> getProjects() {
- return new ArrayList<Project>(projectsById.values());
- }
-
- public List<Project> getProjectsByRegex(String regexPattern) {
- List<Project> allProjects = new ArrayList<Project>();
- Pattern pattern;
- try {
- pattern = Pattern.compile(regexPattern, Pattern.CASE_INSENSITIVE);
- } catch (PatternSyntaxException e) {
- logger.error("Bad regex pattern " + regexPattern);
- return allProjects;
- }
- for(Project project : getProjects()) {
- if(pattern.matcher(project.getName()).find()) {
- allProjects.add(project);
- }
- }
- return allProjects;
- }
-
- public Project getProject(String name) {
- return projectsByName.get(name);
- }
-
- public Project getProject(int id) {
- return projectsById.get(id);
- }
-
- public Project createProject(String projectName, String description, User creator) throws ProjectManagerException {
- if (projectName == null || projectName.trim().isEmpty()) {
- throw new ProjectManagerException("Project name cannot be empty.");
- }
- else if (description == null || description.trim().isEmpty()) {
- throw new ProjectManagerException("Description cannot be empty.");
- }
- else if (creator == null) {
- throw new ProjectManagerException("Valid creator user must be set.");
- }
- else if (!projectName.matches("[a-zA-Z][a-zA-Z_0-9|-]*")) {
- throw new ProjectManagerException("Project names must start with a letter, followed by any number of letters, digits, '-' or '_'.");
- }
-
- if (projectsByName.contains(projectName)) {
- throw new ProjectManagerException("Project already exists.");
- }
-
- logger.info("Trying to create " + projectName + " by user " + creator.getUserId());
- Project newProject = projectLoader.createNewProject(projectName, description, creator);
- projectsByName.put(newProject.getName(), newProject);
- projectsById.put(newProject.getId(), newProject);
-
- if(creatorDefaultPermissions) {
- // Add permission to project
- projectLoader.updatePermission(newProject, creator.getUserId(), new Permission(Permission.Type.ADMIN), false);
-
- // Add proxy user
- newProject.addProxyUser(creator.getUserId());
- try {
- updateProjectSetting(newProject);
- } catch (ProjectManagerException e) {
- e.printStackTrace();
- throw e;
- }
- }
-
- projectLoader.postEvent(newProject, EventType.CREATED, creator.getUserId(), null);
-
- return newProject;
- }
-
- public synchronized Project removeProject(Project project, User deleter) throws ProjectManagerException {
- projectLoader.removeProject(project, deleter.getUserId());
- projectLoader.postEvent(project, EventType.DELETED, deleter.getUserId(), null);
-
- projectsByName.remove(project.getName());
- projectsById.remove(project.getId());
-
- return project;
- }
-
- public void updateProjectDescription(Project project, String description, User modifier) throws ProjectManagerException {
- projectLoader.updateDescription(project, description, modifier.getUserId());
- projectLoader.postEvent(project, EventType.DESCRIPTION, modifier.getUserId(), "Description changed to " + description);
- }
-
- public List<ProjectLogEvent> getProjectEventLogs(Project project, int results, int skip) throws ProjectManagerException {
- return projectLoader.getProjectEvents(project, results, skip);
- }
-
- public Props getProperties(Project project, String source) throws ProjectManagerException {
- return projectLoader.fetchProjectProperty(project, source);
- }
-
- public Props getJobOverrideProperty(Project project, String jobName) throws ProjectManagerException {
- return projectLoader.fetchProjectProperty(project, jobName+".jor");
- }
-
- public void setJobOverrideProperty(Project project, Props prop, String jobName) throws ProjectManagerException {
- prop.setSource(jobName+".jor");
- Props oldProps = projectLoader.fetchProjectProperty(project, prop.getSource());
- if(oldProps == null) {
- projectLoader.uploadProjectProperty(project, prop);
- }
- else {
- projectLoader.updateProjectProperty(project, prop);
- }
- return;
- }
-
- public void updateProjectSetting(Project project) throws ProjectManagerException {
- projectLoader.updateProjectSettings(project);
- }
-
- public void addProjectProxyUser(Project project, String proxyName, User modifier) throws ProjectManagerException {
- logger.info("User " + modifier.getUserId() + " adding proxy user " + proxyName + " to project " + project.getName());
- project.addProxyUser(proxyName);
-
- projectLoader.postEvent(project, EventType.PROXY_USER, modifier.getUserId(), "Proxy user " + proxyName + " is added to project.");
- updateProjectSetting(project);
- }
-
- public void removeProjectProxyUser(Project project, String proxyName, User modifier) throws ProjectManagerException {
- logger.info("User " + modifier.getUserId() + " removing proxy user " + proxyName + " from project " + project.getName());
- project.removeProxyUser(proxyName);
-
- projectLoader.postEvent(project, EventType.PROXY_USER, modifier.getUserId(), "Proxy user " + proxyName + " has been removed form the project.");
- updateProjectSetting(project);
- }
-
- public void updateProjectPermission(Project project, String name, Permission perm, boolean group, User modifier) throws ProjectManagerException {
- logger.info("User " + modifier.getUserId() + " updating permissions for project " + project.getName() + " for " + name + " " + perm.toString());
- projectLoader.updatePermission(project, name, perm, group);
- if (group) {
- projectLoader.postEvent(project, EventType.GROUP_PERMISSION, modifier.getUserId(), "Permission for group " + name + " set to " + perm.toString());
- }
- else {
- projectLoader.postEvent(project, EventType.USER_PERMISSION, modifier.getUserId(), "Permission for user " + name + " set to " + perm.toString());
- }
- }
-
- public void removeProjectPermission(Project project, String name, boolean group, User modifier) throws ProjectManagerException {
- logger.info("User " + modifier.getUserId() + " removing permissions for project " + project.getName() + " for " + name);
- projectLoader.removePermission(project, name, group);
- if (group) {
- projectLoader.postEvent(project, EventType.GROUP_PERMISSION, modifier.getUserId(), "Permission for group " + name + " removed.");
- }
- else {
- projectLoader.postEvent(project, EventType.USER_PERMISSION, modifier.getUserId(), "Permission for user " + name + " removed.");
- }
- }
-
- public void uploadProject(Project project, File archive, String fileType, User uploader) throws ProjectManagerException {
- logger.info("Uploading files to " + project.getName());
-
- // Unzip.
- File file = null;
- try {
- if (fileType == null) {
- throw new ProjectManagerException("Unknown file type for " + archive.getName());
- }
- else if ("zip".equals(fileType)) {
- file = unzipFile(archive);
- }
- else {
- throw new ProjectManagerException("Unsupported archive type for file " + archive.getName());
- }
- } catch(IOException e) {
- throw new ProjectManagerException("Error unzipping file.", e);
- }
-
- logger.info("Validating Flow for upload " + archive.getName());
- DirectoryFlowLoader loader = new DirectoryFlowLoader(logger);
- loader.loadProjectFlow(file);
- if(!loader.getErrors().isEmpty()) {
- logger.error("Error found in upload to " + project.getName() + ". Cleaning up.");
-
- try {
- FileUtils.deleteDirectory(file);
- } catch (IOException e) {
- file.deleteOnExit();
- e.printStackTrace();
- }
-
- StringBuffer errorMessage = new StringBuffer();
- errorMessage.append("Error found in upload. Cannot upload.\n");
- for (String error: loader.getErrors()) {
- errorMessage.append(error);
- errorMessage.append('\n');
- }
-
- throw new ProjectManagerException(errorMessage.toString());
- }
-
- Map<String, Props> jobProps = loader.getJobProps();
- List<Props> propProps = loader.getProps();
-
- synchronized(project) {
- int newVersion = projectLoader.getLatestProjectVersion(project) + 1;
- Map<String, Flow> flows = loader.getFlowMap();
- for (Flow flow: flows.values()) {
- flow.setProjectId(project.getId());
- flow.setVersion(newVersion);
- }
-
- logger.info("Uploading file to db " + archive.getName());
- projectLoader.uploadProjectFile(project, newVersion, fileType, archive.getName(), archive, uploader.getUserId());
- logger.info("Uploading flow to db " + archive.getName());
- projectLoader.uploadFlows(project, newVersion, flows.values());
- logger.info("Changing project versions " + archive.getName());
- projectLoader.changeProjectVersion(project, newVersion, uploader.getUserId());
- project.setFlows(flows);
- logger.info("Uploading Job properties");
- projectLoader.uploadProjectProperties(project, new ArrayList<Props>(jobProps.values()));
- logger.info("Uploading Props properties");
- projectLoader.uploadProjectProperties(project, propProps);
- }
-
- //TODO: find something else to load triggers
-// if(loadTriggerFromFile) {
-// logger.info("Loading triggers.");
-// Props triggerProps = new Props();
-// triggerProps.put("projectId", project.getId());
-// triggerProps.put("projectName", project.getName());
-// triggerProps.put("submitUser", uploader.getUserId());
-// try {
-// triggerManager.loadTriggerFromDir(file, triggerProps);
-// } catch (Exception e) {
-// // TODO Auto-generated catch block
-// e.printStackTrace();
-// logger.error("Failed to load triggers.", e);
-// }
-// }
-
- logger.info("Uploaded project files. Cleaning up temp files.");
- projectLoader.postEvent(project, EventType.UPLOADED, uploader.getUserId(), "Uploaded project files zip " + archive.getName());
- try {
- FileUtils.deleteDirectory(file);
- } catch (IOException e) {
- file.deleteOnExit();
- e.printStackTrace();
- }
-
- logger.info("Cleaning up old install files older than " + (project.getVersion() - projectVersionRetention));
- projectLoader.cleanOlderProjectVersion(project.getId(), project.getVersion() - projectVersionRetention);
- }
-
- public void updateFlow(Project project, Flow flow) throws ProjectManagerException {
- projectLoader.updateFlow(project, flow.getVersion(), flow);
- }
-
- private File unzipFile(File archiveFile) throws IOException {
- ZipFile zipfile = new ZipFile(archiveFile);
- File unzipped = Utils.createTempDir(tempDir);
- Utils.unzip(zipfile, unzipped);
-
- return unzipped;
- }
-
- public void postProjectEvent(Project project, EventType type, String user,String message) {
- projectLoader.postEvent(project, type, user, message);
- }
+ public List<Project> getUserProjectsByRegex(User user, String regexPattern) {
+ List<Project> array = new ArrayList<Project>();
+ Pattern pattern;
+ try {
+ pattern = Pattern.compile(regexPattern, Pattern.CASE_INSENSITIVE);
+ } catch (PatternSyntaxException e) {
+ logger.error("Bad regex pattern " + regexPattern);
+ return array;
+ }
+
+ for (Project project : projectsById.values()) {
+ Permission perm = project.getUserPermission(user);
+
+ if (perm != null
+ && (perm.isPermissionSet(Type.ADMIN) || perm
+ .isPermissionSet(Type.READ))) {
+ if (pattern.matcher(project.getName()).find()) {
+ array.add(project);
+ }
+ }
+ }
+ return array;
+ }
+
+ public List<Project> getProjects() {
+ return new ArrayList<Project>(projectsById.values());
+ }
+
+ public List<Project> getProjectsByRegex(String regexPattern) {
+ List<Project> allProjects = new ArrayList<Project>();
+ Pattern pattern;
+ try {
+ pattern = Pattern.compile(regexPattern, Pattern.CASE_INSENSITIVE);
+ } catch (PatternSyntaxException e) {
+ logger.error("Bad regex pattern " + regexPattern);
+ return allProjects;
+ }
+ for (Project project : getProjects()) {
+ if (pattern.matcher(project.getName()).find()) {
+ allProjects.add(project);
+ }
+ }
+ return allProjects;
+ }
+
+ public Project getProject(String name) {
+ return projectsByName.get(name);
+ }
+
+ public Project getProject(int id) {
+ return projectsById.get(id);
+ }
+
+ public Project createProject(String projectName, String description,
+ User creator) throws ProjectManagerException {
+ if (projectName == null || projectName.trim().isEmpty()) {
+ throw new ProjectManagerException("Project name cannot be empty.");
+ } else if (description == null || description.trim().isEmpty()) {
+ throw new ProjectManagerException("Description cannot be empty.");
+ } else if (creator == null) {
+ throw new ProjectManagerException("Valid creator user must be set.");
+ } else if (!projectName.matches("[a-zA-Z][a-zA-Z_0-9-]*")) {
+ throw new ProjectManagerException(
+ "Project names must start with a letter, followed by any number of letters, digits, '-' or '_'.");
+ }
+
+ if (projectsByName.containsKey(projectName)) {
+ throw new ProjectManagerException("Project already exists.");
+ }
+
+ logger.info("Trying to create " + projectName + " by user "
+ + creator.getUserId());
+ Project newProject =
+ projectLoader.createNewProject(projectName, description, creator);
+ projectsByName.put(newProject.getName(), newProject);
+ projectsById.put(newProject.getId(), newProject);
+
+ if (creatorDefaultPermissions) {
+ // Add permission to project
+ projectLoader.updatePermission(newProject, creator.getUserId(),
+ new Permission(Permission.Type.ADMIN), false);
+
+ // Add proxy user
+ newProject.addProxyUser(creator.getUserId());
+ try {
+ updateProjectSetting(newProject);
+ } catch (ProjectManagerException e) {
+ e.printStackTrace();
+ throw e;
+ }
+ }
+
+ projectLoader.postEvent(newProject, EventType.CREATED, creator.getUserId(),
+ null);
+
+ return newProject;
+ }
+
+ public synchronized Project removeProject(Project project, User deleter)
+ throws ProjectManagerException {
+ projectLoader.removeProject(project, deleter.getUserId());
+ projectLoader.postEvent(project, EventType.DELETED, deleter.getUserId(),
+ null);
+
+ projectsByName.remove(project.getName());
+ projectsById.remove(project.getId());
+
+ return project;
+ }
+
+ public void updateProjectDescription(Project project, String description,
+ User modifier) throws ProjectManagerException {
+ projectLoader.updateDescription(project, description, modifier.getUserId());
+ projectLoader.postEvent(project, EventType.DESCRIPTION,
+ modifier.getUserId(), "Description changed to " + description);
+ }
+
+ public List<ProjectLogEvent> getProjectEventLogs(Project project,
+ int results, int skip) throws ProjectManagerException {
+ return projectLoader.getProjectEvents(project, results, skip);
+ }
+
+ public Props getProperties(Project project, String source)
+ throws ProjectManagerException {
+ return projectLoader.fetchProjectProperty(project, source);
+ }
+
+ public Props getJobOverrideProperty(Project project, String jobName)
+ throws ProjectManagerException {
+ return projectLoader.fetchProjectProperty(project, jobName + ".jor");
+ }
+
+ public void setJobOverrideProperty(Project project, Props prop, String jobName)
+ throws ProjectManagerException {
+ prop.setSource(jobName + ".jor");
+ Props oldProps =
+ projectLoader.fetchProjectProperty(project, prop.getSource());
+ if (oldProps == null) {
+ projectLoader.uploadProjectProperty(project, prop);
+ } else {
+ projectLoader.updateProjectProperty(project, prop);
+ }
+ return;
+ }
+
+ public void updateProjectSetting(Project project)
+ throws ProjectManagerException {
+ projectLoader.updateProjectSettings(project);
+ }
+
+ public void addProjectProxyUser(Project project, String proxyName,
+ User modifier) throws ProjectManagerException {
+ logger.info("User " + modifier.getUserId() + " adding proxy user "
+ + proxyName + " to project " + project.getName());
+ project.addProxyUser(proxyName);
+
+ projectLoader.postEvent(project, EventType.PROXY_USER,
+ modifier.getUserId(), "Proxy user " + proxyName
+ + " is added to project.");
+ updateProjectSetting(project);
+ }
+
+ public void removeProjectProxyUser(Project project, String proxyName,
+ User modifier) throws ProjectManagerException {
+ logger.info("User " + modifier.getUserId() + " removing proxy user "
+ + proxyName + " from project " + project.getName());
+ project.removeProxyUser(proxyName);
+
+ projectLoader.postEvent(project, EventType.PROXY_USER,
+ modifier.getUserId(), "Proxy user " + proxyName
+ + " has been removed from the project.");
+ updateProjectSetting(project);
+ }
+
+ public void updateProjectPermission(Project project, String name,
+ Permission perm, boolean group, User modifier)
+ throws ProjectManagerException {
+ logger.info("User " + modifier.getUserId()
+ + " updating permissions for project " + project.getName() + " for "
+ + name + " " + perm.toString());
+ projectLoader.updatePermission(project, name, perm, group);
+ if (group) {
+ projectLoader.postEvent(project, EventType.GROUP_PERMISSION,
+ modifier.getUserId(), "Permission for group " + name + " set to "
+ + perm.toString());
+ } else {
+ projectLoader.postEvent(project, EventType.USER_PERMISSION,
+ modifier.getUserId(), "Permission for user " + name + " set to "
+ + perm.toString());
+ }
+ }
+
+ public void removeProjectPermission(Project project, String name,
+ boolean group, User modifier) throws ProjectManagerException {
+ logger.info("User " + modifier.getUserId()
+ + " removing permissions for project " + project.getName() + " for "
+ + name);
+ projectLoader.removePermission(project, name, group);
+ if (group) {
+ projectLoader.postEvent(project, EventType.GROUP_PERMISSION,
+ modifier.getUserId(), "Permission for group " + name + " removed.");
+ } else {
+ projectLoader.postEvent(project, EventType.USER_PERMISSION,
+ modifier.getUserId(), "Permission for user " + name + " removed.");
+ }
+ }
+
+ public void uploadProject(Project project, File archive, String fileType,
+ User uploader) throws ProjectManagerException {
+ logger.info("Uploading files to " + project.getName());
+
+ // Unzip.
+ File file = null;
+ try {
+ if (fileType == null) {
+ throw new ProjectManagerException("Unknown file type for "
+ + archive.getName());
+ } else if ("zip".equals(fileType)) {
+ file = unzipFile(archive);
+ } else {
+ throw new ProjectManagerException("Unsupported archive type for file "
+ + archive.getName());
+ }
+ } catch (IOException e) {
+ throw new ProjectManagerException("Error unzipping file.", e);
+ }
+ logger.info("Validating Flow for upload " + archive.getName());
+ DirectoryFlowLoader loader = new DirectoryFlowLoader(logger);
+ loader.loadProjectFlow(file);
+ if (!loader.getErrors().isEmpty()) {
+ logger.error("Error found in upload to " + project.getName()
+ + ". Cleaning up.");
+
+ try {
+ FileUtils.deleteDirectory(file);
+ } catch (IOException e) {
+ file.deleteOnExit();
+ e.printStackTrace();
+ }
+
+ StringBuffer errorMessage = new StringBuffer();
+ errorMessage.append("Error found in upload. Cannot upload.\n");
+ for (String error : loader.getErrors()) {
+ errorMessage.append(error);
+ errorMessage.append('\n');
+ }
+
+ throw new ProjectManagerException(errorMessage.toString());
+ }
+
+ Map<String, Props> jobProps = loader.getJobProps();
+ List<Props> propProps = loader.getProps();
+
+ synchronized (project) {
+ int newVersion = projectLoader.getLatestProjectVersion(project) + 1;
+ Map<String, Flow> flows = loader.getFlowMap();
+ for (Flow flow : flows.values()) {
+ flow.setProjectId(project.getId());
+ flow.setVersion(newVersion);
+ }
+
+ logger.info("Uploading file to db " + archive.getName());
+ projectLoader.uploadProjectFile(project, newVersion, fileType,
+ archive.getName(), archive, uploader.getUserId());
+ logger.info("Uploading flow to db " + archive.getName());
+ projectLoader.uploadFlows(project, newVersion, flows.values());
+ logger.info("Changing project versions " + archive.getName());
+ projectLoader.changeProjectVersion(project, newVersion,
+ uploader.getUserId());
+ project.setFlows(flows);
+ logger.info("Uploading Job properties");
+ projectLoader.uploadProjectProperties(project, new ArrayList<Props>(
+ jobProps.values()));
+ logger.info("Uploading Props properties");
+ projectLoader.uploadProjectProperties(project, propProps);
+ }
+
+ // TODO: find something else to load triggers
+ // if(loadTriggerFromFile) {
+ // logger.info("Loading triggers.");
+ // Props triggerProps = new Props();
+ // triggerProps.put("projectId", project.getId());
+ // triggerProps.put("projectName", project.getName());
+ // triggerProps.put("submitUser", uploader.getUserId());
+ // try {
+ // triggerManager.loadTriggerFromDir(file, triggerProps);
+ // } catch (Exception e) {
+ // // TODO Auto-generated catch block
+ // e.printStackTrace();
+ // logger.error("Failed to load triggers.", e);
+ // }
+ // }
+
+ logger.info("Uploaded project files. Cleaning up temp files.");
+ projectLoader.postEvent(project, EventType.UPLOADED, uploader.getUserId(),
+ "Uploaded project files zip " + archive.getName());
+ try {
+ FileUtils.deleteDirectory(file);
+ } catch (IOException e) {
+ file.deleteOnExit();
+ e.printStackTrace();
+ }
+
+ logger.info("Cleaning up old install files older than "
+ + (project.getVersion() - projectVersionRetention));
+ projectLoader.cleanOlderProjectVersion(project.getId(),
+ project.getVersion() - projectVersionRetention);
+ }
+
+ public void updateFlow(Project project, Flow flow)
+ throws ProjectManagerException {
+ projectLoader.updateFlow(project, flow.getVersion(), flow);
+ }
+
+ private File unzipFile(File archiveFile) throws IOException {
+ ZipFile zipfile = new ZipFile(archiveFile);
+ try {
+ File unzipped = Utils.createTempDir(tempDir);
+ Utils.unzip(zipfile, unzipped);
+ return unzipped;
+ } finally {
+ zipfile.close();
+ }
+ }
+
+ public void postProjectEvent(Project project, EventType type, String user,
+ String message) {
+ projectLoader.postEvent(project, type, user, message);
+ }
}
diff --git a/src/main/java/azkaban/project/ProjectManagerException.java b/src/main/java/azkaban/project/ProjectManagerException.java
index e6f279d..6ad88bf 100644
--- a/src/main/java/azkaban/project/ProjectManagerException.java
+++ b/src/main/java/azkaban/project/ProjectManagerException.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -16,14 +16,14 @@
package azkaban.project;
-public class ProjectManagerException extends Exception{
- private static final long serialVersionUID = 1L;
+public class ProjectManagerException extends Exception {
+ private static final long serialVersionUID = 1L;
- public ProjectManagerException(String message) {
- super(message);
- }
-
- public ProjectManagerException(String message, Throwable cause) {
- super(message, cause);
- }
+ public ProjectManagerException(String message) {
+ super(message);
+ }
+
+ public ProjectManagerException(String message, Throwable cause) {
+ super(message, cause);
+ }
}
src/main/java/azkaban/scheduler/Schedule.java 692(+321 -371)
diff --git a/src/main/java/azkaban/scheduler/Schedule.java b/src/main/java/azkaban/scheduler/Schedule.java
index 32eb916..eeb1cac 100644
--- a/src/main/java/azkaban/scheduler/Schedule.java
+++ b/src/main/java/azkaban/scheduler/Schedule.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -36,372 +36,322 @@ import azkaban.executor.ExecutionOptions;
import azkaban.sla.SlaOption;
import azkaban.utils.Pair;
-public class Schedule{
-
-// private long projectGuid;
-// private long flowGuid;
-
-// private String scheduleId;
-
- private int scheduleId;
- private int projectId;
- private String projectName;
- private String flowName;
- private long firstSchedTime;
- private DateTimeZone timezone;
- private long lastModifyTime;
- private ReadablePeriod period;
- private long nextExecTime;
- private String submitUser;
- private String status;
- private long submitTime;
-
- private boolean skipPastOccurrences = true;
-
- private ExecutionOptions executionOptions;
- private List<SlaOption> slaOptions;
-
- public Schedule(
- int scheduleId,
- int projectId,
- String projectName,
- String flowName,
- String status,
- long firstSchedTime,
- DateTimeZone timezone,
- ReadablePeriod period,
- long lastModifyTime,
- long nextExecTime,
- long submitTime,
- String submitUser
- ) {
-
- this(scheduleId,
- projectId,
- projectName,
- flowName,
- status,
- firstSchedTime,
- timezone,
- period,
- lastModifyTime,
- nextExecTime,
- submitTime,
- submitUser,
- null,
- null
- );
- }
-
- public Schedule(
- int scheduleId,
- int projectId,
- String projectName,
- String flowName,
- String status,
- long firstSchedTime,
- String timezoneId,
- String period,
- long lastModifyTime,
- long nextExecTime,
- long submitTime,
- String submitUser,
- ExecutionOptions executionOptions,
- List<SlaOption> slaOptions
- ) {
- this(scheduleId, projectId,
- projectName,
- flowName,
- status,
- firstSchedTime,
- DateTimeZone.forID(timezoneId),
- parsePeriodString(period),
- lastModifyTime,
- nextExecTime,
- submitTime,
- submitUser,
- executionOptions,
- slaOptions
- );
- }
-
- public Schedule(
- int scheduleId,
- int projectId,
- String projectName,
- String flowName,
- String status,
- long firstSchedTime,
- DateTimeZone timezone,
- ReadablePeriod period,
- long lastModifyTime,
- long nextExecTime,
- long submitTime,
- String submitUser,
- ExecutionOptions executionOptions,
- List<SlaOption> slaOptions
- ) {
- this.scheduleId = scheduleId;
- this.projectId = projectId;
- this.projectName = projectName;
- this.flowName = flowName;
- this.firstSchedTime = firstSchedTime;
- this.timezone = timezone;
- this.lastModifyTime = lastModifyTime;
- this.period = period;
- this.nextExecTime = nextExecTime;
- this.submitUser = submitUser;
- this.status = status;
- this.submitTime = submitTime;
- this.executionOptions = executionOptions;
- this.slaOptions = slaOptions;
- }
-
- public ExecutionOptions getExecutionOptions() {
- return executionOptions;
- }
-
- public List<SlaOption> getSlaOptions() {
- return slaOptions;
- }
-
- public void setFlowOptions(ExecutionOptions executionOptions) {
- this.executionOptions = executionOptions;
- }
-
- public void setSlaOptions(List<SlaOption> slaOptions) {
- this.slaOptions = slaOptions;
- }
-
- public String getScheduleName() {
- return projectName + "." + flowName + " (" + projectId + ")";
- }
-
- public String toString() {
- return projectName + "." + flowName + " (" + projectId + ")" + " to be run at (starting) " +
- new DateTime(firstSchedTime).toDateTimeISO() + " with recurring period of " + (period == null ? "non-recurring" : createPeriodString(period));
- }
-
- public Pair<Integer, String> getScheduleIdentityPair() {
- return new Pair<Integer, String>(getProjectId(), getFlowName());
- }
-
- public void setScheduleId(int scheduleId) {
- this.scheduleId = scheduleId;
- }
-
- public int getScheduleId() {
- return scheduleId;
- }
-
- public int getProjectId() {
- return projectId;
- }
-
- public String getProjectName() {
- return projectName;
- }
-
- public String getFlowName() {
- return flowName;
- }
-
- public long getFirstSchedTime() {
- return firstSchedTime;
- }
-
- public DateTimeZone getTimezone() {
- return timezone;
- }
-
- public long getLastModifyTime() {
- return lastModifyTime;
- }
-
- public ReadablePeriod getPeriod() {
- return period;
- }
-
- public long getNextExecTime() {
- return nextExecTime;
- }
-
- public String getSubmitUser() {
- return submitUser;
- }
-
- public String getStatus() {
- return status;
- }
-
- public long getSubmitTime() {
- return submitTime;
- }
-
- public boolean updateTime() {
- if (new DateTime(nextExecTime).isAfterNow()) {
- return true;
- }
-
- if (period != null) {
- DateTime nextTime = getNextRuntime(nextExecTime, timezone, period);
-
- this.nextExecTime = nextTime.getMillis();
- return true;
- }
-
- return false;
- }
-
- public void setNextExecTime(long nextExecTime) {
- this.nextExecTime = nextExecTime;
- }
-
- private DateTime getNextRuntime(long scheduleTime, DateTimeZone timezone, ReadablePeriod period) {
- DateTime now = new DateTime();
- DateTime date = new DateTime(scheduleTime).withZone(timezone);
- int count = 0;
- while (!now.isBefore(date)) {
- if (count > 100000) {
- throw new IllegalStateException(
- "100000 increments of period did not get to present time.");
- }
-
- if (period == null) {
- break;
- } else {
- date = date.plus(period);
- }
-
- count += 1;
- }
-
- return date;
- }
-
- public static ReadablePeriod parsePeriodString(String periodStr) {
- ReadablePeriod period;
- char periodUnit = periodStr.charAt(periodStr.length() - 1);
- if (periodUnit == 'n') {
- return null;
- }
-
- int periodInt = Integer.parseInt(periodStr.substring(0,
- periodStr.length() - 1));
- switch (periodUnit) {
- case 'M':
- period = Months.months(periodInt);
- break;
- case 'w':
- period = Weeks.weeks(periodInt);
- break;
- case 'd':
- period = Days.days(periodInt);
- break;
- case 'h':
- period = Hours.hours(periodInt);
- break;
- case 'm':
- period = Minutes.minutes(periodInt);
- break;
- case 's':
- period = Seconds.seconds(periodInt);
- break;
- default:
- throw new IllegalArgumentException("Invalid schedule period unit '"
- + periodUnit);
- }
-
- return period;
- }
-
- public static String createPeriodString(ReadablePeriod period) {
- String periodStr = "n";
-
- if (period == null) {
- return "n";
- }
-
- if (period.get(DurationFieldType.months()) > 0) {
- int months = period.get(DurationFieldType.months());
- periodStr = months + "M";
- } else if (period.get(DurationFieldType.weeks()) > 0) {
- int weeks = period.get(DurationFieldType.weeks());
- periodStr = weeks + "w";
- } else if (period.get(DurationFieldType.days()) > 0) {
- int days = period.get(DurationFieldType.days());
- periodStr = days + "d";
- } else if (period.get(DurationFieldType.hours()) > 0) {
- int hours = period.get(DurationFieldType.hours());
- periodStr = hours + "h";
- } else if (period.get(DurationFieldType.minutes()) > 0) {
- int minutes = period.get(DurationFieldType.minutes());
- periodStr = minutes + "m";
- } else if (period.get(DurationFieldType.seconds()) > 0) {
- int seconds = period.get(DurationFieldType.seconds());
- periodStr = seconds + "s";
- }
-
- return periodStr;
- }
-
- public Map<String,Object> optionsToObject() {
- if(executionOptions != null ) {
- HashMap<String, Object> schedObj = new HashMap<String, Object>();
-
- if(executionOptions != null) {
- schedObj.put("executionOptions", executionOptions.toObject());
- }
-
- if(slaOptions != null) {
- List<Object> slaOptionsObject = new ArrayList<Object>();
-// schedObj.put("slaOptions", slaOptions.toObject());
- for(SlaOption sla : slaOptions) {
- slaOptionsObject.add(sla.toObject());
- }
- schedObj.put("slaOptions", slaOptionsObject);
- }
-
- return schedObj;
- }
- return null;
- }
-
- @SuppressWarnings("unchecked")
- public void createAndSetScheduleOptions(Object obj) {
- HashMap<String, Object> schedObj = (HashMap<String, Object>)obj;
- if (schedObj.containsKey("executionOptions")) {
- ExecutionOptions execOptions = ExecutionOptions.createFromObject(schedObj.get("executionOptions"));
- this.executionOptions = execOptions;
- }
- else if (schedObj.containsKey("flowOptions")){
- ExecutionOptions execOptions = ExecutionOptions.createFromObject(schedObj.get("flowOptions"));
- this.executionOptions = execOptions;
- execOptions.setConcurrentOption(ExecutionOptions.CONCURRENT_OPTION_SKIP);
- }
- else {
- this.executionOptions = new ExecutionOptions();
- this.executionOptions.setConcurrentOption(ExecutionOptions.CONCURRENT_OPTION_SKIP);
- }
-
- if(schedObj.containsKey("slaOptions")) {
- List<Object> slaOptionsObject = (List<Object>) schedObj.get("slaOptions");
- List<SlaOption> slaOptions = new ArrayList<SlaOption>();
- for(Object slaObj : slaOptionsObject) {
- slaOptions.add(SlaOption.fromObject(slaObj));
- }
- this.slaOptions = slaOptions;
- }
-
-
- }
-
- public boolean isRecurring() {
- return period == null ? false : true;
- }
-
- public boolean skipPastOccurrences() {
- return skipPastOccurrences;
- }
-
+public class Schedule {
+
+ // private long projectGuid;
+ // private long flowGuid;
+
+ // private String scheduleId;
+
+ private int scheduleId;
+ private int projectId;
+ private String projectName;
+ private String flowName;
+ private long firstSchedTime;
+ private DateTimeZone timezone;
+ private long lastModifyTime;
+ private ReadablePeriod period;
+ private long nextExecTime;
+ private String submitUser;
+ private String status;
+ private long submitTime;
+
+ private boolean skipPastOccurrences = true;
+
+ private ExecutionOptions executionOptions;
+ private List<SlaOption> slaOptions;
+
+ public Schedule(int scheduleId, int projectId, String projectName,
+ String flowName, String status, long firstSchedTime,
+ DateTimeZone timezone, ReadablePeriod period, long lastModifyTime,
+ long nextExecTime, long submitTime, String submitUser) {
+
+ this(scheduleId, projectId, projectName, flowName, status, firstSchedTime,
+ timezone, period, lastModifyTime, nextExecTime, submitTime, submitUser,
+ null, null);
+ }
+
+ public Schedule(int scheduleId, int projectId, String projectName,
+ String flowName, String status, long firstSchedTime, String timezoneId,
+ String period, long lastModifyTime, long nextExecTime, long submitTime,
+ String submitUser, ExecutionOptions executionOptions,
+ List<SlaOption> slaOptions) {
+ this(scheduleId, projectId, projectName, flowName, status, firstSchedTime,
+ DateTimeZone.forID(timezoneId), parsePeriodString(period),
+ lastModifyTime, nextExecTime, submitTime, submitUser, executionOptions,
+ slaOptions);
+ }
+
+ public Schedule(int scheduleId, int projectId, String projectName,
+ String flowName, String status, long firstSchedTime,
+ DateTimeZone timezone, ReadablePeriod period, long lastModifyTime,
+ long nextExecTime, long submitTime, String submitUser,
+ ExecutionOptions executionOptions, List<SlaOption> slaOptions) {
+ this.scheduleId = scheduleId;
+ this.projectId = projectId;
+ this.projectName = projectName;
+ this.flowName = flowName;
+ this.firstSchedTime = firstSchedTime;
+ this.timezone = timezone;
+ this.lastModifyTime = lastModifyTime;
+ this.period = period;
+ this.nextExecTime = nextExecTime;
+ this.submitUser = submitUser;
+ this.status = status;
+ this.submitTime = submitTime;
+ this.executionOptions = executionOptions;
+ this.slaOptions = slaOptions;
+ }
+
+ public ExecutionOptions getExecutionOptions() {
+ return executionOptions;
+ }
+
+ public List<SlaOption> getSlaOptions() {
+ return slaOptions;
+ }
+
+ public void setFlowOptions(ExecutionOptions executionOptions) {
+ this.executionOptions = executionOptions;
+ }
+
+ public void setSlaOptions(List<SlaOption> slaOptions) {
+ this.slaOptions = slaOptions;
+ }
+
+ public String getScheduleName() {
+ return projectName + "." + flowName + " (" + projectId + ")";
+ }
+
+ public String toString() {
+ return projectName + "." + flowName + " (" + projectId + ")"
+ + " to be run at (starting) "
+ + new DateTime(firstSchedTime).toDateTimeISO()
+ + " with recurring period of "
+ + (period == null ? "non-recurring" : createPeriodString(period));
+ }
+
+ public Pair<Integer, String> getScheduleIdentityPair() {
+ return new Pair<Integer, String>(getProjectId(), getFlowName());
+ }
+
+ public void setScheduleId(int scheduleId) {
+ this.scheduleId = scheduleId;
+ }
+
+ public int getScheduleId() {
+ return scheduleId;
+ }
+
+ public int getProjectId() {
+ return projectId;
+ }
+
+ public String getProjectName() {
+ return projectName;
+ }
+
+ public String getFlowName() {
+ return flowName;
+ }
+
+ public long getFirstSchedTime() {
+ return firstSchedTime;
+ }
+
+ public DateTimeZone getTimezone() {
+ return timezone;
+ }
+
+ public long getLastModifyTime() {
+ return lastModifyTime;
+ }
+
+ public ReadablePeriod getPeriod() {
+ return period;
+ }
+
+ public long getNextExecTime() {
+ return nextExecTime;
+ }
+
+ public String getSubmitUser() {
+ return submitUser;
+ }
+
+ public String getStatus() {
+ return status;
+ }
+
+ public long getSubmitTime() {
+ return submitTime;
+ }
+
+ public boolean updateTime() {
+ if (new DateTime(nextExecTime).isAfterNow()) {
+ return true;
+ }
+
+ if (period != null) {
+ DateTime nextTime = getNextRuntime(nextExecTime, timezone, period);
+
+ this.nextExecTime = nextTime.getMillis();
+ return true;
+ }
+
+ return false;
+ }
+
+ public void setNextExecTime(long nextExecTime) {
+ this.nextExecTime = nextExecTime;
+ }
+
+ private DateTime getNextRuntime(long scheduleTime, DateTimeZone timezone,
+ ReadablePeriod period) {
+ DateTime now = new DateTime();
+ DateTime date = new DateTime(scheduleTime).withZone(timezone);
+ int count = 0;
+ while (!now.isBefore(date)) {
+ if (count > 100000) {
+ throw new IllegalStateException(
+ "100000 increments of period did not get to present time.");
+ }
+
+ if (period == null) {
+ break;
+ } else {
+ date = date.plus(period);
+ }
+
+ count += 1;
+ }
+
+ return date;
+ }
+
+ public static ReadablePeriod parsePeriodString(String periodStr) {
+ ReadablePeriod period;
+ char periodUnit = periodStr.charAt(periodStr.length() - 1);
+ if (periodUnit == 'n') {
+ return null;
+ }
+
+ int periodInt =
+ Integer.parseInt(periodStr.substring(0, periodStr.length() - 1));
+ switch (periodUnit) {
+ case 'M':
+ period = Months.months(periodInt);
+ break;
+ case 'w':
+ period = Weeks.weeks(periodInt);
+ break;
+ case 'd':
+ period = Days.days(periodInt);
+ break;
+ case 'h':
+ period = Hours.hours(periodInt);
+ break;
+ case 'm':
+ period = Minutes.minutes(periodInt);
+ break;
+ case 's':
+ period = Seconds.seconds(periodInt);
+ break;
+ default:
+      throw new IllegalArgumentException("Invalid schedule period unit '"
+          + periodUnit + "'");
+ }
+
+ return period;
+ }
+
+ public static String createPeriodString(ReadablePeriod period) {
+ String periodStr = "n";
+
+ if (period == null) {
+ return "n";
+ }
+
+ if (period.get(DurationFieldType.months()) > 0) {
+ int months = period.get(DurationFieldType.months());
+ periodStr = months + "M";
+ } else if (period.get(DurationFieldType.weeks()) > 0) {
+ int weeks = period.get(DurationFieldType.weeks());
+ periodStr = weeks + "w";
+ } else if (period.get(DurationFieldType.days()) > 0) {
+ int days = period.get(DurationFieldType.days());
+ periodStr = days + "d";
+ } else if (period.get(DurationFieldType.hours()) > 0) {
+ int hours = period.get(DurationFieldType.hours());
+ periodStr = hours + "h";
+ } else if (period.get(DurationFieldType.minutes()) > 0) {
+ int minutes = period.get(DurationFieldType.minutes());
+ periodStr = minutes + "m";
+ } else if (period.get(DurationFieldType.seconds()) > 0) {
+ int seconds = period.get(DurationFieldType.seconds());
+ periodStr = seconds + "s";
+ }
+
+ return periodStr;
+ }
+
+ public Map<String, Object> optionsToObject() {
+ if (executionOptions != null) {
+ HashMap<String, Object> schedObj = new HashMap<String, Object>();
+
+      // executionOptions is already guaranteed non-null by the enclosing check.
+      schedObj.put("executionOptions", executionOptions.toObject());
+
+
+ if (slaOptions != null) {
+ List<Object> slaOptionsObject = new ArrayList<Object>();
+ // schedObj.put("slaOptions", slaOptions.toObject());
+ for (SlaOption sla : slaOptions) {
+ slaOptionsObject.add(sla.toObject());
+ }
+ schedObj.put("slaOptions", slaOptionsObject);
+ }
+
+ return schedObj;
+ }
+ return null;
+ }
+
+ @SuppressWarnings("unchecked")
+ public void createAndSetScheduleOptions(Object obj) {
+ HashMap<String, Object> schedObj = (HashMap<String, Object>) obj;
+ if (schedObj.containsKey("executionOptions")) {
+ ExecutionOptions execOptions =
+ ExecutionOptions.createFromObject(schedObj.get("executionOptions"));
+ this.executionOptions = execOptions;
+ } else if (schedObj.containsKey("flowOptions")) {
+ ExecutionOptions execOptions =
+ ExecutionOptions.createFromObject(schedObj.get("flowOptions"));
+ this.executionOptions = execOptions;
+ execOptions.setConcurrentOption(ExecutionOptions.CONCURRENT_OPTION_SKIP);
+ } else {
+ this.executionOptions = new ExecutionOptions();
+ this.executionOptions
+ .setConcurrentOption(ExecutionOptions.CONCURRENT_OPTION_SKIP);
+ }
+
+ if (schedObj.containsKey("slaOptions")) {
+ List<Object> slaOptionsObject = (List<Object>) schedObj.get("slaOptions");
+ List<SlaOption> slaOptions = new ArrayList<SlaOption>();
+ for (Object slaObj : slaOptionsObject) {
+ slaOptions.add(SlaOption.fromObject(slaObj));
+ }
+ this.slaOptions = slaOptions;
+ }
+
+ }
+
+ public boolean isRecurring() {
+    return period != null;
+ }
+
+ public boolean skipPastOccurrences() {
+ return skipPastOccurrences;
+ }
+
}
diff --git a/src/main/java/azkaban/scheduler/ScheduleLoader.java b/src/main/java/azkaban/scheduler/ScheduleLoader.java
index e834cea..65ae121 100644
--- a/src/main/java/azkaban/scheduler/ScheduleLoader.java
+++ b/src/main/java/azkaban/scheduler/ScheduleLoader.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -19,16 +19,16 @@ package azkaban.scheduler;
import java.util.List;
public interface ScheduleLoader {
-
- public void insertSchedule(Schedule s) throws ScheduleManagerException;
-
- public void updateSchedule(Schedule s) throws ScheduleManagerException;
-
- public List<Schedule> loadSchedules() throws ScheduleManagerException;
-
- public void removeSchedule(Schedule s) throws ScheduleManagerException;
-
- public void updateNextExecTime(Schedule s) throws ScheduleManagerException;
-
- public List<Schedule> loadUpdatedSchedules() throws ScheduleManagerException;
+
+ public void insertSchedule(Schedule s) throws ScheduleManagerException;
+
+ public void updateSchedule(Schedule s) throws ScheduleManagerException;
+
+ public List<Schedule> loadSchedules() throws ScheduleManagerException;
+
+ public void removeSchedule(Schedule s) throws ScheduleManagerException;
+
+ public void updateNextExecTime(Schedule s) throws ScheduleManagerException;
+
+ public List<Schedule> loadUpdatedSchedules() throws ScheduleManagerException;
}
src/main/java/azkaban/scheduler/ScheduleManager.java 458(+226 -232)
diff --git a/src/main/java/azkaban/scheduler/ScheduleManager.java b/src/main/java/azkaban/scheduler/ScheduleManager.java
index a243389..5b98d3c 100644
--- a/src/main/java/azkaban/scheduler/ScheduleManager.java
+++ b/src/main/java/azkaban/scheduler/ScheduleManager.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -41,233 +41,227 @@ import azkaban.utils.Props;
* flow to and overlap each other.
*/
public class ScheduleManager implements TriggerAgent {
- private static Logger logger = Logger.getLogger(ScheduleManager.class);
-
- public static final String triggerSource = "SimpleTimeTrigger";
- private final DateTimeFormatter _dateFormat = DateTimeFormat.forPattern("MM-dd-yyyy HH:mm:ss:SSS");
- private ScheduleLoader loader;
-
- private Map<Integer, Schedule> scheduleIDMap = new LinkedHashMap<Integer, Schedule>();
- private Map<Pair<Integer, String>, Schedule> scheduleIdentityPairMap = new LinkedHashMap<Pair<Integer, String>, Schedule>();
-
- /**
- * Give the schedule manager a loader class that will properly load the
- * schedule.
- *
- * @param loader
- */
- public ScheduleManager (ScheduleLoader loader)
- {
- this.loader = loader;
- }
-
- @Override
- public void start() throws ScheduleManagerException {
- List<Schedule> scheduleList = null;
- try {
- scheduleList = loader.loadSchedules();
- } catch (ScheduleManagerException e) {
- // TODO Auto-generated catch block
- logger.error("Failed to load schedules" + e.getCause() + e.getMessage());
- e.printStackTrace();
- }
-
- for (Schedule sched : scheduleList) {
- if(sched.getStatus().equals(TriggerStatus.EXPIRED.toString())) {
- onScheduleExpire(sched);
- } else {
- internalSchedule(sched);
- }
- }
-
- }
-
- // only do this when using external runner
- public synchronized void updateLocal() throws ScheduleManagerException {
- List<Schedule> updates = loader.loadUpdatedSchedules();
- for(Schedule s : updates) {
- if(s.getStatus().equals(TriggerStatus.EXPIRED.toString())) {
- onScheduleExpire(s);
- } else {
- internalSchedule(s);
- }
- }
- }
-
- private void onScheduleExpire(Schedule s) {
- removeSchedule(s);
- }
-
- /**
- * Shutdowns the scheduler thread. After shutdown, it may not be safe to use
- * it again.
- */
- public void shutdown() {
-
- }
-
- /**
- * Retrieves a copy of the list of schedules.
- *
- * @return
- * @throws ScheduleManagerException
- */
- public synchronized List<Schedule> getSchedules() throws ScheduleManagerException {
-
- updateLocal();
- return new ArrayList<Schedule>(scheduleIDMap.values());
- }
-
- /**
- * Returns the scheduled flow for the flow name
- *
- * @param id
- * @return
- * @throws ScheduleManagerException
- */
-
- public Schedule getSchedule(int projectId, String flowId) throws ScheduleManagerException {
- updateLocal();
- return scheduleIdentityPairMap.get(new Pair<Integer,String>(projectId, flowId));
- }
-
- /**
- * Returns the scheduled flow for the scheduleId
- *
- * @param id
- * @return
- * @throws ScheduleManagerException
- */
- public Schedule getSchedule(int scheduleId) throws ScheduleManagerException {
- updateLocal();
- return scheduleIDMap.get(scheduleId);
- }
-
- /**
- * Removes the flow from the schedule if it exists.
- *
- * @param id
- * @throws ScheduleManagerException
- */
-
- public synchronized void removeSchedule(int projectId, String flowId) throws ScheduleManagerException {
- Schedule sched = getSchedule(projectId, flowId);
- if(sched != null) {
- removeSchedule(sched);
- }
- }
-
- /**
- * Removes the flow from the schedule if it exists.
- *
- * @param id
- */
- public synchronized void removeSchedule(Schedule sched) {
- Pair<Integer,String> identityPairMap = sched.getScheduleIdentityPair();
-
- Schedule schedule = scheduleIdentityPairMap.get(identityPairMap);
- if(schedule != null) {
- scheduleIdentityPairMap.remove(identityPairMap);
- }
-
- scheduleIDMap.remove(sched.getScheduleId());
-
- try {
- loader.removeSchedule(sched);
- } catch (ScheduleManagerException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- }
-
- public Schedule scheduleFlow(
- final int scheduleId,
- final int projectId,
- final String projectName,
- final String flowName,
- final String status,
- final long firstSchedTime,
- final DateTimeZone timezone,
- final ReadablePeriod period,
- final long lastModifyTime,
- final long nextExecTime,
- final long submitTime,
- final String submitUser
- ) {
- return scheduleFlow(scheduleId, projectId, projectName, flowName, status, firstSchedTime, timezone, period, lastModifyTime, nextExecTime, submitTime, submitUser, null, null);
- }
-
- public Schedule scheduleFlow(
- final int scheduleId,
- final int projectId,
- final String projectName,
- final String flowName,
- final String status,
- final long firstSchedTime,
- final DateTimeZone timezone,
- final ReadablePeriod period,
- final long lastModifyTime,
- final long nextExecTime,
- final long submitTime,
- final String submitUser,
- ExecutionOptions execOptions,
- List<SlaOption> slaOptions
- ) {
- Schedule sched = new Schedule(scheduleId, projectId, projectName, flowName, status, firstSchedTime, timezone, period, lastModifyTime, nextExecTime, submitTime, submitUser, execOptions, slaOptions);
- logger.info("Scheduling flow '" + sched.getScheduleName() + "' for "
- + _dateFormat.print(firstSchedTime) + " with a period of "
- + period == null ? "(non-recurring)" : period);
-
- insertSchedule(sched);
- return sched;
- }
-
- /**
- * Schedules the flow, but doesn't save the schedule afterwards.
- *
- * @param flow
- */
- private synchronized void internalSchedule(Schedule s) {
- scheduleIDMap.put(s.getScheduleId(), s);
- scheduleIdentityPairMap.put(s.getScheduleIdentityPair(), s);
- }
-
- /**
- * Adds a flow to the schedule.
- *
- * @param flow
- */
- public synchronized void insertSchedule(Schedule s) {
- //boolean exist = s.getScheduleId() != -1;
- Schedule exist = scheduleIdentityPairMap.get(s.getScheduleIdentityPair());
- if(s.updateTime()) {
- try {
- if(exist == null) {
- loader.insertSchedule(s);
- internalSchedule(s);
- }
- else{
- s.setScheduleId(exist.getScheduleId());
- loader.updateSchedule(s);
- internalSchedule(s);
- }
- } catch (ScheduleManagerException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- } else {
- logger.error("The provided schedule is non-recurring and the scheduled time already passed. "
- + s.getScheduleName());
- }
- }
-
- @Override
- public void loadTriggerFromProps(Props props) throws ScheduleManagerException {
- throw new ScheduleManagerException("create " + getTriggerSource() + " from json not supported yet" );
- }
-
- @Override
- public String getTriggerSource() {
- return triggerSource;
- }
+ private static Logger logger = Logger.getLogger(ScheduleManager.class);
+
+ public static final String triggerSource = "SimpleTimeTrigger";
+ private final DateTimeFormatter _dateFormat = DateTimeFormat
+ .forPattern("MM-dd-yyyy HH:mm:ss:SSS");
+ private ScheduleLoader loader;
+
+ private Map<Integer, Schedule> scheduleIDMap =
+ new LinkedHashMap<Integer, Schedule>();
+ private Map<Pair<Integer, String>, Schedule> scheduleIdentityPairMap =
+ new LinkedHashMap<Pair<Integer, String>, Schedule>();
+
+ /**
+ * Give the schedule manager a loader class that will properly load the
+ * schedule.
+ *
+ * @param loader
+ */
+ public ScheduleManager(ScheduleLoader loader) {
+ this.loader = loader;
+ }
+
+ @Override
+ public void start() throws ScheduleManagerException {
+    // Default to an empty list so a load failure does not NPE the loop below.
+    List<Schedule> scheduleList = new ArrayList<Schedule>();
+    try {
+      scheduleList = loader.loadSchedules();
+    } catch (ScheduleManagerException e) {
+      logger.error("Failed to load schedules", e);
+      e.printStackTrace();
+    }
+
+ for (Schedule sched : scheduleList) {
+ if (sched.getStatus().equals(TriggerStatus.EXPIRED.toString())) {
+ onScheduleExpire(sched);
+ } else {
+ internalSchedule(sched);
+ }
+ }
+
+ }
+
+ // only do this when using external runner
+ public synchronized void updateLocal() throws ScheduleManagerException {
+ List<Schedule> updates = loader.loadUpdatedSchedules();
+ for (Schedule s : updates) {
+ if (s.getStatus().equals(TriggerStatus.EXPIRED.toString())) {
+ onScheduleExpire(s);
+ } else {
+ internalSchedule(s);
+ }
+ }
+ }
+
+ private void onScheduleExpire(Schedule s) {
+ removeSchedule(s);
+ }
+
+ /**
+ * Shutdowns the scheduler thread. After shutdown, it may not be safe to use
+ * it again.
+ */
+ public void shutdown() {
+
+ }
+
+ /**
+ * Retrieves a copy of the list of schedules.
+ *
+ * @return
+ * @throws ScheduleManagerException
+ */
+ public synchronized List<Schedule> getSchedules()
+ throws ScheduleManagerException {
+
+ updateLocal();
+ return new ArrayList<Schedule>(scheduleIDMap.values());
+ }
+
+ /**
+ * Returns the scheduled flow for the flow name
+ *
+ * @param id
+ * @return
+ * @throws ScheduleManagerException
+ */
+
+ public Schedule getSchedule(int projectId, String flowId)
+ throws ScheduleManagerException {
+ updateLocal();
+ return scheduleIdentityPairMap.get(new Pair<Integer, String>(projectId,
+ flowId));
+ }
+
+ /**
+ * Returns the scheduled flow for the scheduleId
+ *
+ * @param id
+ * @return
+ * @throws ScheduleManagerException
+ */
+ public Schedule getSchedule(int scheduleId) throws ScheduleManagerException {
+ updateLocal();
+ return scheduleIDMap.get(scheduleId);
+ }
+
+ /**
+ * Removes the flow from the schedule if it exists.
+ *
+ * @param id
+ * @throws ScheduleManagerException
+ */
+
+ public synchronized void removeSchedule(int projectId, String flowId)
+ throws ScheduleManagerException {
+ Schedule sched = getSchedule(projectId, flowId);
+ if (sched != null) {
+ removeSchedule(sched);
+ }
+ }
+
+ /**
+ * Removes the flow from the schedule if it exists.
+ *
+ * @param id
+ */
+ public synchronized void removeSchedule(Schedule sched) {
+ Pair<Integer, String> identityPairMap = sched.getScheduleIdentityPair();
+
+ Schedule schedule = scheduleIdentityPairMap.get(identityPairMap);
+ if (schedule != null) {
+ scheduleIdentityPairMap.remove(identityPairMap);
+ }
+
+ scheduleIDMap.remove(sched.getScheduleId());
+
+ try {
+ loader.removeSchedule(sched);
+ } catch (ScheduleManagerException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ }
+
+ public Schedule scheduleFlow(final int scheduleId, final int projectId,
+ final String projectName, final String flowName, final String status,
+ final long firstSchedTime, final DateTimeZone timezone,
+ final ReadablePeriod period, final long lastModifyTime,
+ final long nextExecTime, final long submitTime, final String submitUser) {
+ return scheduleFlow(scheduleId, projectId, projectName, flowName, status,
+ firstSchedTime, timezone, period, lastModifyTime, nextExecTime,
+ submitTime, submitUser, null, null);
+ }
+
+ public Schedule scheduleFlow(final int scheduleId, final int projectId,
+ final String projectName, final String flowName, final String status,
+ final long firstSchedTime, final DateTimeZone timezone,
+ final ReadablePeriod period, final long lastModifyTime,
+ final long nextExecTime, final long submitTime, final String submitUser,
+ ExecutionOptions execOptions, List<SlaOption> slaOptions) {
+ Schedule sched =
+ new Schedule(scheduleId, projectId, projectName, flowName, status,
+ firstSchedTime, timezone, period, lastModifyTime, nextExecTime,
+ submitTime, submitUser, execOptions, slaOptions);
+    // Parenthesize the ternary: '+' binds tighter than '==', so without parens
+    // the null check compared a concatenated String and never fired.
+    logger.info("Scheduling flow '" + sched.getScheduleName() + "' for "
+        + _dateFormat.print(firstSchedTime) + " with a period of "
+        + (period == null ? "(non-recurring)" : period));
+
+ insertSchedule(sched);
+ return sched;
+ }
+
+ /**
+ * Schedules the flow, but doesn't save the schedule afterwards.
+ *
+ * @param flow
+ */
+ private synchronized void internalSchedule(Schedule s) {
+ scheduleIDMap.put(s.getScheduleId(), s);
+ scheduleIdentityPairMap.put(s.getScheduleIdentityPair(), s);
+ }
+
+ /**
+ * Adds a flow to the schedule.
+ *
+ * @param flow
+ */
+ public synchronized void insertSchedule(Schedule s) {
+ // boolean exist = s.getScheduleId() != -1;
+ Schedule exist = scheduleIdentityPairMap.get(s.getScheduleIdentityPair());
+ if (s.updateTime()) {
+ try {
+ if (exist == null) {
+ loader.insertSchedule(s);
+ internalSchedule(s);
+ } else {
+ s.setScheduleId(exist.getScheduleId());
+ loader.updateSchedule(s);
+ internalSchedule(s);
+ }
+ } catch (ScheduleManagerException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ } else {
+ logger
+ .error("The provided schedule is non-recurring and the scheduled time already passed. "
+ + s.getScheduleName());
+ }
+ }
+
+ @Override
+ public void loadTriggerFromProps(Props props) throws ScheduleManagerException {
+ throw new ScheduleManagerException("create " + getTriggerSource()
+ + " from json not supported yet");
+ }
+
+ @Override
+ public String getTriggerSource() {
+ return triggerSource;
+ }
}
diff --git a/src/main/java/azkaban/scheduler/ScheduleManagerException.java b/src/main/java/azkaban/scheduler/ScheduleManagerException.java
index 3ffb1b6..8efe52e 100644
--- a/src/main/java/azkaban/scheduler/ScheduleManagerException.java
+++ b/src/main/java/azkaban/scheduler/ScheduleManagerException.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -16,18 +16,18 @@
package azkaban.scheduler;
-public class ScheduleManagerException extends Exception{
- private static final long serialVersionUID = 1L;
+public class ScheduleManagerException extends Exception {
+ private static final long serialVersionUID = 1L;
- public ScheduleManagerException(String message) {
- super(message);
- }
-
- public ScheduleManagerException(String message, Throwable cause) {
- super(message, cause);
- }
+ public ScheduleManagerException(String message) {
+ super(message);
+ }
- public ScheduleManagerException(Exception e) {
- super(e);
- }
+ public ScheduleManagerException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+ public ScheduleManagerException(Exception e) {
+ super(e);
+ }
}
diff --git a/src/main/java/azkaban/scheduler/ScheduleStatisticManager.java b/src/main/java/azkaban/scheduler/ScheduleStatisticManager.java
index 71c6aaf..b9e5966 100644
--- a/src/main/java/azkaban/scheduler/ScheduleStatisticManager.java
+++ b/src/main/java/azkaban/scheduler/ScheduleStatisticManager.java
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -31,149 +31,154 @@ import azkaban.utils.JSONUtils;
import azkaban.webapp.AzkabanWebServer;
public class ScheduleStatisticManager {
- private static HashMap<Integer, Object> cacheLock = new HashMap<Integer, Object>();
- private static File cacheDirectory;
- private static final int STAT_NUMBERS = 10;
-
- public static Map<String, Object> getStatistics(int scheduleId, AzkabanWebServer server) throws ScheduleManagerException {
- if (cacheDirectory == null) {
- setCacheFolder(new File(server.getServerProps().getString("cache.directory", "cache")));
- }
- Map<String, Object> data = loadCache(scheduleId);
- if (data != null) {
- return data;
- }
-
- // Calculate data and cache it
- data = calculateStats(scheduleId, server);
-
- saveCache(scheduleId, data);
-
- return data;
- }
-
- private static Map<String, Object> calculateStats(int scheduleId, AzkabanWebServer server) throws ScheduleManagerException {
- Map<String, Object> data = new HashMap<String, Object>();
- ExecutorManagerAdapter executorManager = server.getExecutorManager();
- ScheduleManager scheduleManager = server.getScheduleManager();
- Schedule schedule = scheduleManager.getSchedule(scheduleId);
-
- try {
- List<ExecutableFlow> executables = executorManager.getExecutableFlows(schedule.getProjectId(), schedule.getFlowName(), 0, STAT_NUMBERS, Status.SUCCEEDED);
-
- long average = 0;
- long min = Integer.MAX_VALUE;
- long max = 0;
- if (executables.isEmpty()) {
- average = 0;
- min = 0;
- max = 0;
- }
- else {
- for (ExecutableFlow flow : executables) {
- long time = flow.getEndTime() - flow.getStartTime();
- average += time;
- if (time < min) {
- min = time;
- }
- if (time > max) {
- max = time;
- }
- }
- average /= executables.size();
- }
-
- data.put("average", average);
- data.put("min", min);
- data.put("max", max);
- } catch (ExecutorManagerException e) {
- e.printStackTrace();
- }
-
- return data;
- }
-
- public static void invalidateCache(int scheduleId, File cacheDir) {
- setCacheFolder(cacheDir);
- // This should be silent and not fail
- try {
- Object lock = getLock(scheduleId);
- synchronized (lock) {
- getCacheFile(scheduleId).delete();
- }
- unLock(scheduleId);
- } catch (Exception e) {
- }
- }
-
- private static void saveCache(int scheduleId, Map<String, Object> data) {
- Object lock = getLock(scheduleId);
- try {
- synchronized (lock) {
- File cache = getCacheFile(scheduleId);
- cache.createNewFile();
- OutputStream output = new FileOutputStream(cache);
- try {
- JSONUtils.toJSON(data, output, false);
- } finally {
- output.close();
- }
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
- unLock(scheduleId);
- }
-
- private static Map<String, Object> loadCache(int scheduleId) {
- Object lock = getLock(scheduleId);
- try {
- synchronized (lock) {
- File cache = getCacheFile(scheduleId);
- if (cache.exists() && cache.isFile()) {
- Object dataObj = JSONUtils.parseJSONFromFile(cache);
- if (dataObj instanceof Map<?, ?>) {
- @SuppressWarnings("unchecked")
- Map<String, Object> data = (Map<String, Object>) dataObj;
- return data;
- }
- }
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
- unLock(scheduleId);
- return null;
- }
-
- private static File getCacheFile(int scheduleId) {
- cacheDirectory.mkdirs();
- File file = new File(cacheDirectory, scheduleId + ".cache");
- return file;
- }
-
- private static Object getLock(int scheduleId) {
- Object lock = null;
- synchronized (cacheLock) {
- lock = cacheLock.get(scheduleId);
- if (lock == null) {
- lock = new Object();
- cacheLock.put(scheduleId, lock);
- }
- }
-
- return lock;
- }
-
- private static void unLock(int scheduleId) {
- synchronized (cacheLock) {
- cacheLock.remove(scheduleId);
- }
- }
-
- private static void setCacheFolder(File cacheDir) {
- if (cacheDirectory == null) {
- cacheDirectory = new File(cacheDir, "schedule-statistics");
- }
- }
+ private static HashMap<Integer, Object> cacheLock =
+ new HashMap<Integer, Object>();
+ private static File cacheDirectory;
+ private static final int STAT_NUMBERS = 10;
+
+ public static Map<String, Object> getStatistics(int scheduleId,
+ AzkabanWebServer server) throws ScheduleManagerException {
+ if (cacheDirectory == null) {
+ setCacheFolder(new File(server.getServerProps().getString(
+ "cache.directory", "cache")));
+ }
+ Map<String, Object> data = loadCache(scheduleId);
+ if (data != null) {
+ return data;
+ }
+
+ // Calculate data and cache it
+ data = calculateStats(scheduleId, server);
+
+ saveCache(scheduleId, data);
+
+ return data;
+ }
+
+ private static Map<String, Object> calculateStats(int scheduleId,
+ AzkabanWebServer server) throws ScheduleManagerException {
+ Map<String, Object> data = new HashMap<String, Object>();
+ ExecutorManagerAdapter executorManager = server.getExecutorManager();
+ ScheduleManager scheduleManager = server.getScheduleManager();
+ Schedule schedule = scheduleManager.getSchedule(scheduleId);
+
+ try {
+ List<ExecutableFlow> executables =
+ executorManager.getExecutableFlows(schedule.getProjectId(),
+ schedule.getFlowName(), 0, STAT_NUMBERS, Status.SUCCEEDED);
+
+ long average = 0;
+ long min = Integer.MAX_VALUE;
+ long max = 0;
+ if (executables.isEmpty()) {
+ average = 0;
+ min = 0;
+ max = 0;
+ } else {
+ for (ExecutableFlow flow : executables) {
+ long time = flow.getEndTime() - flow.getStartTime();
+ average += time;
+ if (time < min) {
+ min = time;
+ }
+ if (time > max) {
+ max = time;
+ }
+ }
+ average /= executables.size();
+ }
+
+ data.put("average", average);
+ data.put("min", min);
+ data.put("max", max);
+ } catch (ExecutorManagerException e) {
+ e.printStackTrace();
+ }
+
+ return data;
+ }
+
+ public static void invalidateCache(int scheduleId, File cacheDir) {
+ setCacheFolder(cacheDir);
+ // This should be silent and not fail
+ try {
+ Object lock = getLock(scheduleId);
+ synchronized (lock) {
+ getCacheFile(scheduleId).delete();
+ }
+ unLock(scheduleId);
+ } catch (Exception e) {
+ }
+ }
+
+ private static void saveCache(int scheduleId, Map<String, Object> data) {
+ Object lock = getLock(scheduleId);
+ try {
+ synchronized (lock) {
+ File cache = getCacheFile(scheduleId);
+ cache.createNewFile();
+ OutputStream output = new FileOutputStream(cache);
+ try {
+ JSONUtils.toJSON(data, output, false);
+ } finally {
+ output.close();
+ }
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ unLock(scheduleId);
+ }
+
+ private static Map<String, Object> loadCache(int scheduleId) {
+ Object lock = getLock(scheduleId);
+ try {
+ synchronized (lock) {
+ File cache = getCacheFile(scheduleId);
+ if (cache.exists() && cache.isFile()) {
+ Object dataObj = JSONUtils.parseJSONFromFile(cache);
+ if (dataObj instanceof Map<?, ?>) {
+ @SuppressWarnings("unchecked")
+ Map<String, Object> data = (Map<String, Object>) dataObj;
+ return data;
+ }
+ }
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ unLock(scheduleId);
+ return null;
+ }
+
+ private static File getCacheFile(int scheduleId) {
+ cacheDirectory.mkdirs();
+ File file = new File(cacheDirectory, scheduleId + ".cache");
+ return file;
+ }
+
+ private static Object getLock(int scheduleId) {
+ Object lock = null;
+ synchronized (cacheLock) {
+ lock = cacheLock.get(scheduleId);
+ if (lock == null) {
+ lock = new Object();
+ cacheLock.put(scheduleId, lock);
+ }
+ }
+
+ return lock;
+ }
+
+ private static void unLock(int scheduleId) {
+ synchronized (cacheLock) {
+ cacheLock.remove(scheduleId);
+ }
+ }
+
+ private static void setCacheFolder(File cacheDir) {
+ if (cacheDirectory == null) {
+ cacheDirectory = new File(cacheDir, "schedule-statistics");
+ }
+ }
}
diff --git a/src/main/java/azkaban/scheduler/TriggerBasedScheduleLoader.java b/src/main/java/azkaban/scheduler/TriggerBasedScheduleLoader.java
index 7648a05..796b7af 100644
--- a/src/main/java/azkaban/scheduler/TriggerBasedScheduleLoader.java
+++ b/src/main/java/azkaban/scheduler/TriggerBasedScheduleLoader.java
@@ -18,175 +18,188 @@ import azkaban.trigger.builtin.BasicTimeChecker;
import azkaban.trigger.builtin.ExecuteFlowAction;
public class TriggerBasedScheduleLoader implements ScheduleLoader {
-
- private static Logger logger = Logger.getLogger(TriggerBasedScheduleLoader.class);
-
- private TriggerManagerAdapter triggerManager;
-
- private String triggerSource;
-
- private long lastUpdateTime = -1;
-
- public TriggerBasedScheduleLoader(TriggerManager triggerManager, String triggerSource) {
- this.triggerManager = triggerManager;
- this.triggerSource = triggerSource;
- }
-
- private Trigger scheduleToTrigger(Schedule s) {
- Condition triggerCondition = createTriggerCondition(s);
- Condition expireCondition = createExpireCondition(s);
- List<TriggerAction> actions = createActions(s);
- Trigger t = new Trigger(s.getScheduleId(), s.getLastModifyTime(), s.getSubmitTime(), s.getSubmitUser(), triggerSource, triggerCondition, expireCondition, actions);
- if(s.isRecurring()) {
- t.setResetOnTrigger(true);
- } else {
- t.setResetOnTrigger(false);
- }
- return t;
- }
-
- private List<TriggerAction> createActions (Schedule s) {
- List<TriggerAction> actions = new ArrayList<TriggerAction>();
- ExecuteFlowAction executeAct = new ExecuteFlowAction("executeFlowAction", s.getProjectId(), s.getProjectName(), s.getFlowName(), s.getSubmitUser(), s.getExecutionOptions(), s.getSlaOptions());
- actions.add(executeAct);
-
- return actions;
- }
-
- private Condition createTriggerCondition (Schedule s) {
- Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
- ConditionChecker checker = new BasicTimeChecker("BasicTimeChecker_1", s.getFirstSchedTime(), s.getTimezone(), s.isRecurring(), s.skipPastOccurrences(), s.getPeriod());
- checkers.put(checker.getId(), checker);
- String expr = checker.getId() + ".eval()";
- Condition cond = new Condition(checkers, expr);
- return cond;
- }
-
- // if failed to trigger, auto expire?
- private Condition createExpireCondition (Schedule s) {
- Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
- ConditionChecker checker = new BasicTimeChecker("BasicTimeChecker_2", s.getFirstSchedTime(), s.getTimezone(), s.isRecurring(), s.skipPastOccurrences(), s.getPeriod());
- checkers.put(checker.getId(), checker);
- String expr = checker.getId() + ".eval()";
- Condition cond = new Condition(checkers, expr);
- return cond;
- }
-
- @Override
- public void insertSchedule(Schedule s) throws ScheduleManagerException {
- Trigger t = scheduleToTrigger(s);
- try {
- triggerManager.insertTrigger(t, t.getSubmitUser());
- s.setScheduleId(t.getTriggerId());
- } catch (TriggerManagerException e) {
- throw new ScheduleManagerException("Failed to insert new schedule!", e);
- }
- }
-
- @Override
- public void updateSchedule(Schedule s) throws ScheduleManagerException {
- Trigger t = scheduleToTrigger(s);
- try {
- triggerManager.updateTrigger(t, t.getSubmitUser());
- } catch (TriggerManagerException e) {
- throw new ScheduleManagerException("Failed to update schedule!", e);
- }
- }
-
- //TODO
- // may need to add logic to filter out skip runs
- @Override
- public synchronized List<Schedule> loadSchedules() throws ScheduleManagerException {
- List<Trigger> triggers = triggerManager.getTriggers(triggerSource);
- List<Schedule> schedules = new ArrayList<Schedule>();
-// triggersLocalCopy = new HashMap<Integer, Trigger>();
- for(Trigger t : triggers) {
- lastUpdateTime = Math.max(lastUpdateTime, t.getLastModifyTime());
- Schedule s = triggerToSchedule(t);
- schedules.add(s);
- System.out.println("loaded schedule for " + s.getProjectId() + s.getProjectName());
- }
- return schedules;
-
- }
-
- private Schedule triggerToSchedule(Trigger t) throws ScheduleManagerException {
- Condition triggerCond = t.getTriggerCondition();
- Map<String, ConditionChecker> checkers = triggerCond.getCheckers();
- BasicTimeChecker ck = null;
- for(ConditionChecker checker : checkers.values()) {
- if(checker.getType().equals(BasicTimeChecker.type)) {
- ck = (BasicTimeChecker) checker;
- break;
- }
- }
- List<TriggerAction> actions = t.getActions();
- ExecuteFlowAction act = null;
- for(TriggerAction action : actions) {
- if(action.getType().equals(ExecuteFlowAction.type)) {
- act = (ExecuteFlowAction) action;
- break;
- }
- }
- if(ck != null && act != null) {
- Schedule s = new Schedule(
- t.getTriggerId(),
- act.getProjectId(),
- act.getProjectName(),
- act.getFlowName(),
- t.getStatus().toString(),
- ck.getFirstCheckTime(),
- ck.getTimeZone(),
- ck.getPeriod(),
- t.getLastModifyTime(),
- ck.getNextCheckTime(),
- t.getSubmitTime(),
- t.getSubmitUser(),
- act.getExecutionOptions(),
- act.getSlaOptions());
- return s;
- } else {
- logger.error("Failed to parse schedule from trigger!");
- throw new ScheduleManagerException("Failed to parse schedule from trigger!");
- }
- }
-
- @Override
- public void removeSchedule(Schedule s) throws ScheduleManagerException {
- try {
- triggerManager.removeTrigger(s.getScheduleId(), s.getSubmitUser());
-// triggersLocalCopy.remove(s.getScheduleId());
- } catch (TriggerManagerException e) {
- // TODO Auto-generated catch block
- throw new ScheduleManagerException(e.getMessage());
- }
-
- }
-
- @Override
- public void updateNextExecTime(Schedule s)
- throws ScheduleManagerException {
-
- }
-
- @Override
- public synchronized List<Schedule> loadUpdatedSchedules() throws ScheduleManagerException {
- List<Trigger> triggers;
- try {
- triggers = triggerManager.getTriggerUpdates(triggerSource, lastUpdateTime);
- } catch (TriggerManagerException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- throw new ScheduleManagerException(e);
- }
- List<Schedule> schedules = new ArrayList<Schedule>();
- for(Trigger t : triggers) {
- lastUpdateTime = Math.max(lastUpdateTime, t.getLastModifyTime());
- Schedule s = triggerToSchedule(t);
- schedules.add(s);
- System.out.println("loaded schedule for " + s.getProjectId() + s.getProjectName());
- }
- return schedules;
- }
+
+ private static Logger logger = Logger
+ .getLogger(TriggerBasedScheduleLoader.class);
+
+ private TriggerManagerAdapter triggerManager;
+
+ private String triggerSource;
+
+ private long lastUpdateTime = -1;
+
+ // Builds a loader that persists schedules as triggers through the given
+ // manager; triggerSource tags the triggers owned by this loader.
+ // NOTE(review): the field is typed TriggerManagerAdapter but the
+ // parameter is the concrete TriggerManager — consider widening the
+ // parameter type to the interface.
+ public TriggerBasedScheduleLoader(TriggerManager triggerManager,
+ String triggerSource) {
+ this.triggerManager = triggerManager;
+ this.triggerSource = triggerSource;
+ }
+
+ // Converts a Schedule into its Trigger representation: a time-based
+ // firing condition, an expire condition, and an ExecuteFlowAction.
+ // Recurring schedules reset the trigger after each firing; one-shot
+ // schedules do not. Inverse of triggerToSchedule().
+ private Trigger scheduleToTrigger(Schedule s) {
+ Condition triggerCondition = createTriggerCondition(s);
+ Condition expireCondition = createExpireCondition(s);
+ List<TriggerAction> actions = createActions(s);
+ Trigger t =
+ new Trigger(s.getScheduleId(), s.getLastModifyTime(),
+ s.getSubmitTime(), s.getSubmitUser(), triggerSource,
+ triggerCondition, expireCondition, actions);
+ if (s.isRecurring()) {
+ t.setResetOnTrigger(true);
+ } else {
+ t.setResetOnTrigger(false);
+ }
+ return t;
+ }
+
+ // The single action performed when the schedule fires: run the
+ // schedule's flow with its execution and SLA options, on behalf of the
+ // submitting user.
+ private List<TriggerAction> createActions(Schedule s) {
+ List<TriggerAction> actions = new ArrayList<TriggerAction>();
+ ExecuteFlowAction executeAct =
+ new ExecuteFlowAction("executeFlowAction", s.getProjectId(),
+ s.getProjectName(), s.getFlowName(), s.getSubmitUser(),
+ s.getExecutionOptions(), s.getSlaOptions());
+ actions.add(executeAct);
+
+ return actions;
+ }
+
+ // Builds the firing condition: a single BasicTimeChecker evaluated via
+ // the expression "<checkerId>.eval()".
+ private Condition createTriggerCondition(Schedule s) {
+ Map<String, ConditionChecker> checkers =
+ new HashMap<String, ConditionChecker>();
+ ConditionChecker checker =
+ new BasicTimeChecker("BasicTimeChecker_1", s.getFirstSchedTime(),
+ s.getTimezone(), s.isRecurring(), s.skipPastOccurrences(),
+ s.getPeriod());
+ checkers.put(checker.getId(), checker);
+ String expr = checker.getId() + ".eval()";
+ Condition cond = new Condition(checkers, expr);
+ return cond;
+ }
+
+ // if failed to trigger, auto expire?
+ // NOTE(review): identical to createTriggerCondition() except the checker
+ // id, so the expire condition becomes true at the same instant the
+ // trigger condition does — confirm this expiry semantics is intended.
+ private Condition createExpireCondition(Schedule s) {
+ Map<String, ConditionChecker> checkers =
+ new HashMap<String, ConditionChecker>();
+ ConditionChecker checker =
+ new BasicTimeChecker("BasicTimeChecker_2", s.getFirstSchedTime(),
+ s.getTimezone(), s.isRecurring(), s.skipPastOccurrences(),
+ s.getPeriod());
+ checkers.put(checker.getId(), checker);
+ String expr = checker.getId() + ".eval()";
+ Condition cond = new Condition(checkers, expr);
+ return cond;
+ }
+
+ // Persists a new schedule as a trigger and copies the generated trigger
+ // id back onto the schedule (schedule id == trigger id in this scheme).
+ // Wraps TriggerManagerException, preserving it as the cause.
+ @Override
+ public void insertSchedule(Schedule s) throws ScheduleManagerException {
+ Trigger t = scheduleToTrigger(s);
+ try {
+ triggerManager.insertTrigger(t, t.getSubmitUser());
+ s.setScheduleId(t.getTriggerId());
+ } catch (TriggerManagerException e) {
+ throw new ScheduleManagerException("Failed to insert new schedule!", e);
+ }
+ }
+
+ // Rewrites the trigger backing an existing schedule. Wraps
+ // TriggerManagerException, preserving it as the cause.
+ @Override
+ public void updateSchedule(Schedule s) throws ScheduleManagerException {
+ Trigger t = scheduleToTrigger(s);
+ try {
+ triggerManager.updateTrigger(t, t.getSubmitUser());
+ } catch (TriggerManagerException e) {
+ throw new ScheduleManagerException("Failed to update schedule!", e);
+ }
+ }
+
+ // TODO
+ // may need to add logic to filter out skip runs
+ // Loads every schedule owned by this loader's triggerSource, advancing
+ // the lastUpdateTime watermark to the newest modify time seen (used by
+ // loadUpdatedSchedules() for incremental reloads).
+ // NOTE(review): System.out.println should go through the class logger.
+ @Override
+ public synchronized List<Schedule> loadSchedules()
+ throws ScheduleManagerException {
+ List<Trigger> triggers = triggerManager.getTriggers(triggerSource);
+ List<Schedule> schedules = new ArrayList<Schedule>();
+ // triggersLocalCopy = new HashMap<Integer, Trigger>();
+ for (Trigger t : triggers) {
+ lastUpdateTime = Math.max(lastUpdateTime, t.getLastModifyTime());
+ Schedule s = triggerToSchedule(t);
+ schedules.add(s);
+ System.out.println("loaded schedule for " + s.getProjectId()
+ + s.getProjectName());
+ }
+ return schedules;
+
+ }
+
+ // Reverse of scheduleToTrigger(): reconstructs a Schedule from a
+ // trigger's first BasicTimeChecker (timing) and first ExecuteFlowAction
+ // (target flow). Throws if either is missing, since the trigger then
+ // cannot represent a schedule.
+ private Schedule triggerToSchedule(Trigger t) throws ScheduleManagerException {
+ Condition triggerCond = t.getTriggerCondition();
+ Map<String, ConditionChecker> checkers = triggerCond.getCheckers();
+ BasicTimeChecker ck = null;
+ for (ConditionChecker checker : checkers.values()) {
+ if (checker.getType().equals(BasicTimeChecker.type)) {
+ ck = (BasicTimeChecker) checker;
+ break;
+ }
+ }
+ List<TriggerAction> actions = t.getActions();
+ ExecuteFlowAction act = null;
+ for (TriggerAction action : actions) {
+ if (action.getType().equals(ExecuteFlowAction.type)) {
+ act = (ExecuteFlowAction) action;
+ break;
+ }
+ }
+ if (ck != null && act != null) {
+ Schedule s =
+ new Schedule(t.getTriggerId(), act.getProjectId(),
+ act.getProjectName(), act.getFlowName(),
+ t.getStatus().toString(), ck.getFirstCheckTime(),
+ ck.getTimeZone(), ck.getPeriod(), t.getLastModifyTime(),
+ ck.getNextCheckTime(), t.getSubmitTime(), t.getSubmitUser(),
+ act.getExecutionOptions(), act.getSlaOptions());
+ return s;
+ } else {
+ logger.error("Failed to parse schedule from trigger!");
+ throw new ScheduleManagerException(
+ "Failed to parse schedule from trigger!");
+ }
+ }
+
+ // Removes the trigger backing this schedule.
+ // NOTE(review): only e.getMessage() is propagated; pass `e` as the cause
+ // so the original stack trace is preserved (as insertSchedule does).
+ @Override
+ public void removeSchedule(Schedule s) throws ScheduleManagerException {
+ try {
+ triggerManager.removeTrigger(s.getScheduleId(), s.getSubmitUser());
+ // triggersLocalCopy.remove(s.getScheduleId());
+ } catch (TriggerManagerException e) {
+ // TODO Auto-generated catch block
+ throw new ScheduleManagerException(e.getMessage());
+ }
+
+ }
+
+ // Intentional no-op in this implementation; nothing is persisted here.
+ @Override
+ public void updateNextExecTime(Schedule s) throws ScheduleManagerException {
+
+ }
+
+ // Incremental variant of loadSchedules(): fetches only triggers modified
+ // since lastUpdateTime, then advances the watermark past what was seen.
+ // NOTE(review): System.out.println should go through the class logger.
+ @Override
+ public synchronized List<Schedule> loadUpdatedSchedules()
+ throws ScheduleManagerException {
+ List<Trigger> triggers;
+ try {
+ triggers =
+ triggerManager.getTriggerUpdates(triggerSource, lastUpdateTime);
+ } catch (TriggerManagerException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ throw new ScheduleManagerException(e);
+ }
+ List<Schedule> schedules = new ArrayList<Schedule>();
+ for (Trigger t : triggers) {
+ lastUpdateTime = Math.max(lastUpdateTime, t.getLastModifyTime());
+ Schedule s = triggerToSchedule(t);
+ schedules.add(s);
+ System.out.println("loaded schedule for " + s.getProjectId()
+ + s.getProjectName());
+ }
+ return schedules;
+ }
}
src/main/java/azkaban/sla/SlaOption.java 309(+162 -147)
diff --git a/src/main/java/azkaban/sla/SlaOption.java b/src/main/java/azkaban/sla/SlaOption.java
index e8f7a04..82b9bdc 100644
--- a/src/main/java/azkaban/sla/SlaOption.java
+++ b/src/main/java/azkaban/sla/SlaOption.java
@@ -12,153 +12,168 @@ import org.joda.time.format.DateTimeFormatter;
import azkaban.executor.ExecutableFlow;
public class SlaOption {
-
- public static final String TYPE_FLOW_FINISH = "FlowFinish";
- public static final String TYPE_FLOW_SUCCEED = "FlowSucceed";
- public static final String TYPE_FLOW_PROGRESS = "FlowProgress";
-
- public static final String TYPE_JOB_FINISH = "JobFinish";
- public static final String TYPE_JOB_SUCCEED = "JobSucceed";
- public static final String TYPE_JOB_PROGRESS = "JobProgress";
-
- public static final String INFO_DURATION = "Duration";
- public static final String INFO_FLOW_NAME = "FlowName";
- public static final String INFO_JOB_NAME = "JobName";
- public static final String INFO_PROGRESS_PERCENT = "ProgressPercent";
- public static final String INFO_EMAIL_LIST = "EmailList";
-
- // always alert
- public static final String ALERT_TYPE = "SlaAlertType";
- public static final String ACTION_CANCEL_FLOW = "SlaCancelFlow";
- public static final String ACTION_ALERT = "SlaAlert";
-
- private String type;
- private Map<String, Object> info;
- private List<String> actions;
-
- private static DateTimeFormatter fmt = DateTimeFormat.forPattern("MM/dd, YYYY HH:mm");
-
- public SlaOption(
- String type,
- List<String> actions,
- Map<String, Object> info
- ) {
- this.type = type;
- this.info = info;
- this.actions = actions;
- }
-
- public String getType() {
- return type;
- }
-
- public void setType(String type) {
- this.type = type;
- }
-
- public Map<String, Object> getInfo() {
- return info;
- }
-
- public void setInfo(Map<String, Object> info) {
- this.info = info;
- }
-
- public List<String> getActions() {
- return actions;
- }
-
- public void setActions(List<String> actions) {
- this.actions = actions;
- }
-
- public Map<String,Object> toObject() {
- HashMap<String, Object> slaObj = new HashMap<String, Object>();
-
- slaObj.put("type", type);
- slaObj.put("info", info);
- slaObj.put("actions", actions);
-
- return slaObj;
- }
-
- @SuppressWarnings("unchecked")
- public static SlaOption fromObject(Object object) {
-
- HashMap<String, Object> slaObj = (HashMap<String,Object>)object;
-
- String type = (String) slaObj.get("type");
- List<String> actions = (List<String>) slaObj.get("actions");
- Map<String, Object> info = (Map<String, Object>) slaObj.get("info");
-
- return new SlaOption(type, actions, info);
- }
-
- public Object toWebObject() {
- HashMap<String, Object> slaObj = new HashMap<String, Object>();
-
-// slaObj.put("type", type);
-// slaObj.put("info", info);
-// slaObj.put("actions", actions);
- if(type.equals(TYPE_FLOW_FINISH) || type.equals(TYPE_FLOW_SUCCEED)) {
- slaObj.put("id", "");
- } else {
- slaObj.put("id", info.get(INFO_JOB_NAME));
- }
- slaObj.put("duration", info.get(INFO_DURATION));
- if(type.equals(TYPE_FLOW_FINISH) || type.equals(TYPE_JOB_FINISH)) {
- slaObj.put("rule", "FINISH");
- } else {
- slaObj.put("rule", "SUCCESS");
- }
- List<String> actionsObj = new ArrayList<String>();
- for(String act : actions) {
- if(act.equals(ACTION_ALERT)) {
- actionsObj.add("EMAIL");
- }
- else {
- actionsObj.add("KILL");
- }
- }
- slaObj.put("actions", actionsObj);
-
- return slaObj;
- }
-
- @Override
- public String toString() {
- return "Sla of " + getType() + getInfo() + getActions();
- }
-
- public static String createSlaMessage(SlaOption slaOption, ExecutableFlow flow) {
- String type = slaOption.getType();
- int execId = flow.getExecutionId();
- if(type.equals(SlaOption.TYPE_FLOW_FINISH)) {
- String flowName = (String) slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
- String duration = (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
- String basicinfo = "SLA Alert: Your flow " + flowName + " failed to FINISH within " + duration + "</br>";
- String expected = "Here is details : </br>" + "Flow " + flowName + " in execution " + execId + " is expected to FINISH within " + duration + " from " + fmt.print(new DateTime(flow.getStartTime())) + "</br>";
- String actual = "Actual flow status is " + flow.getStatus();
- return basicinfo + expected + actual;
- } else if(type.equals(SlaOption.TYPE_FLOW_SUCCEED)) {
- String flowName = (String) slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
- String duration = (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
- String basicinfo = "SLA Alert: Your flow " + flowName + " failed to SUCCEED within " + duration + "</br>";
- String expected = "Here is details : </br>" + "Flow " + flowName + " in execution " + execId + " expected to FINISH within " + duration + " from " + fmt.print(new DateTime(flow.getStartTime())) + "</br>";
- String actual = "Actual flow status is " + flow.getStatus();
- return basicinfo + expected + actual;
- } else if(type.equals(SlaOption.TYPE_JOB_FINISH)) {
- String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
- String duration = (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
- return "SLA Alert: Your job " + jobName + " failed to FINISH within " + duration + " in execution " + execId;
- } else if(type.equals(SlaOption.TYPE_JOB_SUCCEED)) {
- String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
- String duration = (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
- return "SLA Alert: Your job " + jobName + " failed to SUCCEED within " + duration + " in execution " + execId;
- } else {
- return "Unrecognized SLA type " + type;
- }
- }
+ public static final String TYPE_FLOW_FINISH = "FlowFinish";
+ public static final String TYPE_FLOW_SUCCEED = "FlowSucceed";
+ public static final String TYPE_FLOW_PROGRESS = "FlowProgress";
+
+ public static final String TYPE_JOB_FINISH = "JobFinish";
+ public static final String TYPE_JOB_SUCCEED = "JobSucceed";
+ public static final String TYPE_JOB_PROGRESS = "JobProgress";
+
+ public static final String INFO_DURATION = "Duration";
+ public static final String INFO_FLOW_NAME = "FlowName";
+ public static final String INFO_JOB_NAME = "JobName";
+ public static final String INFO_PROGRESS_PERCENT = "ProgressPercent";
+ public static final String INFO_EMAIL_LIST = "EmailList";
+
+ // always alert
+ public static final String ALERT_TYPE = "SlaAlertType";
+ public static final String ACTION_CANCEL_FLOW = "SlaCancelFlow";
+ public static final String ACTION_ALERT = "SlaAlert";
+
+ private String type;
+ private Map<String, Object> info;
+ private List<String> actions;
+
+ private static DateTimeFormatter fmt = DateTimeFormat
+ .forPattern("MM/dd, YYYY HH:mm");
+
+ // An SLA rule: its type (one of the TYPE_* constants), the actions to
+ // take on violation (ACTION_* constants), and a free-form info map keyed
+ // by the INFO_* constants.
+ public SlaOption(String type, List<String> actions, Map<String, Object> info) {
+ this.type = type;
+ this.info = info;
+ this.actions = actions;
+ }
+
+ // Plain accessors for the SLA type, info map, and action list. The
+ // returned map/list are the live internal references, not copies.
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public Map<String, Object> getInfo() {
+ return info;
+ }
+
+ public void setInfo(Map<String, Object> info) {
+ this.info = info;
+ }
+
+ public List<String> getActions() {
+ return actions;
+ }
+
+ public void setActions(List<String> actions) {
+ this.actions = actions;
+ }
+
+ // Serializes this option to a map ("type", "info", "actions") for JSON
+ // storage; inverse of fromObject().
+ public Map<String, Object> toObject() {
+ HashMap<String, Object> slaObj = new HashMap<String, Object>();
+
+ slaObj.put("type", type);
+ slaObj.put("info", info);
+ slaObj.put("actions", actions);
+
+ return slaObj;
+ }
+
+ // Rebuilds an SlaOption from the map produced by toObject(). The
+ // unchecked casts assume exactly that shape; any other input fails with
+ // a ClassCastException.
+ @SuppressWarnings("unchecked")
+ public static SlaOption fromObject(Object object) {
+
+ HashMap<String, Object> slaObj = (HashMap<String, Object>) object;
+
+ String type = (String) slaObj.get("type");
+ List<String> actions = (List<String>) slaObj.get("actions");
+ Map<String, Object> info = (Map<String, Object>) slaObj.get("info");
+
+ return new SlaOption(type, actions, info);
+ }
+
+ // Shapes this option for the web UI: "id" (job name, or empty for
+ // flow-level SLAs), "duration", "rule" (FINISH vs SUCCESS), and action
+ // labels EMAIL/KILL.
+ // NOTE(review): any non-ALERT action is mapped to "KILL" — assumes only
+ // the two ACTION_* kinds exist; confirm if more are ever added.
+ public Object toWebObject() {
+ HashMap<String, Object> slaObj = new HashMap<String, Object>();
+
+ // slaObj.put("type", type);
+ // slaObj.put("info", info);
+ // slaObj.put("actions", actions);
+ if (type.equals(TYPE_FLOW_FINISH) || type.equals(TYPE_FLOW_SUCCEED)) {
+ slaObj.put("id", "");
+ } else {
+ slaObj.put("id", info.get(INFO_JOB_NAME));
+ }
+ slaObj.put("duration", info.get(INFO_DURATION));
+ if (type.equals(TYPE_FLOW_FINISH) || type.equals(TYPE_JOB_FINISH)) {
+ slaObj.put("rule", "FINISH");
+ } else {
+ slaObj.put("rule", "SUCCESS");
+ }
+ List<String> actionsObj = new ArrayList<String>();
+ for (String act : actions) {
+ if (act.equals(ACTION_ALERT)) {
+ actionsObj.add("EMAIL");
+ } else {
+ actionsObj.add("KILL");
+ }
+ }
+ slaObj.put("actions", actionsObj);
+
+ return slaObj;
+ }
+
+ // Debug representation: concatenates type, info map, and action list.
+ @Override
+ public String toString() {
+ return "Sla of " + getType() + getInfo() + getActions();
+ }
+
+ // Builds the alert body for a violated SLA, dispatching on the option's
+ // type: flow-level messages include expected-finish details and current
+ // flow status; job-level messages are a single line. Unknown types get a
+ // fallback string rather than an exception.
+ // NOTE(review): "</br>" is not a valid HTML line break (should be
+ // "<br/>"); left untouched here since it is runtime output.
+ public static String createSlaMessage(SlaOption slaOption, ExecutableFlow flow) {
+ String type = slaOption.getType();
+ int execId = flow.getExecutionId();
+ if (type.equals(SlaOption.TYPE_FLOW_FINISH)) {
+ String flowName =
+ (String) slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
+ String duration =
+ (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
+ String basicinfo =
+ "SLA Alert: Your flow " + flowName + " failed to FINISH within "
+ + duration + "</br>";
+ String expected =
+ "Here is details : </br>" + "Flow " + flowName + " in execution "
+ + execId + " is expected to FINISH within " + duration + " from "
+ + fmt.print(new DateTime(flow.getStartTime())) + "</br>";
+ String actual = "Actual flow status is " + flow.getStatus();
+ return basicinfo + expected + actual;
+ } else if (type.equals(SlaOption.TYPE_FLOW_SUCCEED)) {
+ String flowName =
+ (String) slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
+ String duration =
+ (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
+ String basicinfo =
+ "SLA Alert: Your flow " + flowName + " failed to SUCCEED within "
+ + duration + "</br>";
+ String expected =
+ "Here is details : </br>" + "Flow " + flowName + " in execution "
+ + execId + " expected to FINISH within " + duration + " from "
+ + fmt.print(new DateTime(flow.getStartTime())) + "</br>";
+ String actual = "Actual flow status is " + flow.getStatus();
+ return basicinfo + expected + actual;
+ } else if (type.equals(SlaOption.TYPE_JOB_FINISH)) {
+ String jobName =
+ (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+ String duration =
+ (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
+ return "SLA Alert: Your job " + jobName + " failed to FINISH within "
+ + duration + " in execution " + execId;
+ } else if (type.equals(SlaOption.TYPE_JOB_SUCCEED)) {
+ String jobName =
+ (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+ String duration =
+ (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
+ return "SLA Alert: Your job " + jobName + " failed to SUCCEED within "
+ + duration + " in execution " + execId;
+ } else {
+ return "Unrecognized SLA type " + type;
+ }
+ }
}
src/main/java/azkaban/trigger/ActionTypeLoader.java 307(+159 -148)
diff --git a/src/main/java/azkaban/trigger/ActionTypeLoader.java b/src/main/java/azkaban/trigger/ActionTypeLoader.java
index 2efdf99..16292da 100644
--- a/src/main/java/azkaban/trigger/ActionTypeLoader.java
+++ b/src/main/java/azkaban/trigger/ActionTypeLoader.java
@@ -26,152 +26,163 @@ import azkaban.utils.Props;
import azkaban.utils.Utils;
public class ActionTypeLoader {
-
- private static Logger logger = Logger.getLogger(ActionTypeLoader.class);
-
- public static final String DEFAULT_TRIGGER_ACTION_PLUGIN_DIR = "plugins/triggeractions";
-
- protected static Map<String, Class<? extends TriggerAction>> actionToClass = new HashMap<String, Class<? extends TriggerAction>>();
-
- public void init(Props props) throws TriggerException {
- // load built-in actions
-
-//
-// loadBuiltinActions();
-//
-// loadPluginActions(props);
-
- }
-
- public synchronized void registerActionType(String type, Class<? extends TriggerAction> actionClass) {
- logger.info("Registering action " + type);
- if(!actionToClass.containsKey(type)) {
- actionToClass.put(type, actionClass);
- }
- }
-
-// private void loadPluginActions(Props props) throws TriggerException {
-// String checkerDir = props.getString("azkaban.trigger.action.plugin.dir", DEFAULT_TRIGGER_ACTION_PLUGIN_DIR);
-// File pluginDir = new File(checkerDir);
-// if(!pluginDir.exists() || !pluginDir.isDirectory() || !pluginDir.canRead()) {
-// logger.info("No trigger action plugins to load.");
-// return;
-// }
-//
-// logger.info("Loading plugin trigger actions from " + pluginDir);
-// ClassLoader parentCl = this.getClass().getClassLoader();
-//
-// Props globalActionConf = null;
-// File confFile = Utils.findFilefromDir(pluginDir, COMMONCONFFILE);
-// try {
-// if(confFile != null) {
-// globalActionConf = new Props(null, confFile);
-// } else {
-// globalActionConf = new Props();
-// }
-// } catch (IOException e) {
-// throw new TriggerException("Failed to get global properties." + e);
-// }
-//
-// for(File dir : pluginDir.listFiles()) {
-// if(dir.isDirectory() && dir.canRead()) {
-// try {
-// loadPluginTypes(globalActionConf, pluginDir, parentCl);
-// } catch (Exception e) {
-// logger.info("Plugin actions failed to load. " + e.getCause());
-// throw new TriggerException("Failed to load all trigger actions!", e);
-// }
-// }
-// }
-// }
-//
-// @SuppressWarnings("unchecked")
-// private void loadPluginTypes(Props globalConf, File dir, ClassLoader parentCl) throws TriggerException {
-// Props actionConf = null;
-// File confFile = Utils.findFilefromDir(dir, ACTIONTYPECONFFILE);
-// if(confFile == null) {
-// logger.info("No action type found in " + dir.getAbsolutePath());
-// return;
-// }
-// try {
-// actionConf = new Props(globalConf, confFile);
-// } catch (IOException e) {
-// throw new TriggerException("Failed to load config for the action type", e);
-// }
-//
-// String actionName = dir.getName();
-// String actionClass = actionConf.getString("action.class");
-//
-// List<URL> resources = new ArrayList<URL>();
-// for(File f : dir.listFiles()) {
-// try {
-// if(f.getName().endsWith(".jar")) {
-// resources.add(f.toURI().toURL());
-// logger.info("adding to classpath " + f.toURI().toURL());
-// }
-// } catch (MalformedURLException e) {
-// // TODO Auto-generated catch block
-// throw new TriggerException(e);
-// }
-// }
-//
-// // each job type can have a different class loader
-// ClassLoader actionCl = new URLClassLoader(resources.toArray(new URL[resources.size()]), parentCl);
-//
-// Class<? extends TriggerAction> clazz = null;
-// try {
-// clazz = (Class<? extends TriggerAction>)actionCl.loadClass(actionClass);
-// actionToClass.put(actionName, clazz);
-// }
-// catch (ClassNotFoundException e) {
-// throw new TriggerException(e);
-// }
-//
-// if(actionConf.getBoolean("need.init")) {
-// try {
-// Utils.invokeStaticMethod(actionCl, actionClass, "init", actionConf);
-// } catch (Exception e) {
-// e.printStackTrace();
-// logger.error("Failed to init the action type " + actionName);
-// throw new TriggerException(e);
-// }
-// }
-//
-// logger.info("Loaded action type " + actionName + " " + actionClass);
-// }
-//
-// private void loadBuiltinActions() {
-// actionToClass.put(ExecuteFlowAction.type, ExecuteFlowAction.class);
-// logger.info("Loaded ExecuteFlowAction type.");
-// }
-
- public static void registerBuiltinActions(Map<String, Class<? extends TriggerAction>> builtinActions) {
- actionToClass.putAll(builtinActions);
- for(String type : builtinActions.keySet()) {
- logger.info("Loaded " + type + " action.");
- }
- }
-
- public TriggerAction createActionFromJson(String type, Object obj) throws Exception {
- TriggerAction action = null;
- Class<? extends TriggerAction> actionClass = actionToClass.get(type);
- if(actionClass == null) {
- throw new Exception("Action Type " + type + " not supported!");
- }
- action = (TriggerAction) Utils.invokeStaticMethod(actionClass.getClassLoader(), actionClass.getName(), "createFromJson", obj);
-
- return action;
- }
-
- public TriggerAction createAction(String type, Object ... args) {
- TriggerAction action = null;
- Class<? extends TriggerAction> actionClass = actionToClass.get(type);
- action = (TriggerAction) Utils.callConstructor(actionClass, args);
-
- return action;
- }
-
- public Set<String> getSupportedActions() {
- return actionToClass.keySet();
- }
+
+ private static Logger logger = Logger.getLogger(ActionTypeLoader.class);
+
+ public static final String DEFAULT_TRIGGER_ACTION_PLUGIN_DIR =
+ "plugins/triggeractions";
+
+ protected static Map<String, Class<? extends TriggerAction>> actionToClass =
+ new HashMap<String, Class<? extends TriggerAction>>();
+
+ // Currently a no-op: both built-in and plugin action loading are
+ // commented out below; registration happens via registerActionType /
+ // registerBuiltinActions instead.
+ public void init(Props props) throws TriggerException {
+ // load built-in actions
+
+ //
+ // loadBuiltinActions();
+ //
+ // loadPluginActions(props);
+
+ }
+
+ // Registers an action implementation under the given type name.
+ // First registration wins: a type already present is left untouched
+ // (contrast with registerBuiltinActions(), which overwrites).
+ public synchronized void registerActionType(String type,
+ Class<? extends TriggerAction> actionClass) {
+ logger.info("Registering action " + type);
+ if (!actionToClass.containsKey(type)) {
+ actionToClass.put(type, actionClass);
+ }
+ }
+
+ // private void loadPluginActions(Props props) throws TriggerException {
+ // String checkerDir = props.getString("azkaban.trigger.action.plugin.dir",
+ // DEFAULT_TRIGGER_ACTION_PLUGIN_DIR);
+ // File pluginDir = new File(checkerDir);
+ // if(!pluginDir.exists() || !pluginDir.isDirectory() || !pluginDir.canRead())
+ // {
+ // logger.info("No trigger action plugins to load.");
+ // return;
+ // }
+ //
+ // logger.info("Loading plugin trigger actions from " + pluginDir);
+ // ClassLoader parentCl = this.getClass().getClassLoader();
+ //
+ // Props globalActionConf = null;
+ // File confFile = Utils.findFilefromDir(pluginDir, COMMONCONFFILE);
+ // try {
+ // if(confFile != null) {
+ // globalActionConf = new Props(null, confFile);
+ // } else {
+ // globalActionConf = new Props();
+ // }
+ // } catch (IOException e) {
+ // throw new TriggerException("Failed to get global properties." + e);
+ // }
+ //
+ // for(File dir : pluginDir.listFiles()) {
+ // if(dir.isDirectory() && dir.canRead()) {
+ // try {
+ // loadPluginTypes(globalActionConf, pluginDir, parentCl);
+ // } catch (Exception e) {
+ // logger.info("Plugin actions failed to load. " + e.getCause());
+ // throw new TriggerException("Failed to load all trigger actions!", e);
+ // }
+ // }
+ // }
+ // }
+ //
+ // @SuppressWarnings("unchecked")
+ // private void loadPluginTypes(Props globalConf, File dir, ClassLoader
+ // parentCl) throws TriggerException {
+ // Props actionConf = null;
+ // File confFile = Utils.findFilefromDir(dir, ACTIONTYPECONFFILE);
+ // if(confFile == null) {
+ // logger.info("No action type found in " + dir.getAbsolutePath());
+ // return;
+ // }
+ // try {
+ // actionConf = new Props(globalConf, confFile);
+ // } catch (IOException e) {
+ // throw new TriggerException("Failed to load config for the action type", e);
+ // }
+ //
+ // String actionName = dir.getName();
+ // String actionClass = actionConf.getString("action.class");
+ //
+ // List<URL> resources = new ArrayList<URL>();
+ // for(File f : dir.listFiles()) {
+ // try {
+ // if(f.getName().endsWith(".jar")) {
+ // resources.add(f.toURI().toURL());
+ // logger.info("adding to classpath " + f.toURI().toURL());
+ // }
+ // } catch (MalformedURLException e) {
+ // // TODO Auto-generated catch block
+ // throw new TriggerException(e);
+ // }
+ // }
+ //
+ // // each job type can have a different class loader
+ // ClassLoader actionCl = new URLClassLoader(resources.toArray(new
+ // URL[resources.size()]), parentCl);
+ //
+ // Class<? extends TriggerAction> clazz = null;
+ // try {
+ // clazz = (Class<? extends TriggerAction>)actionCl.loadClass(actionClass);
+ // actionToClass.put(actionName, clazz);
+ // }
+ // catch (ClassNotFoundException e) {
+ // throw new TriggerException(e);
+ // }
+ //
+ // if(actionConf.getBoolean("need.init")) {
+ // try {
+ // Utils.invokeStaticMethod(actionCl, actionClass, "init", actionConf);
+ // } catch (Exception e) {
+ // e.printStackTrace();
+ // logger.error("Failed to init the action type " + actionName);
+ // throw new TriggerException(e);
+ // }
+ // }
+ //
+ // logger.info("Loaded action type " + actionName + " " + actionClass);
+ // }
+ //
+ // private void loadBuiltinActions() {
+ // actionToClass.put(ExecuteFlowAction.type, ExecuteFlowAction.class);
+ // logger.info("Loaded ExecuteFlowAction type.");
+ // }
+
+ // Bulk-registers the built-in action types.
+ // NOTE(review): putAll overwrites any existing mapping and this method
+ // is not synchronized, unlike registerActionType() — confirm intended.
+ public static void registerBuiltinActions(
+ Map<String, Class<? extends TriggerAction>> builtinActions) {
+ actionToClass.putAll(builtinActions);
+ for (String type : builtinActions.keySet()) {
+ logger.info("Loaded " + type + " action.");
+ }
+ }
+
+ // Deserializes an action of the given type by invoking the registered
+ // class's static createFromJson(obj) through that class's own
+ // classloader (plugin classes may not be visible to ours). Throws a
+ // plain Exception when the type is unregistered.
+ public TriggerAction createActionFromJson(String type, Object obj)
+ throws Exception {
+ TriggerAction action = null;
+ Class<? extends TriggerAction> actionClass = actionToClass.get(type);
+ if (actionClass == null) {
+ throw new Exception("Action Type " + type + " not supported!");
+ }
+ action =
+ (TriggerAction) Utils.invokeStaticMethod(actionClass.getClassLoader(),
+ actionClass.getName(), "createFromJson", obj);
+
+ return action;
+ }
+
+ // Instantiates an action of the given type via its constructor with the
+ // supplied arguments.
+ // NOTE(review): unlike createActionFromJson(), an unregistered type is
+ // not checked — actionClass is null and the call into
+ // Utils.callConstructor will fail without a descriptive message;
+ // consider adding the same explicit check.
+ public TriggerAction createAction(String type, Object... args) {
+ TriggerAction action = null;
+ Class<? extends TriggerAction> actionClass = actionToClass.get(type);
+ action = (TriggerAction) Utils.callConstructor(actionClass, args);
+
+ return action;
+ }
+
+ // Returns the registered action type names. This is the map's live
+ // keySet view, not a snapshot.
+ public Set<String> getSupportedActions() {
+ return actionToClass.keySet();
+ }
}
src/main/java/azkaban/trigger/builtin/BasicTimeChecker.java 342(+170 -172)
diff --git a/src/main/java/azkaban/trigger/builtin/BasicTimeChecker.java b/src/main/java/azkaban/trigger/builtin/BasicTimeChecker.java
index 9f80ce6..e63adcd 100644
--- a/src/main/java/azkaban/trigger/builtin/BasicTimeChecker.java
+++ b/src/main/java/azkaban/trigger/builtin/BasicTimeChecker.java
@@ -28,177 +28,175 @@ import azkaban.utils.Utils;
public class BasicTimeChecker implements ConditionChecker {
- public static final String type = "BasicTimeChecker";
-
- private long firstCheckTime;
- private long nextCheckTime;
- private DateTimeZone timezone;
- private boolean isRecurring = true;
- private boolean skipPastChecks = true;
- private ReadablePeriod period;
-
- private final String id;
-
- public BasicTimeChecker(
- String id,
- long firstCheckTime,
- DateTimeZone timezone,
- boolean isRecurring,
- boolean skipPastChecks,
- ReadablePeriod period) {
- this.id = id;
- this.firstCheckTime = firstCheckTime;
- this.timezone = timezone;
- this.isRecurring = isRecurring;
- this.skipPastChecks = skipPastChecks;
- this.period = period;
- this.nextCheckTime = firstCheckTime;
- this.nextCheckTime = calculateNextCheckTime();
- }
-
- public long getFirstCheckTime() {
- return firstCheckTime;
- }
-
- public DateTimeZone getTimeZone() {
- return timezone;
- }
-
- public boolean isRecurring() {
- return isRecurring;
- }
-
- public boolean isSkipPastChecks() {
- return skipPastChecks;
- }
-
- public ReadablePeriod getPeriod() {
- return period;
- }
-
- public long getNextCheckTime() {
- return nextCheckTime;
- }
-
- public BasicTimeChecker(
- String id,
- long firstCheckTime,
- DateTimeZone timezone,
- long nextCheckTime,
- boolean isRecurring,
- boolean skipPastChecks,
- ReadablePeriod period) {
- this.id = id;
- this.firstCheckTime = firstCheckTime;
- this.timezone = timezone;
- this.nextCheckTime = nextCheckTime;
- this.isRecurring = isRecurring;
- this.skipPastChecks = skipPastChecks;
- this.period = period;
- }
-
- @Override
- public Boolean eval() {
- return nextCheckTime < System.currentTimeMillis();
- }
-
- @Override
- public void reset() {
- this.nextCheckTime = calculateNextCheckTime();
- }
-
- @Override
- public String getId() {
- return id;
- }
-
- @Override
- public String getType() {
- return type;
- }
-
- @SuppressWarnings("unchecked")
- public static BasicTimeChecker createFromJson(Object obj) throws Exception {
- return createFromJson((HashMap<String, Object>)obj);
- }
-
- public static BasicTimeChecker createFromJson(HashMap<String, Object> obj) throws Exception {
- Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
- if(!jsonObj.get("type").equals(type)) {
- throw new Exception("Cannot create checker of " + type + " from " + jsonObj.get("type"));
- }
- Long firstCheckTime = Long.valueOf((String) jsonObj.get("firstCheckTime"));
- String timezoneId = (String) jsonObj.get("timezone");
- long nextCheckTime = Long.valueOf((String) jsonObj.get("nextCheckTime"));
- DateTimeZone timezone = DateTimeZone.forID(timezoneId);
- boolean isRecurring = Boolean.valueOf((String)jsonObj.get("isRecurring"));
- boolean skipPastChecks = Boolean.valueOf((String)jsonObj.get("skipPastChecks"));
- ReadablePeriod period = Utils.parsePeriodString((String)jsonObj.get("period"));
- String id = (String) jsonObj.get("id");
-
- BasicTimeChecker checker = new BasicTimeChecker(id, firstCheckTime, timezone, nextCheckTime, isRecurring, skipPastChecks, period);
- if(skipPastChecks) {
- checker.updateNextCheckTime();
- }
- return checker;
- }
-
- @Override
- public BasicTimeChecker fromJson(Object obj) throws Exception{
- return createFromJson(obj);
- }
-
- private void updateNextCheckTime(){
- nextCheckTime = calculateNextCheckTime();
- }
-
- private long calculateNextCheckTime(){
- DateTime date = new DateTime(nextCheckTime).withZone(timezone);
- int count = 0;
- while(!date.isAfterNow()) {
- if(count > 100000) {
- throw new IllegalStateException("100000 increments of period did not get to present time.");
- }
- if(period == null) {
- break;
- }else {
- date = date.plus(period);
- }
- count += 1;
- if(!skipPastChecks) {
- continue;
- }
- }
- return date.getMillis();
- }
-
- @Override
- public Object getNum() {
- return null;
- }
-
- @Override
- public Object toJson() {
- Map<String, Object> jsonObj = new HashMap<String, Object>();
- jsonObj.put("type", type);
- jsonObj.put("firstCheckTime", String.valueOf(firstCheckTime));
- jsonObj.put("timezone", timezone.getID());
- jsonObj.put("nextCheckTime", String.valueOf(nextCheckTime));
- jsonObj.put("isRecurring", String.valueOf(isRecurring));
- jsonObj.put("skipPastChecks", String.valueOf(skipPastChecks));
- jsonObj.put("period", Utils.createPeriodString(period));
- jsonObj.put("id", id);
-
- return jsonObj;
- }
-
- @Override
- public void stopChecker() {
- return;
- }
-
- @Override
- public void setContext(Map<String, Object> context) {
- }
+ public static final String type = "BasicTimeChecker";
+
+ private long firstCheckTime;
+ private long nextCheckTime;
+ private DateTimeZone timezone;
+ private boolean isRecurring = true;
+ private boolean skipPastChecks = true;
+ private ReadablePeriod period;
+
+ private final String id;
+
+ public BasicTimeChecker(String id, long firstCheckTime,
+ DateTimeZone timezone, boolean isRecurring, boolean skipPastChecks,
+ ReadablePeriod period) {
+ this.id = id;
+ this.firstCheckTime = firstCheckTime;
+ this.timezone = timezone;
+ this.isRecurring = isRecurring;
+ this.skipPastChecks = skipPastChecks;
+ this.period = period;
+ this.nextCheckTime = firstCheckTime;
+ this.nextCheckTime = calculateNextCheckTime();
+ }
+
+ public long getFirstCheckTime() {
+ return firstCheckTime;
+ }
+
+ public DateTimeZone getTimeZone() {
+ return timezone;
+ }
+
+ public boolean isRecurring() {
+ return isRecurring;
+ }
+
+ public boolean isSkipPastChecks() {
+ return skipPastChecks;
+ }
+
+ public ReadablePeriod getPeriod() {
+ return period;
+ }
+
+ public long getNextCheckTime() {
+ return nextCheckTime;
+ }
+
+ public BasicTimeChecker(String id, long firstCheckTime,
+ DateTimeZone timezone, long nextCheckTime, boolean isRecurring,
+ boolean skipPastChecks, ReadablePeriod period) {
+ this.id = id;
+ this.firstCheckTime = firstCheckTime;
+ this.timezone = timezone;
+ this.nextCheckTime = nextCheckTime;
+ this.isRecurring = isRecurring;
+ this.skipPastChecks = skipPastChecks;
+ this.period = period;
+ }
+
+ @Override
+ public Boolean eval() {
+ return nextCheckTime < System.currentTimeMillis();
+ }
+
+ @Override
+ public void reset() {
+ this.nextCheckTime = calculateNextCheckTime();
+ }
+
+ @Override
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public String getType() {
+ return type;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static BasicTimeChecker createFromJson(Object obj) throws Exception {
+ return createFromJson((HashMap<String, Object>) obj);
+ }
+
+ public static BasicTimeChecker createFromJson(HashMap<String, Object> obj)
+ throws Exception {
+ Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+ if (!jsonObj.get("type").equals(type)) {
+ throw new Exception("Cannot create checker of " + type + " from "
+ + jsonObj.get("type"));
+ }
+ Long firstCheckTime = Long.valueOf((String) jsonObj.get("firstCheckTime"));
+ String timezoneId = (String) jsonObj.get("timezone");
+ long nextCheckTime = Long.valueOf((String) jsonObj.get("nextCheckTime"));
+ DateTimeZone timezone = DateTimeZone.forID(timezoneId);
+ boolean isRecurring = Boolean.valueOf((String) jsonObj.get("isRecurring"));
+ boolean skipPastChecks =
+ Boolean.valueOf((String) jsonObj.get("skipPastChecks"));
+ ReadablePeriod period =
+ Utils.parsePeriodString((String) jsonObj.get("period"));
+ String id = (String) jsonObj.get("id");
+
+ BasicTimeChecker checker =
+ new BasicTimeChecker(id, firstCheckTime, timezone, nextCheckTime,
+ isRecurring, skipPastChecks, period);
+ if (skipPastChecks) {
+ checker.updateNextCheckTime();
+ }
+ return checker;
+ }
+
+ @Override
+ public BasicTimeChecker fromJson(Object obj) throws Exception {
+ return createFromJson(obj);
+ }
+
+ private void updateNextCheckTime() {
+ nextCheckTime = calculateNextCheckTime();
+ }
+
+ private long calculateNextCheckTime() {
+ DateTime date = new DateTime(nextCheckTime).withZone(timezone);
+ int count = 0;
+ while (!date.isAfterNow()) {
+ if (count > 100000) {
+ throw new IllegalStateException(
+ "100000 increments of period did not get to present time.");
+ }
+ if (period == null) {
+ break;
+ } else {
+ date = date.plus(period);
+ }
+ count += 1;
+ if (!skipPastChecks) {
+ continue;
+ }
+ }
+ return date.getMillis();
+ }
+
+ @Override
+ public Object getNum() {
+ return null;
+ }
+
+ @Override
+ public Object toJson() {
+ Map<String, Object> jsonObj = new HashMap<String, Object>();
+ jsonObj.put("type", type);
+ jsonObj.put("firstCheckTime", String.valueOf(firstCheckTime));
+ jsonObj.put("timezone", timezone.getID());
+ jsonObj.put("nextCheckTime", String.valueOf(nextCheckTime));
+ jsonObj.put("isRecurring", String.valueOf(isRecurring));
+ jsonObj.put("skipPastChecks", String.valueOf(skipPastChecks));
+ jsonObj.put("period", Utils.createPeriodString(period));
+ jsonObj.put("id", id);
+
+ return jsonObj;
+ }
+
+ @Override
+ public void stopChecker() {
+ return;
+ }
+
+ @Override
+ public void setContext(Map<String, Object> context) {
+ }
}
diff --git a/src/main/java/azkaban/trigger/builtin/CreateTriggerAction.java b/src/main/java/azkaban/trigger/builtin/CreateTriggerAction.java
index ad86c3f..7f4a3c1 100644
--- a/src/main/java/azkaban/trigger/builtin/CreateTriggerAction.java
+++ b/src/main/java/azkaban/trigger/builtin/CreateTriggerAction.java
@@ -24,73 +24,74 @@ import azkaban.trigger.TriggerAction;
import azkaban.trigger.TriggerManager;
public class CreateTriggerAction implements TriggerAction {
-
- public static final String type = "CreateTriggerAction";
- private static TriggerManager triggerManager;
- private Trigger trigger;
- @SuppressWarnings("unused")
- private Map<String, Object> context;
- private String actionId;
-
- public CreateTriggerAction(String actionId, Trigger trigger) {
- this.actionId = actionId;
- this.trigger = trigger;
- }
-
- @Override
- public String getType() {
- return type;
- }
-
- public static void setTriggerManager(TriggerManager trm) {
- triggerManager = trm;
- }
-
- @SuppressWarnings("unchecked")
- public static CreateTriggerAction createFromJson(Object obj) throws Exception {
- Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
- if(!jsonObj.get("type").equals(type)) {
- throw new Exception("Cannot create action of " + type + " from " + jsonObj.get("type"));
- }
- String actionId = (String) jsonObj.get("actionId");
- Trigger trigger = Trigger.fromJson(jsonObj.get("trigger"));
- return new CreateTriggerAction(actionId, trigger);
- }
-
- @Override
- public CreateTriggerAction fromJson(Object obj) throws Exception {
- // TODO Auto-generated method stub
- return createFromJson(obj);
- }
-
- @Override
- public Object toJson() {
- Map<String, Object> jsonObj = new HashMap<String, Object>();
- jsonObj.put("actionId", actionId);
- jsonObj.put("type", type);
- jsonObj.put("trigger", trigger.toJson());
-
- return jsonObj;
- }
-
- @Override
- public void doAction() throws Exception {
- triggerManager.insertTrigger(trigger);
- }
-
- @Override
- public String getDescription() {
- return "create another: " + trigger.getDescription();
- }
-
- @Override
- public String getId() {
- return actionId;
- }
-
- @Override
- public void setContext(Map<String, Object> context) {
- this.context = context;
- }
+
+ public static final String type = "CreateTriggerAction";
+ private static TriggerManager triggerManager;
+ private Trigger trigger;
+ @SuppressWarnings("unused")
+ private Map<String, Object> context;
+ private String actionId;
+
+ public CreateTriggerAction(String actionId, Trigger trigger) {
+ this.actionId = actionId;
+ this.trigger = trigger;
+ }
+
+ @Override
+ public String getType() {
+ return type;
+ }
+
+ public static void setTriggerManager(TriggerManager trm) {
+ triggerManager = trm;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static CreateTriggerAction createFromJson(Object obj) throws Exception {
+ Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+ if (!jsonObj.get("type").equals(type)) {
+ throw new Exception("Cannot create action of " + type + " from "
+ + jsonObj.get("type"));
+ }
+ String actionId = (String) jsonObj.get("actionId");
+ Trigger trigger = Trigger.fromJson(jsonObj.get("trigger"));
+ return new CreateTriggerAction(actionId, trigger);
+ }
+
+ @Override
+ public CreateTriggerAction fromJson(Object obj) throws Exception {
+ // TODO Auto-generated method stub
+ return createFromJson(obj);
+ }
+
+ @Override
+ public Object toJson() {
+ Map<String, Object> jsonObj = new HashMap<String, Object>();
+ jsonObj.put("actionId", actionId);
+ jsonObj.put("type", type);
+ jsonObj.put("trigger", trigger.toJson());
+
+ return jsonObj;
+ }
+
+ @Override
+ public void doAction() throws Exception {
+ triggerManager.insertTrigger(trigger);
+ }
+
+ @Override
+ public String getDescription() {
+ return "create another: " + trigger.getDescription();
+ }
+
+ @Override
+ public String getId() {
+ return actionId;
+ }
+
+ @Override
+ public void setContext(Map<String, Object> context) {
+ this.context = context;
+ }
}
src/main/java/azkaban/trigger/builtin/ExecuteFlowAction.java 528(+275 -253)
diff --git a/src/main/java/azkaban/trigger/builtin/ExecuteFlowAction.java b/src/main/java/azkaban/trigger/builtin/ExecuteFlowAction.java
index caa07bf..f7a1cfa 100644
--- a/src/main/java/azkaban/trigger/builtin/ExecuteFlowAction.java
+++ b/src/main/java/azkaban/trigger/builtin/ExecuteFlowAction.java
@@ -39,258 +39,280 @@ import azkaban.trigger.TriggerManager;
public class ExecuteFlowAction implements TriggerAction {
- public static final String type = "ExecuteFlowAction";
-
- public static final String EXEC_ID = "ExecuteFlowAction.execid";
-
- private static ExecutorManagerAdapter executorManager;
- private static TriggerManager triggerManager;
- private String actionId;
- private int projectId;
- private String projectName;
- private String flowName;
- private String submitUser;
- private static ProjectManager projectManager;
- private ExecutionOptions executionOptions = new ExecutionOptions();
- private List<SlaOption> slaOptions;
-
- private static Logger logger = Logger.getLogger(ExecuteFlowAction.class);
-
- public ExecuteFlowAction(String actionId, int projectId, String projectName, String flowName, String submitUser, ExecutionOptions executionOptions, List<SlaOption> slaOptions) {
- this.actionId = actionId;
- this.projectId = projectId;
- this.projectName = projectName;
- this.flowName = flowName;
- this.submitUser = submitUser;
- this.executionOptions = executionOptions;
- this.slaOptions = slaOptions;
- }
-
- public static void setLogger(Logger logger) {
- ExecuteFlowAction.logger = logger;
- }
-
- public String getProjectName() {
- return projectName;
- }
-
- public int getProjectId() {
- return projectId;
- }
-
- protected void setProjectId(int projectId) {
- this.projectId = projectId;
- }
-
- public String getFlowName() {
- return flowName;
- }
-
- protected void setFlowName(String flowName) {
- this.flowName = flowName;
- }
-
- public String getSubmitUser() {
- return submitUser;
- }
-
- protected void setSubmitUser(String submitUser) {
- this.submitUser = submitUser;
- }
-
- public ExecutionOptions getExecutionOptions() {
- return executionOptions;
- }
-
- protected void setExecutionOptions(ExecutionOptions executionOptions) {
- this.executionOptions = executionOptions;
- }
-
- public List<SlaOption> getSlaOptions() {
- return slaOptions;
- }
-
- protected void setSlaOptions(List<SlaOption> slaOptions) {
- this.slaOptions = slaOptions;
- }
-
- public static ExecutorManagerAdapter getExecutorManager() {
- return executorManager;
- }
-
- public static void setExecutorManager(ExecutorManagerAdapter executorManager) {
- ExecuteFlowAction.executorManager = executorManager;
- }
-
- public static TriggerManager getTriggerManager() {
- return triggerManager;
- }
-
- public static void setTriggerManager(TriggerManager triggerManager) {
- ExecuteFlowAction.triggerManager = triggerManager;
- }
-
- public static ProjectManager getProjectManager() {
- return projectManager;
- }
-
- public static void setProjectManager(ProjectManager projectManager) {
- ExecuteFlowAction.projectManager = projectManager;
- }
-
- @Override
- public String getType() {
- return type;
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public TriggerAction fromJson(Object obj) {
- return createFromJson((HashMap<String, Object>) obj);
- }
-
- @SuppressWarnings("unchecked")
- public static TriggerAction createFromJson(HashMap<String, Object> obj) {
- Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
- String objType = (String) jsonObj.get("type");
- if(! objType.equals(type)) {
- throw new RuntimeException("Cannot create action of " + type + " from " + objType);
- }
- String actionId = (String) jsonObj.get("actionId");
- int projectId = Integer.valueOf((String)jsonObj.get("projectId"));
- String projectName = (String) jsonObj.get("projectName");
- String flowName = (String) jsonObj.get("flowName");
- String submitUser = (String) jsonObj.get("submitUser");
- ExecutionOptions executionOptions = null;
- if(jsonObj.containsKey("executionOptions")) {
- executionOptions = ExecutionOptions.createFromObject(jsonObj.get("executionOptions"));
- }
- List<SlaOption> slaOptions = null;
- if(jsonObj.containsKey("slaOptions")) {
- slaOptions = new ArrayList<SlaOption>();
- List<Object> slaOptionsObj = (List<Object>) jsonObj.get("slaOptions");
- for(Object slaObj : slaOptionsObj) {
- slaOptions.add(SlaOption.fromObject(slaObj));
- }
- }
- return new ExecuteFlowAction(actionId, projectId, projectName, flowName, submitUser, executionOptions, slaOptions);
- }
-
- @Override
- public Object toJson() {
- Map<String, Object> jsonObj = new HashMap<String, Object>();
- jsonObj.put("actionId", actionId);
- jsonObj.put("type", type);
- jsonObj.put("projectId", String.valueOf(projectId));
- jsonObj.put("projectName", projectName);
- jsonObj.put("flowName", flowName);
- jsonObj.put("submitUser", submitUser);
- if(executionOptions != null) {
- jsonObj.put("executionOptions", executionOptions.toObject());
- }
- if(slaOptions != null) {
- List<Object> slaOptionsObj = new ArrayList<Object>();
- for(SlaOption sla : slaOptions) {
- slaOptionsObj.add(sla.toObject());
- }
- jsonObj.put("slaOptions", slaOptionsObj);
- }
- return jsonObj;
- }
-
- @Override
- public void doAction() throws Exception {
- if(projectManager == null || executorManager == null) {
- throw new Exception("ExecuteFlowAction not properly initialized!");
- }
-
- Project project = projectManager.getProject(projectId);
- if(project == null) {
- logger.error("Project to execute " + projectId + " does not exist!");
- throw new RuntimeException("Error finding the project to execute " + projectId);
- }
-
- Flow flow = project.getFlow(flowName);
- if(flow == null) {
- logger.error("Flow " + flowName + " cannot be found in project " + project.getName());
- throw new RuntimeException("Error finding the flow to execute " + flowName);
- }
-
- ExecutableFlow exflow = new ExecutableFlow(project, flow);
- exflow.setSubmitUser(submitUser);
- exflow.addAllProxyUsers(project.getProxyUsers());
-
- if(executionOptions == null) {
- executionOptions = new ExecutionOptions();
- }
- if(!executionOptions.isFailureEmailsOverridden()) {
- executionOptions.setFailureEmails(flow.getFailureEmails());
- }
- if(!executionOptions.isSuccessEmailsOverridden()) {
- executionOptions.setSuccessEmails(flow.getSuccessEmails());
- }
- exflow.setExecutionOptions(executionOptions);
-
- try{
- executorManager.submitExecutableFlow(exflow, submitUser);
-// Map<String, Object> outputProps = new HashMap<String, Object>();
-// outputProps.put(EXEC_ID, exflow.getExecutionId());
-// context.put(actionId, outputProps);
- logger.info("Invoked flow " + project.getName() + "." + flowName);
- } catch (ExecutorManagerException e) {
- throw new RuntimeException(e);
- }
-
- // deal with sla
- if(slaOptions != null && slaOptions.size() > 0) {
- int execId = exflow.getExecutionId();
- for(SlaOption sla : slaOptions) {
- logger.info("Adding sla trigger " + sla.toString() + " to execution " + execId);
- SlaChecker slaFailChecker = new SlaChecker("slaFailChecker", sla, execId);
- Map<String, ConditionChecker> slaCheckers = new HashMap<String, ConditionChecker>();
- slaCheckers.put(slaFailChecker.getId(), slaFailChecker);
- Condition triggerCond = new Condition(slaCheckers, slaFailChecker.getId() + ".isSlaFailed()");
- // if whole flow finish before violate sla, just expire
- SlaChecker slaPassChecker = new SlaChecker("slaPassChecker", sla, execId);
- Map<String, ConditionChecker> expireCheckers = new HashMap<String, ConditionChecker>();
- expireCheckers.put(slaPassChecker.getId(), slaPassChecker);
- Condition expireCond = new Condition(expireCheckers, slaPassChecker.getId() + ".isSlaPassed()");
- List<TriggerAction> actions = new ArrayList<TriggerAction>();
- List<String> slaActions = sla.getActions();
- for(String act : slaActions) {
- if(act.equals(SlaOption.ACTION_ALERT)) {
- SlaAlertAction slaAlert = new SlaAlertAction("slaAlert", sla, execId);
- actions.add(slaAlert);
- } else if(act.equals(SlaOption.ACTION_CANCEL_FLOW)) {
- KillExecutionAction killAct = new KillExecutionAction("killExecution", execId);
- actions.add(killAct);
- }
- }
- Trigger slaTrigger = new Trigger("azkaban_sla", "azkaban", triggerCond, expireCond, actions);
- slaTrigger.getInfo().put("monitored.finished.execution", String.valueOf(execId));
- slaTrigger.setResetOnTrigger(false);
- slaTrigger.setResetOnExpire(false);
- logger.info("Ready to put in the sla trigger");
- triggerManager.insertTrigger(slaTrigger);
- logger.info("Sla inserted.");
- }
- }
-
- }
-
- @Override
- public String getDescription() {
- return "Execute flow " + getFlowName() +
- " from project " + getProjectName();
- }
-
- @Override
- public void setContext(Map<String, Object> context) {
- }
-
- @Override
- public String getId() {
- return actionId;
- }
+ public static final String type = "ExecuteFlowAction";
+
+ public static final String EXEC_ID = "ExecuteFlowAction.execid";
+
+ private static ExecutorManagerAdapter executorManager;
+ private static TriggerManager triggerManager;
+ private String actionId;
+ private int projectId;
+ private String projectName;
+ private String flowName;
+ private String submitUser;
+ private static ProjectManager projectManager;
+ private ExecutionOptions executionOptions = new ExecutionOptions();
+ private List<SlaOption> slaOptions;
+
+ private static Logger logger = Logger.getLogger(ExecuteFlowAction.class);
+
+ public ExecuteFlowAction(String actionId, int projectId, String projectName,
+ String flowName, String submitUser, ExecutionOptions executionOptions,
+ List<SlaOption> slaOptions) {
+ this.actionId = actionId;
+ this.projectId = projectId;
+ this.projectName = projectName;
+ this.flowName = flowName;
+ this.submitUser = submitUser;
+ this.executionOptions = executionOptions;
+ this.slaOptions = slaOptions;
+ }
+
+ public static void setLogger(Logger logger) {
+ ExecuteFlowAction.logger = logger;
+ }
+
+ public String getProjectName() {
+ return projectName;
+ }
+
+ public int getProjectId() {
+ return projectId;
+ }
+
+ protected void setProjectId(int projectId) {
+ this.projectId = projectId;
+ }
+
+ public String getFlowName() {
+ return flowName;
+ }
+
+ protected void setFlowName(String flowName) {
+ this.flowName = flowName;
+ }
+
+ public String getSubmitUser() {
+ return submitUser;
+ }
+
+ protected void setSubmitUser(String submitUser) {
+ this.submitUser = submitUser;
+ }
+
+ public ExecutionOptions getExecutionOptions() {
+ return executionOptions;
+ }
+
+ protected void setExecutionOptions(ExecutionOptions executionOptions) {
+ this.executionOptions = executionOptions;
+ }
+
+ public List<SlaOption> getSlaOptions() {
+ return slaOptions;
+ }
+
+ protected void setSlaOptions(List<SlaOption> slaOptions) {
+ this.slaOptions = slaOptions;
+ }
+
+ public static ExecutorManagerAdapter getExecutorManager() {
+ return executorManager;
+ }
+
+ public static void setExecutorManager(ExecutorManagerAdapter executorManager) {
+ ExecuteFlowAction.executorManager = executorManager;
+ }
+
+ public static TriggerManager getTriggerManager() {
+ return triggerManager;
+ }
+
+ public static void setTriggerManager(TriggerManager triggerManager) {
+ ExecuteFlowAction.triggerManager = triggerManager;
+ }
+
+ public static ProjectManager getProjectManager() {
+ return projectManager;
+ }
+
+ public static void setProjectManager(ProjectManager projectManager) {
+ ExecuteFlowAction.projectManager = projectManager;
+ }
+
+ @Override
+ public String getType() {
+ return type;
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public TriggerAction fromJson(Object obj) {
+ return createFromJson((HashMap<String, Object>) obj);
+ }
+
+ @SuppressWarnings("unchecked")
+ public static TriggerAction createFromJson(HashMap<String, Object> obj) {
+ Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+ String objType = (String) jsonObj.get("type");
+ if (!objType.equals(type)) {
+ throw new RuntimeException("Cannot create action of " + type + " from "
+ + objType);
+ }
+ String actionId = (String) jsonObj.get("actionId");
+ int projectId = Integer.valueOf((String) jsonObj.get("projectId"));
+ String projectName = (String) jsonObj.get("projectName");
+ String flowName = (String) jsonObj.get("flowName");
+ String submitUser = (String) jsonObj.get("submitUser");
+ ExecutionOptions executionOptions = null;
+ if (jsonObj.containsKey("executionOptions")) {
+ executionOptions =
+ ExecutionOptions.createFromObject(jsonObj.get("executionOptions"));
+ }
+ List<SlaOption> slaOptions = null;
+ if (jsonObj.containsKey("slaOptions")) {
+ slaOptions = new ArrayList<SlaOption>();
+ List<Object> slaOptionsObj = (List<Object>) jsonObj.get("slaOptions");
+ for (Object slaObj : slaOptionsObj) {
+ slaOptions.add(SlaOption.fromObject(slaObj));
+ }
+ }
+ return new ExecuteFlowAction(actionId, projectId, projectName, flowName,
+ submitUser, executionOptions, slaOptions);
+ }
+
+ @Override
+ public Object toJson() {
+ Map<String, Object> jsonObj = new HashMap<String, Object>();
+ jsonObj.put("actionId", actionId);
+ jsonObj.put("type", type);
+ jsonObj.put("projectId", String.valueOf(projectId));
+ jsonObj.put("projectName", projectName);
+ jsonObj.put("flowName", flowName);
+ jsonObj.put("submitUser", submitUser);
+ if (executionOptions != null) {
+ jsonObj.put("executionOptions", executionOptions.toObject());
+ }
+ if (slaOptions != null) {
+ List<Object> slaOptionsObj = new ArrayList<Object>();
+ for (SlaOption sla : slaOptions) {
+ slaOptionsObj.add(sla.toObject());
+ }
+ jsonObj.put("slaOptions", slaOptionsObj);
+ }
+ return jsonObj;
+ }
+
+ @Override
+ public void doAction() throws Exception {
+ if (projectManager == null || executorManager == null) {
+ throw new Exception("ExecuteFlowAction not properly initialized!");
+ }
+
+ Project project = projectManager.getProject(projectId);
+ if (project == null) {
+ logger.error("Project to execute " + projectId + " does not exist!");
+ throw new RuntimeException("Error finding the project to execute "
+ + projectId);
+ }
+
+ Flow flow = project.getFlow(flowName);
+ if (flow == null) {
+ logger.error("Flow " + flowName + " cannot be found in project "
+ + project.getName());
+ throw new RuntimeException("Error finding the flow to execute "
+ + flowName);
+ }
+
+ ExecutableFlow exflow = new ExecutableFlow(project, flow);
+ exflow.setSubmitUser(submitUser);
+ exflow.addAllProxyUsers(project.getProxyUsers());
+
+ if (executionOptions == null) {
+ executionOptions = new ExecutionOptions();
+ }
+ if (!executionOptions.isFailureEmailsOverridden()) {
+ executionOptions.setFailureEmails(flow.getFailureEmails());
+ }
+ if (!executionOptions.isSuccessEmailsOverridden()) {
+ executionOptions.setSuccessEmails(flow.getSuccessEmails());
+ }
+ exflow.setExecutionOptions(executionOptions);
+
+ try {
+ executorManager.submitExecutableFlow(exflow, submitUser);
+ // Map<String, Object> outputProps = new HashMap<String, Object>();
+ // outputProps.put(EXEC_ID, exflow.getExecutionId());
+ // context.put(actionId, outputProps);
+ logger.info("Invoked flow " + project.getName() + "." + flowName);
+ } catch (ExecutorManagerException e) {
+ throw new RuntimeException(e);
+ }
+
+ // deal with sla
+ if (slaOptions != null && slaOptions.size() > 0) {
+ int execId = exflow.getExecutionId();
+ for (SlaOption sla : slaOptions) {
+ logger.info("Adding sla trigger " + sla.toString() + " to execution "
+ + execId);
+ SlaChecker slaFailChecker =
+ new SlaChecker("slaFailChecker", sla, execId);
+ Map<String, ConditionChecker> slaCheckers =
+ new HashMap<String, ConditionChecker>();
+ slaCheckers.put(slaFailChecker.getId(), slaFailChecker);
+ Condition triggerCond =
+ new Condition(slaCheckers, slaFailChecker.getId()
+ + ".isSlaFailed()");
+ // if whole flow finish before violate sla, just expire
+ SlaChecker slaPassChecker =
+ new SlaChecker("slaPassChecker", sla, execId);
+ Map<String, ConditionChecker> expireCheckers =
+ new HashMap<String, ConditionChecker>();
+ expireCheckers.put(slaPassChecker.getId(), slaPassChecker);
+ Condition expireCond =
+ new Condition(expireCheckers, slaPassChecker.getId()
+ + ".isSlaPassed()");
+ List<TriggerAction> actions = new ArrayList<TriggerAction>();
+ List<String> slaActions = sla.getActions();
+ for (String act : slaActions) {
+ if (act.equals(SlaOption.ACTION_ALERT)) {
+ SlaAlertAction slaAlert =
+ new SlaAlertAction("slaAlert", sla, execId);
+ actions.add(slaAlert);
+ } else if (act.equals(SlaOption.ACTION_CANCEL_FLOW)) {
+ KillExecutionAction killAct =
+ new KillExecutionAction("killExecution", execId);
+ actions.add(killAct);
+ }
+ }
+ Trigger slaTrigger =
+ new Trigger("azkaban_sla", "azkaban", triggerCond, expireCond,
+ actions);
+ slaTrigger.getInfo().put("monitored.finished.execution",
+ String.valueOf(execId));
+ slaTrigger.setResetOnTrigger(false);
+ slaTrigger.setResetOnExpire(false);
+ logger.info("Ready to put in the sla trigger");
+ triggerManager.insertTrigger(slaTrigger);
+ logger.info("Sla inserted.");
+ }
+ }
+
+ }
+
+ @Override
+ public String getDescription() {
+ return "Execute flow " + getFlowName() + " from project "
+ + getProjectName();
+ }
+
+ @Override
+ public void setContext(Map<String, Object> context) {
+ }
+
+ @Override
+ public String getId() {
+ return actionId;
+ }
}
src/main/java/azkaban/trigger/builtin/ExecutionChecker.java 221(+112 -109)
diff --git a/src/main/java/azkaban/trigger/builtin/ExecutionChecker.java b/src/main/java/azkaban/trigger/builtin/ExecutionChecker.java
index 01fa43a..f351103 100644
--- a/src/main/java/azkaban/trigger/builtin/ExecutionChecker.java
+++ b/src/main/java/azkaban/trigger/builtin/ExecutionChecker.java
@@ -26,114 +26,117 @@ import azkaban.executor.ExecutorManagerException;
import azkaban.executor.Status;
import azkaban.trigger.ConditionChecker;
-public class ExecutionChecker implements ConditionChecker{
-
- public static final String type = "ExecutionChecker";
- public static ExecutorManagerAdapter executorManager;
-
- private String checkerId;
- private int execId;
- private String jobName;
- private Status wantedStatus;
-
- public ExecutionChecker(String checkerId, int execId, String jobName, Status wantedStatus) {
- this.checkerId = checkerId;
- this.execId = execId;
- this.jobName = jobName;
- this.wantedStatus = wantedStatus;
- }
-
- public static void setExecutorManager(ExecutorManagerAdapter em) {
- executorManager = em;
- }
-
- @Override
- public Object eval() {
- ExecutableFlow exflow;
- try {
- exflow = executorManager.getExecutableFlow(execId);
- } catch (ExecutorManagerException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- return Boolean.FALSE;
- }
- if(jobName != null) {
- ExecutableNode job = exflow.getExecutableNode(jobName);
- if(job != null) {
- return job.getStatus().equals(wantedStatus);
- } else {
- return Boolean.FALSE;
- }
- } else {
- return exflow.getStatus().equals(wantedStatus);
- }
-
- }
-
- @Override
- public Object getNum() {
- return null;
- }
-
- @Override
- public void reset() {
- }
-
- @Override
- public String getId() {
- return checkerId;
- }
-
- @Override
- public String getType() {
- return type;
- }
-
- public static ExecutionChecker createFromJson(HashMap<String, Object> jsonObj) throws Exception {
- if(!jsonObj.get("type").equals(type)) {
- throw new Exception("Cannot create checker of " + type + " from " + jsonObj.get("type"));
- }
- int execId = Integer.valueOf((String) jsonObj.get("execId"));
- String jobName = null;
- if(jsonObj.containsKey("jobName")) {
- jobName = (String) jsonObj.get("jobName");
- }
- String checkerId = (String) jsonObj.get("checkerId");
- Status wantedStatus = Status.valueOf((String)jsonObj.get("wantedStatus"));
-
- return new ExecutionChecker(checkerId, execId, jobName, wantedStatus);
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public ConditionChecker fromJson(Object obj) throws Exception {
- return createFromJson((HashMap<String, Object>) obj);
- }
-
- @Override
- public Object toJson() {
- Map<String, Object> jsonObj = new HashMap<String, Object>();
- jsonObj.put("type", type);
- jsonObj.put("execId", String.valueOf(execId));
- if(jobName != null) {
- jsonObj.put("jobName", jobName);
- }
- jsonObj.put("wantedStatus", wantedStatus.toString());
- jsonObj.put("checkerId", checkerId);
- return jsonObj;
- }
-
- @Override
- public void stopChecker() {
- }
-
- @Override
- public void setContext(Map<String, Object> context) {
- }
-
- @Override
- public long getNextCheckTime() {
- return -1;
- }
+public class ExecutionChecker implements ConditionChecker {
+
+ public static final String type = "ExecutionChecker";
+ public static ExecutorManagerAdapter executorManager;
+
+ private String checkerId;
+ private int execId;
+ private String jobName;
+ private Status wantedStatus;
+
+ public ExecutionChecker(String checkerId, int execId, String jobName,
+ Status wantedStatus) {
+ this.checkerId = checkerId;
+ this.execId = execId;
+ this.jobName = jobName;
+ this.wantedStatus = wantedStatus;
+ }
+
+ public static void setExecutorManager(ExecutorManagerAdapter em) {
+ executorManager = em;
+ }
+
+ @Override
+ public Object eval() {
+ ExecutableFlow exflow;
+ try {
+ exflow = executorManager.getExecutableFlow(execId);
+ } catch (ExecutorManagerException e) {
+ // Could not look up the execution; treat the check as not satisfied.
+ e.printStackTrace();
+ return Boolean.FALSE;
+ }
+ if (jobName != null) {
+ ExecutableNode job = exflow.getExecutableNode(jobName);
+ if (job != null) {
+ return job.getStatus().equals(wantedStatus);
+ } else {
+ return Boolean.FALSE;
+ }
+ } else {
+ return exflow.getStatus().equals(wantedStatus);
+ }
+
+ }
+
+ @Override
+ public Object getNum() {
+ return null;
+ }
+
+ @Override
+ public void reset() {
+ }
+
+ @Override
+ public String getId() {
+ return checkerId;
+ }
+
+ @Override
+ public String getType() {
+ return type;
+ }
+
+ public static ExecutionChecker createFromJson(HashMap<String, Object> jsonObj)
+ throws Exception {
+ if (!jsonObj.get("type").equals(type)) {
+ throw new Exception("Cannot create checker of " + type + " from "
+ + jsonObj.get("type"));
+ }
+ int execId = Integer.valueOf((String) jsonObj.get("execId"));
+ String jobName = null;
+ if (jsonObj.containsKey("jobName")) {
+ jobName = (String) jsonObj.get("jobName");
+ }
+ String checkerId = (String) jsonObj.get("checkerId");
+ Status wantedStatus = Status.valueOf((String) jsonObj.get("wantedStatus"));
+
+ return new ExecutionChecker(checkerId, execId, jobName, wantedStatus);
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public ConditionChecker fromJson(Object obj) throws Exception {
+ return createFromJson((HashMap<String, Object>) obj);
+ }
+
+ @Override
+ public Object toJson() {
+ Map<String, Object> jsonObj = new HashMap<String, Object>();
+ jsonObj.put("type", type);
+ jsonObj.put("execId", String.valueOf(execId));
+ if (jobName != null) {
+ jsonObj.put("jobName", jobName);
+ }
+ jsonObj.put("wantedStatus", wantedStatus.toString());
+ jsonObj.put("checkerId", checkerId);
+ return jsonObj;
+ }
+
+ @Override
+ public void stopChecker() {
+ }
+
+ @Override
+ public void setContext(Map<String, Object> context) {
+ }
+
+ @Override
+ public long getNextCheckTime() {
+ return -1;
+ }
}
diff --git a/src/main/java/azkaban/trigger/builtin/KillExecutionAction.java b/src/main/java/azkaban/trigger/builtin/KillExecutionAction.java
index 3114fd2..ac32252 100644
--- a/src/main/java/azkaban/trigger/builtin/KillExecutionAction.java
+++ b/src/main/java/azkaban/trigger/builtin/KillExecutionAction.java
@@ -26,85 +26,87 @@ import azkaban.executor.ExecutorManagerAdapter;
import azkaban.executor.Status;
import azkaban.trigger.TriggerAction;
-public class KillExecutionAction implements TriggerAction{
-
- public static final String type = "KillExecutionAction";
-
- private static final Logger logger = Logger.getLogger(KillExecutionAction.class);
-
- private String actionId;
- private int execId;
- private static ExecutorManagerAdapter executorManager;
-
- public KillExecutionAction(String actionId, int execId) {
- this.execId = execId;
- this.actionId = actionId;
- }
-
- public static void setExecutorManager(ExecutorManagerAdapter em) {
- executorManager = em;
- }
-
- @Override
- public String getId() {
- return actionId;
- }
-
- @Override
- public String getType() {
- return type;
- }
-
- @SuppressWarnings("unchecked")
- public static KillExecutionAction createFromJson(Object obj) {
- return createFromJson((HashMap<String, Object>)obj);
- }
-
- public static KillExecutionAction createFromJson(HashMap<String, Object> obj) {
- Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
- String objType = (String) jsonObj.get("type");
- if(! objType.equals(type)) {
- throw new RuntimeException("Cannot create action of " + type + " from " + objType);
- }
- String actionId = (String) jsonObj.get("actionId");
- int execId = Integer.valueOf((String) jsonObj.get("execId"));
- return new KillExecutionAction(actionId, execId);
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public KillExecutionAction fromJson(Object obj) throws Exception {
- return createFromJson((HashMap<String, Object>)obj);
- }
-
- @Override
- public Object toJson() {
- Map<String, Object> jsonObj = new HashMap<String, Object>();
- jsonObj.put("actionId", actionId);
- jsonObj.put("type", type);
- jsonObj.put("execId", String.valueOf(execId));
- return jsonObj;
- }
-
- @Override
- public void doAction() throws Exception {
- ExecutableFlow exFlow = executorManager.getExecutableFlow(execId);
- logger.info("ready to kill execution " + execId);
- if(!Status.isStatusFinished(exFlow.getStatus())) {
- logger.info("Killing execution " + execId);
- executorManager.cancelFlow(exFlow, "azkaban_sla");
- }
- }
-
- @Override
- public void setContext(Map<String, Object> context) {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public String getDescription() {
- return type + " for " + execId;
- }
+public class KillExecutionAction implements TriggerAction {
+
+ public static final String type = "KillExecutionAction";
+
+ private static final Logger logger = Logger
+ .getLogger(KillExecutionAction.class);
+
+ private String actionId;
+ private int execId;
+ private static ExecutorManagerAdapter executorManager;
+
+ public KillExecutionAction(String actionId, int execId) {
+ this.execId = execId;
+ this.actionId = actionId;
+ }
+
+ public static void setExecutorManager(ExecutorManagerAdapter em) {
+ executorManager = em;
+ }
+
+ @Override
+ public String getId() {
+ return actionId;
+ }
+
+ @Override
+ public String getType() {
+ return type;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static KillExecutionAction createFromJson(Object obj) {
+ return createFromJson((HashMap<String, Object>) obj);
+ }
+
+ public static KillExecutionAction createFromJson(HashMap<String, Object> obj) {
+ Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+ String objType = (String) jsonObj.get("type");
+ if (!objType.equals(type)) {
+ throw new RuntimeException("Cannot create action of " + type + " from "
+ + objType);
+ }
+ String actionId = (String) jsonObj.get("actionId");
+ int execId = Integer.valueOf((String) jsonObj.get("execId"));
+ return new KillExecutionAction(actionId, execId);
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public KillExecutionAction fromJson(Object obj) throws Exception {
+ return createFromJson((HashMap<String, Object>) obj);
+ }
+
+ @Override
+ public Object toJson() {
+ Map<String, Object> jsonObj = new HashMap<String, Object>();
+ jsonObj.put("actionId", actionId);
+ jsonObj.put("type", type);
+ jsonObj.put("execId", String.valueOf(execId));
+ return jsonObj;
+ }
+
+ @Override
+ public void doAction() throws Exception {
+ ExecutableFlow exFlow = executorManager.getExecutableFlow(execId);
+ logger.info("ready to kill execution " + execId);
+ if (!Status.isStatusFinished(exFlow.getStatus())) {
+ logger.info("Killing execution " + execId);
+ executorManager.cancelFlow(exFlow, "azkaban_sla");
+ }
+ }
+
+ @Override
+ public void setContext(Map<String, Object> context) {
+ // No context is required for this action.
+
+ }
+
+ @Override
+ public String getDescription() {
+ return type + " for " + execId;
+ }
}
diff --git a/src/main/java/azkaban/trigger/builtin/SendEmailAction.java b/src/main/java/azkaban/trigger/builtin/SendEmailAction.java
index dd5997b..fb7739b 100644
--- a/src/main/java/azkaban/trigger/builtin/SendEmailAction.java
+++ b/src/main/java/azkaban/trigger/builtin/SendEmailAction.java
@@ -26,82 +26,84 @@ import azkaban.utils.EmailMessage;
import azkaban.utils.Props;
public class SendEmailAction implements TriggerAction {
-
- private String actionId;
- private static AbstractMailer mailer;
- private String message;
- public static final String type = "SendEmailAction";
- private String mimetype = "text/html";
- private List<String> emailList;
- private String subject;
-
- public static void init(Props props) {
- mailer = new AbstractMailer(props);
- }
-
- public SendEmailAction(String actionId, String subject, String message, List<String> emailList) {
- this.actionId = actionId;
- this.message = message;
- this.subject = subject;
- this.emailList = emailList;
- }
-
- @Override
- public String getId() {
- return actionId;
- }
-
- @Override
- public String getType() {
- return type;
- }
-
- @SuppressWarnings("unchecked")
- public static SendEmailAction createFromJson(Object obj) throws Exception {
- Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
- if(!jsonObj.get("type").equals(type)) {
- throw new Exception("Cannot create action of " + type + " from " + jsonObj.get("type"));
- }
- String actionId = (String) jsonObj.get("actionId");
- String subject = (String) jsonObj.get("subject");
- String message = (String) jsonObj.get("message");
- List<String> emailList = (List<String>) jsonObj.get("emailList");
- return new SendEmailAction(actionId, subject, message, emailList);
- }
-
- @Override
- public TriggerAction fromJson(Object obj) throws Exception {
- return createFromJson(obj);
- }
-
- @Override
- public Object toJson() {
- Map<String, Object> jsonObj = new HashMap<String, Object>();
- jsonObj.put("actionId", actionId);
- jsonObj.put("type", type);
- jsonObj.put("subject", subject);
- jsonObj.put("message", message);
- jsonObj.put("emailList", emailList);
-
- return jsonObj;
- }
-
- @Override
- public void doAction() throws Exception {
- EmailMessage email = mailer.prepareEmailMessage(subject, mimetype, emailList);
- email.setBody(message);
- email.sendEmail();
- }
-
- @Override
- public void setContext(Map<String, Object> context) {
-
- }
-
- @Override
- public String getDescription() {
- return type;
- }
-
-
+
+ private String actionId;
+ private static AbstractMailer mailer;
+ private String message;
+ public static final String type = "SendEmailAction";
+ private String mimetype = "text/html";
+ private List<String> emailList;
+ private String subject;
+
+ public static void init(Props props) {
+ mailer = new AbstractMailer(props);
+ }
+
+ public SendEmailAction(String actionId, String subject, String message,
+ List<String> emailList) {
+ this.actionId = actionId;
+ this.message = message;
+ this.subject = subject;
+ this.emailList = emailList;
+ }
+
+ @Override
+ public String getId() {
+ return actionId;
+ }
+
+ @Override
+ public String getType() {
+ return type;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static SendEmailAction createFromJson(Object obj) throws Exception {
+ Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+ if (!jsonObj.get("type").equals(type)) {
+ throw new Exception("Cannot create action of " + type + " from "
+ + jsonObj.get("type"));
+ }
+ String actionId = (String) jsonObj.get("actionId");
+ String subject = (String) jsonObj.get("subject");
+ String message = (String) jsonObj.get("message");
+ List<String> emailList = (List<String>) jsonObj.get("emailList");
+ return new SendEmailAction(actionId, subject, message, emailList);
+ }
+
+ @Override
+ public TriggerAction fromJson(Object obj) throws Exception {
+ return createFromJson(obj);
+ }
+
+ @Override
+ public Object toJson() {
+ Map<String, Object> jsonObj = new HashMap<String, Object>();
+ jsonObj.put("actionId", actionId);
+ jsonObj.put("type", type);
+ jsonObj.put("subject", subject);
+ jsonObj.put("message", message);
+ jsonObj.put("emailList", emailList);
+
+ return jsonObj;
+ }
+
+ @Override
+ public void doAction() throws Exception {
+ EmailMessage email =
+ mailer.prepareEmailMessage(subject, mimetype, emailList);
+ email.setBody(message);
+ email.sendEmail();
+ }
+
+ @Override
+ public void setContext(Map<String, Object> context) {
+
+ }
+
+ @Override
+ public String getDescription() {
+ return type;
+ }
+
}
src/main/java/azkaban/trigger/builtin/SlaAlertAction.java 300(+159 -141)
diff --git a/src/main/java/azkaban/trigger/builtin/SlaAlertAction.java b/src/main/java/azkaban/trigger/builtin/SlaAlertAction.java
index f340497..e7b34de 100644
--- a/src/main/java/azkaban/trigger/builtin/SlaAlertAction.java
+++ b/src/main/java/azkaban/trigger/builtin/SlaAlertAction.java
@@ -27,146 +27,164 @@ import azkaban.executor.ExecutorManagerAdapter;
import azkaban.sla.SlaOption;
import azkaban.trigger.TriggerAction;
-public class SlaAlertAction implements TriggerAction{
-
- public static final String type = "AlertAction";
-
- private static final Logger logger = Logger.getLogger(SlaAlertAction.class);
-
- private String actionId;
- private SlaOption slaOption;
- private int execId;
-// private List<Map<String, Object>> alerts;
- private static Map<String, azkaban.alert.Alerter> alerters;
- private static ExecutorManagerAdapter executorManager;
-
- public SlaAlertAction(String id, SlaOption slaOption, int execId) {
- this.actionId = id;
- this.slaOption = slaOption;
- this.execId = execId;
-// this.alerts = alerts;
- }
-
- public static void setAlerters(Map<String, Alerter> alts) {
- alerters = alts;
- }
-
- public static void setExecutorManager(ExecutorManagerAdapter em) {
- executorManager = em;
- }
-
- @Override
- public String getId() {
- return actionId;
- }
-
- @Override
- public String getType() {
- return type;
- }
-
- @SuppressWarnings("unchecked")
- public static SlaAlertAction createFromJson(Object obj) throws Exception {
- return createFromJson((HashMap<String, Object>) obj);
- }
-
- public static SlaAlertAction createFromJson(HashMap<String, Object> obj) throws Exception {
- Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
- if(!jsonObj.get("type").equals(type)) {
- throw new Exception("Cannot create action of " + type + " from " + jsonObj.get("type"));
- }
- String actionId = (String) jsonObj.get("actionId");
- SlaOption slaOption = SlaOption.fromObject(jsonObj.get("slaOption"));
- int execId = Integer.valueOf((String) jsonObj.get("execId"));
-// List<Map<String, Object>> alerts = (List<Map<String, Object>>) jsonObj.get("alerts");
- return new SlaAlertAction(actionId, slaOption, execId);
- }
-
- @Override
- public TriggerAction fromJson(Object obj) throws Exception {
- return createFromJson(obj);
- }
-
- @Override
- public Object toJson() {
- Map<String, Object> jsonObj = new HashMap<String, Object>();
- jsonObj.put("actionId", actionId);
- jsonObj.put("type", type);
- jsonObj.put("slaOption", slaOption.toObject());
- jsonObj.put("execId", String.valueOf(execId));
-// jsonObj.put("alerts", alerts);
-
- return jsonObj;
- }
-
- @Override
- public void doAction() throws Exception {
-// for(Map<String, Object> alert : alerts) {
- logger.info("Alerting on sla failure.");
- Map<String, Object> alert = slaOption.getInfo();
- if(alert.containsKey(SlaOption.ALERT_TYPE)) {
- String alertType = (String) alert.get(SlaOption.ALERT_TYPE);
- Alerter alerter = alerters.get(alertType);
- if(alerter != null) {
- try {
- ExecutableFlow flow = executorManager.getExecutableFlow(execId);
- alerter.alertOnSla(slaOption, SlaOption.createSlaMessage(slaOption, flow));
- } catch (Exception e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- logger.error("Failed to alert by " + alertType);
- }
- }
- else {
- logger.error("Alerter type " + alertType + " doesn't exist. Failed to alert.");
- }
- }
-// }
- }
-
-// private String createSlaMessage() {
-// ExecutableFlow flow = null;
-// try {
-// flow = executorManager.getExecutableFlow(execId);
-// } catch (ExecutorManagerException e) {
-// e.printStackTrace();
-// logger.error("Failed to get executable flow.");
-// }
-// String type = slaOption.getType();
-// if(type.equals(SlaOption.TYPE_FLOW_FINISH)) {
-// String flowName = (String) slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
-// String duration = (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
-// String basicinfo = "SLA Alert: Your flow " + flowName + " failed to FINISH within " + duration + "</br>";
-// String expected = "Here is details : </br>" + "Flow " + flowName + " in execution " + execId + " is expected to FINISH within " + duration + " from " + flow.getStartTime() + "</br>";
-// String actual = "Actual flow status is " + flow.getStatus();
-// return basicinfo + expected + actual;
-// } else if(type.equals(SlaOption.TYPE_FLOW_SUCCEED)) {
-// String flowName = (String) slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
-// String duration = (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
-// String basicinfo = "SLA Alert: Your flow " + flowName + " failed to SUCCEED within " + duration + "</br>";
-// String expected = "Here is details : </br>" + "Flow " + flowName + " in execution " + execId + " expected to FINISH within " + duration + " from " + flow.getStartTime() + "</br>";
-// String actual = "Actual flow status is " + flow.getStatus();
-// return basicinfo + expected + actual;
-// } else if(type.equals(SlaOption.TYPE_JOB_FINISH)) {
-// String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
-// String duration = (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
-// return "SLA Alert: Your job " + jobName + " failed to FINISH within " + duration + " in execution " + execId;
-// } else if(type.equals(SlaOption.TYPE_JOB_SUCCEED)) {
-// String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
-// String duration = (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
-// return "SLA Alert: Your job " + jobName + " failed to SUCCEED within " + duration + " in execution " + execId;
-// } else {
-// return "Unrecognized SLA type " + type;
-// }
-// }
-
- @Override
- public void setContext(Map<String, Object> context) {
- }
-
- @Override
- public String getDescription() {
- return type + " for " + execId + " with " + slaOption.toString();
- }
+public class SlaAlertAction implements TriggerAction {
+
+ public static final String type = "AlertAction";
+
+ private static final Logger logger = Logger.getLogger(SlaAlertAction.class);
+
+ private String actionId;
+ private SlaOption slaOption;
+ private int execId;
+ // private List<Map<String, Object>> alerts;
+ private static Map<String, azkaban.alert.Alerter> alerters;
+ private static ExecutorManagerAdapter executorManager;
+
+ public SlaAlertAction(String id, SlaOption slaOption, int execId) {
+ this.actionId = id;
+ this.slaOption = slaOption;
+ this.execId = execId;
+ // this.alerts = alerts;
+ }
+
+ public static void setAlerters(Map<String, Alerter> alts) {
+ alerters = alts;
+ }
+
+ public static void setExecutorManager(ExecutorManagerAdapter em) {
+ executorManager = em;
+ }
+
+ @Override
+ public String getId() {
+ return actionId;
+ }
+
+ @Override
+ public String getType() {
+ return type;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static SlaAlertAction createFromJson(Object obj) throws Exception {
+ return createFromJson((HashMap<String, Object>) obj);
+ }
+
+ public static SlaAlertAction createFromJson(HashMap<String, Object> obj)
+ throws Exception {
+ Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+ if (!jsonObj.get("type").equals(type)) {
+ throw new Exception("Cannot create action of " + type + " from "
+ + jsonObj.get("type"));
+ }
+ String actionId = (String) jsonObj.get("actionId");
+ SlaOption slaOption = SlaOption.fromObject(jsonObj.get("slaOption"));
+ int execId = Integer.valueOf((String) jsonObj.get("execId"));
+ // List<Map<String, Object>> alerts = (List<Map<String, Object>>)
+ // jsonObj.get("alerts");
+ return new SlaAlertAction(actionId, slaOption, execId);
+ }
+
+ @Override
+ public TriggerAction fromJson(Object obj) throws Exception {
+ return createFromJson(obj);
+ }
+
+ @Override
+ public Object toJson() {
+ Map<String, Object> jsonObj = new HashMap<String, Object>();
+ jsonObj.put("actionId", actionId);
+ jsonObj.put("type", type);
+ jsonObj.put("slaOption", slaOption.toObject());
+ jsonObj.put("execId", String.valueOf(execId));
+ // jsonObj.put("alerts", alerts);
+
+ return jsonObj;
+ }
+
+ @Override
+ public void doAction() throws Exception {
+ // for(Map<String, Object> alert : alerts) {
+ logger.info("Alerting on sla failure.");
+ Map<String, Object> alert = slaOption.getInfo();
+ if (alert.containsKey(SlaOption.ALERT_TYPE)) {
+ String alertType = (String) alert.get(SlaOption.ALERT_TYPE);
+ Alerter alerter = alerters.get(alertType);
+ if (alerter != null) {
+ try {
+ ExecutableFlow flow = executorManager.getExecutableFlow(execId);
+ alerter.alertOnSla(slaOption,
+ SlaOption.createSlaMessage(slaOption, flow));
+ } catch (Exception e) {
+ // Alerting is best-effort: log the failure and continue.
+ e.printStackTrace();
+ logger.error("Failed to alert by " + alertType);
+ }
+ } else {
+ logger.error("Alerter type " + alertType
+ + " doesn't exist. Failed to alert.");
+ }
+ }
+ // }
+ }
+
+ // private String createSlaMessage() {
+ // ExecutableFlow flow = null;
+ // try {
+ // flow = executorManager.getExecutableFlow(execId);
+ // } catch (ExecutorManagerException e) {
+ // e.printStackTrace();
+ // logger.error("Failed to get executable flow.");
+ // }
+ // String type = slaOption.getType();
+ // if(type.equals(SlaOption.TYPE_FLOW_FINISH)) {
+ // String flowName = (String)
+ // slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
+ // String duration = (String)
+ // slaOption.getInfo().get(SlaOption.INFO_DURATION);
+ // String basicinfo = "SLA Alert: Your flow " + flowName +
+ // " failed to FINISH within " + duration + "</br>";
+ // String expected = "Here is details : </br>" + "Flow " + flowName +
+ // " in execution " + execId + " is expected to FINISH within " + duration +
+ // " from " + flow.getStartTime() + "</br>";
+ // String actual = "Actual flow status is " + flow.getStatus();
+ // return basicinfo + expected + actual;
+ // } else if(type.equals(SlaOption.TYPE_FLOW_SUCCEED)) {
+ // String flowName = (String)
+ // slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
+ // String duration = (String)
+ // slaOption.getInfo().get(SlaOption.INFO_DURATION);
+ // String basicinfo = "SLA Alert: Your flow " + flowName +
+ // " failed to SUCCEED within " + duration + "</br>";
+ // String expected = "Here is details : </br>" + "Flow " + flowName +
+ // " in execution " + execId + " expected to FINISH within " + duration +
+ // " from " + flow.getStartTime() + "</br>";
+ // String actual = "Actual flow status is " + flow.getStatus();
+ // return basicinfo + expected + actual;
+ // } else if(type.equals(SlaOption.TYPE_JOB_FINISH)) {
+ // String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+ // String duration = (String)
+ // slaOption.getInfo().get(SlaOption.INFO_DURATION);
+ // return "SLA Alert: Your job " + jobName + " failed to FINISH within " +
+ // duration + " in execution " + execId;
+ // } else if(type.equals(SlaOption.TYPE_JOB_SUCCEED)) {
+ // String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+ // String duration = (String)
+ // slaOption.getInfo().get(SlaOption.INFO_DURATION);
+ // return "SLA Alert: Your job " + jobName + " failed to SUCCEED within " +
+ // duration + " in execution " + execId;
+ // } else {
+ // return "Unrecognized SLA type " + type;
+ // }
+ // }
+
+ @Override
+ public void setContext(Map<String, Object> context) {
+ }
+
+ @Override
+ public String getDescription() {
+ return type + " for " + execId + " with " + slaOption.toString();
+ }
}
src/main/java/azkaban/trigger/builtin/SlaChecker.java 584(+304 -280)
diff --git a/src/main/java/azkaban/trigger/builtin/SlaChecker.java b/src/main/java/azkaban/trigger/builtin/SlaChecker.java
index f8897a5..0db4250 100644
--- a/src/main/java/azkaban/trigger/builtin/SlaChecker.java
+++ b/src/main/java/azkaban/trigger/builtin/SlaChecker.java
@@ -32,284 +32,308 @@ import azkaban.sla.SlaOption;
import azkaban.trigger.ConditionChecker;
import azkaban.utils.Utils;
-public class SlaChecker implements ConditionChecker{
-
- private static final Logger logger = Logger.getLogger(SlaChecker.class);
- public static final String type = "SlaChecker";
-
- private String id;
- private SlaOption slaOption;
- private int execId;
- private long checkTime = -1;
-
- private static ExecutorManagerAdapter executorManager;
-
- public SlaChecker(String id, SlaOption slaOption, int execId) {
- this.id = id;
- this.slaOption = slaOption;
- this.execId = execId;
- }
-
- public static void setExecutorManager(ExecutorManagerAdapter em) {
- executorManager = em;
- }
-
- private Boolean isSlaMissed(ExecutableFlow flow) {
- String type = slaOption.getType();
- logger.info("flow is " + flow.getStatus());
- if(flow.getStartTime() < 0) {
- return Boolean.FALSE;
- }
- Status status;
- if(type.equals(SlaOption.TYPE_FLOW_FINISH)) {
- if(checkTime < flow.getStartTime()) {
- ReadablePeriod duration = Utils.parsePeriodString((String) slaOption.getInfo().get(SlaOption.INFO_DURATION));
- DateTime startTime = new DateTime(flow.getStartTime());
- DateTime nextCheckTime = startTime.plus(duration);
- this.checkTime = nextCheckTime.getMillis();
- }
- status = flow.getStatus();
- if(checkTime < DateTime.now().getMillis()) {
- return !isFlowFinished(status);
- }
- } else if(type.equals(SlaOption.TYPE_FLOW_SUCCEED)) {
- if(checkTime < flow.getStartTime()) {
- ReadablePeriod duration = Utils.parsePeriodString((String) slaOption.getInfo().get(SlaOption.INFO_DURATION));
- DateTime startTime = new DateTime(flow.getStartTime());
- DateTime nextCheckTime = startTime.plus(duration);
- this.checkTime = nextCheckTime.getMillis();
- }
- status = flow.getStatus();
- if(checkTime < DateTime.now().getMillis()) {
- return !isFlowSucceeded(status);
- } else {
- return status.equals(Status.FAILED) || status.equals(Status.KILLED);
- }
- } else if(type.equals(SlaOption.TYPE_JOB_FINISH)) {
- String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
- ExecutableNode node = flow.getExecutableNode(jobName);
- if(node.getStartTime() < 0) {
- return Boolean.FALSE;
- }
- if(checkTime < node.getStartTime()) {
- ReadablePeriod duration = Utils.parsePeriodString((String) slaOption.getInfo().get(SlaOption.INFO_DURATION));
- DateTime startTime = new DateTime(node.getStartTime());
- DateTime nextCheckTime = startTime.plus(duration);
- this.checkTime = nextCheckTime.getMillis();
- }
- status = node.getStatus();
- if(checkTime < DateTime.now().getMillis()) {
- return !isJobFinished(status);
- }
- } else if(type.equals(SlaOption.TYPE_JOB_SUCCEED)) {
- String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
- ExecutableNode node = flow.getExecutableNode(jobName);
- if(node.getStartTime() < 0) {
- return Boolean.FALSE;
- }
- if(checkTime < node.getStartTime()) {
- ReadablePeriod duration = Utils.parsePeriodString((String) slaOption.getInfo().get(SlaOption.INFO_DURATION));
- DateTime startTime = new DateTime(node.getStartTime());
- DateTime nextCheckTime = startTime.plus(duration);
- this.checkTime = nextCheckTime.getMillis();
- }
- status = node.getStatus();
- if(checkTime < DateTime.now().getMillis()) {
- return !isJobFinished(status);
- } else {
- return status.equals(Status.FAILED) || status.equals(Status.KILLED);
- }
- }
- return Boolean.FALSE;
- }
-
- private Boolean isSlaGood(ExecutableFlow flow) {
- String type = slaOption.getType();
- logger.info("flow is " + flow.getStatus());
- if(flow.getStartTime() < 0) {
- return Boolean.FALSE;
- }
- Status status;
- if(type.equals(SlaOption.TYPE_FLOW_FINISH)) {
- if(checkTime < flow.getStartTime()) {
- ReadablePeriod duration = Utils.parsePeriodString((String) slaOption.getInfo().get(SlaOption.INFO_DURATION));
- DateTime startTime = new DateTime(flow.getStartTime());
- DateTime nextCheckTime = startTime.plus(duration);
- this.checkTime = nextCheckTime.getMillis();
- }
- status = flow.getStatus();
- return isFlowFinished(status);
- } else if(type.equals(SlaOption.TYPE_FLOW_SUCCEED)) {
- if(checkTime < flow.getStartTime()) {
- ReadablePeriod duration = Utils.parsePeriodString((String) slaOption.getInfo().get(SlaOption.INFO_DURATION));
- DateTime startTime = new DateTime(flow.getStartTime());
- DateTime nextCheckTime = startTime.plus(duration);
- this.checkTime = nextCheckTime.getMillis();
- }
- status = flow.getStatus();
- return isFlowSucceeded(status);
- } else if(type.equals(SlaOption.TYPE_JOB_FINISH)) {
- String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
- ExecutableNode node = flow.getExecutableNode(jobName);
- if(node.getStartTime() < 0) {
- return Boolean.FALSE;
- }
- if(checkTime < node.getStartTime()) {
- ReadablePeriod duration = Utils.parsePeriodString((String) slaOption.getInfo().get(SlaOption.INFO_DURATION));
- DateTime startTime = new DateTime(node.getStartTime());
- DateTime nextCheckTime = startTime.plus(duration);
- this.checkTime = nextCheckTime.getMillis();
- }
- status = node.getStatus();
- return isJobFinished(status);
- } else if(type.equals(SlaOption.TYPE_JOB_SUCCEED)) {
- String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
- ExecutableNode node = flow.getExecutableNode(jobName);
- if(node.getStartTime() < 0) {
- return Boolean.FALSE;
- }
- if(checkTime < node.getStartTime()) {
- ReadablePeriod duration = Utils.parsePeriodString((String) slaOption.getInfo().get(SlaOption.INFO_DURATION));
- DateTime startTime = new DateTime(node.getStartTime());
- DateTime nextCheckTime = startTime.plus(duration);
- this.checkTime = nextCheckTime.getMillis();
- }
- status = node.getStatus();
- return isJobSucceeded(status);
- }
- return Boolean.FALSE;
- }
-
- // return true to trigger sla action
- @Override
- public Object eval() {
- logger.info("Checking sla for execution " + execId);
- ExecutableFlow flow;
- try {
- flow = executorManager.getExecutableFlow(execId);
- } catch (ExecutorManagerException e) {
- logger.error("Can't get executable flow.", e);
- e.printStackTrace();
- // something wrong, send out alerts
- return Boolean.TRUE;
- }
- return isSlaMissed(flow);
- }
-
- public Object isSlaFailed() {
- logger.info("Testing if sla failed for execution " + execId);
- ExecutableFlow flow;
- try {
- flow = executorManager.getExecutableFlow(execId);
- } catch (ExecutorManagerException e) {
- logger.error("Can't get executable flow.", e);
- e.printStackTrace();
- // something wrong, send out alerts
- return Boolean.TRUE;
- }
- return isSlaMissed(flow);
- }
-
- public Object isSlaPassed() {
- logger.info("Testing if sla is good for execution " + execId);
- ExecutableFlow flow;
- try {
- flow = executorManager.getExecutableFlow(execId);
- } catch (ExecutorManagerException e) {
- logger.error("Can't get executable flow.", e);
- e.printStackTrace();
- // something wrong, send out alerts
- return Boolean.TRUE;
- }
- return isSlaGood(flow);
- }
-
- @Override
- public Object getNum() {
- return null;
- }
-
- @Override
- public void reset() {
- }
-
- @Override
- public String getId() {
- return id;
- }
-
- @Override
- public String getType() {
- return type;
- }
-
- @Override
- public ConditionChecker fromJson(Object obj) throws Exception {
- return createFromJson(obj);
- }
-
- @SuppressWarnings("unchecked")
- public static SlaChecker createFromJson(Object obj) throws Exception {
- return createFromJson((HashMap<String, Object>)obj);
- }
-
- public static SlaChecker createFromJson(HashMap<String, Object> obj) throws Exception {
- Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
- if(!jsonObj.get("type").equals(type)) {
- throw new Exception("Cannot create checker of " + type + " from " + jsonObj.get("type"));
- }
- String id = (String) jsonObj.get("id");
- SlaOption slaOption = SlaOption.fromObject(jsonObj.get("slaOption"));
- int execId = Integer.valueOf((String) jsonObj.get("execId"));
- return new SlaChecker(id, slaOption, execId);
- }
-
- @Override
- public Object toJson() {
- Map<String, Object> jsonObj = new HashMap<String, Object>();
- jsonObj.put("type", type);
- jsonObj.put("id", id);
- jsonObj.put("slaOption", slaOption.toObject());
- jsonObj.put("execId", String.valueOf(execId));
-
- return jsonObj;
- }
-
- @Override
- public void stopChecker() {
-
- }
-
- @Override
- public void setContext(Map<String, Object> context) {
- }
-
- @Override
- public long getNextCheckTime() {
- return checkTime;
- }
-
- private boolean isFlowFinished(Status status) {
- if(status.equals(Status.FAILED) || status.equals(Status.KILLED) || status.equals(Status.SUCCEEDED)) {
- return Boolean.TRUE;
- } else {
- return Boolean.FALSE;
- }
- }
-
- private boolean isFlowSucceeded(Status status) {
- return status.equals(Status.SUCCEEDED);
- }
-
- private boolean isJobFinished(Status status) {
- if(status.equals(Status.FAILED) || status.equals(Status.KILLED) || status.equals(Status.SUCCEEDED)) {
- return Boolean.TRUE;
- } else {
- return Boolean.FALSE;
- }
- }
-
- private boolean isJobSucceeded(Status status) {
- return status.equals(Status.SUCCEEDED);
- }
+public class SlaChecker implements ConditionChecker {
+
+ private static final Logger logger = Logger.getLogger(SlaChecker.class);
+ public static final String type = "SlaChecker";
+
+ private String id;
+ private SlaOption slaOption;
+ private int execId;
+ private long checkTime = -1;
+
+ private static ExecutorManagerAdapter executorManager;
+
+ public SlaChecker(String id, SlaOption slaOption, int execId) {
+ this.id = id;
+ this.slaOption = slaOption;
+ this.execId = execId;
+ }
+
+ public static void setExecutorManager(ExecutorManagerAdapter em) {
+ executorManager = em;
+ }
+
+ private Boolean isSlaMissed(ExecutableFlow flow) {
+ String type = slaOption.getType();
+ logger.info("flow is " + flow.getStatus());
+ if (flow.getStartTime() < 0) {
+ return Boolean.FALSE;
+ }
+ Status status;
+ if (type.equals(SlaOption.TYPE_FLOW_FINISH)) {
+ if (checkTime < flow.getStartTime()) {
+ ReadablePeriod duration =
+ Utils.parsePeriodString((String) slaOption.getInfo().get(
+ SlaOption.INFO_DURATION));
+ DateTime startTime = new DateTime(flow.getStartTime());
+ DateTime nextCheckTime = startTime.plus(duration);
+ this.checkTime = nextCheckTime.getMillis();
+ }
+ status = flow.getStatus();
+ if (checkTime < DateTime.now().getMillis()) {
+ return !isFlowFinished(status);
+ }
+ } else if (type.equals(SlaOption.TYPE_FLOW_SUCCEED)) {
+ if (checkTime < flow.getStartTime()) {
+ ReadablePeriod duration =
+ Utils.parsePeriodString((String) slaOption.getInfo().get(
+ SlaOption.INFO_DURATION));
+ DateTime startTime = new DateTime(flow.getStartTime());
+ DateTime nextCheckTime = startTime.plus(duration);
+ this.checkTime = nextCheckTime.getMillis();
+ }
+ status = flow.getStatus();
+ if (checkTime < DateTime.now().getMillis()) {
+ return !isFlowSucceeded(status);
+ } else {
+ return status.equals(Status.FAILED) || status.equals(Status.KILLED);
+ }
+ } else if (type.equals(SlaOption.TYPE_JOB_FINISH)) {
+ String jobName =
+ (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+ ExecutableNode node = flow.getExecutableNode(jobName);
+ if (node.getStartTime() < 0) {
+ return Boolean.FALSE;
+ }
+ if (checkTime < node.getStartTime()) {
+ ReadablePeriod duration =
+ Utils.parsePeriodString((String) slaOption.getInfo().get(
+ SlaOption.INFO_DURATION));
+ DateTime startTime = new DateTime(node.getStartTime());
+ DateTime nextCheckTime = startTime.plus(duration);
+ this.checkTime = nextCheckTime.getMillis();
+ }
+ status = node.getStatus();
+ if (checkTime < DateTime.now().getMillis()) {
+ return !isJobFinished(status);
+ }
+ } else if (type.equals(SlaOption.TYPE_JOB_SUCCEED)) {
+ String jobName =
+ (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+ ExecutableNode node = flow.getExecutableNode(jobName);
+ if (node.getStartTime() < 0) {
+ return Boolean.FALSE;
+ }
+ if (checkTime < node.getStartTime()) {
+ ReadablePeriod duration =
+ Utils.parsePeriodString((String) slaOption.getInfo().get(
+ SlaOption.INFO_DURATION));
+ DateTime startTime = new DateTime(node.getStartTime());
+ DateTime nextCheckTime = startTime.plus(duration);
+ this.checkTime = nextCheckTime.getMillis();
+ }
+ status = node.getStatus();
+ if (checkTime < DateTime.now().getMillis()) {
+ return !isJobSucceeded(status);
+ } else {
+ return status.equals(Status.FAILED) || status.equals(Status.KILLED);
+ }
+ }
+ return Boolean.FALSE;
+ }
+
+ private Boolean isSlaGood(ExecutableFlow flow) {
+ String type = slaOption.getType();
+ logger.info("flow is " + flow.getStatus());
+ if (flow.getStartTime() < 0) {
+ return Boolean.FALSE;
+ }
+ Status status;
+ if (type.equals(SlaOption.TYPE_FLOW_FINISH)) {
+ if (checkTime < flow.getStartTime()) {
+ ReadablePeriod duration =
+ Utils.parsePeriodString((String) slaOption.getInfo().get(
+ SlaOption.INFO_DURATION));
+ DateTime startTime = new DateTime(flow.getStartTime());
+ DateTime nextCheckTime = startTime.plus(duration);
+ this.checkTime = nextCheckTime.getMillis();
+ }
+ status = flow.getStatus();
+ return isFlowFinished(status);
+ } else if (type.equals(SlaOption.TYPE_FLOW_SUCCEED)) {
+ if (checkTime < flow.getStartTime()) {
+ ReadablePeriod duration =
+ Utils.parsePeriodString((String) slaOption.getInfo().get(
+ SlaOption.INFO_DURATION));
+ DateTime startTime = new DateTime(flow.getStartTime());
+ DateTime nextCheckTime = startTime.plus(duration);
+ this.checkTime = nextCheckTime.getMillis();
+ }
+ status = flow.getStatus();
+ return isFlowSucceeded(status);
+ } else if (type.equals(SlaOption.TYPE_JOB_FINISH)) {
+ String jobName =
+ (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+ ExecutableNode node = flow.getExecutableNode(jobName);
+ if (node.getStartTime() < 0) {
+ return Boolean.FALSE;
+ }
+ if (checkTime < node.getStartTime()) {
+ ReadablePeriod duration =
+ Utils.parsePeriodString((String) slaOption.getInfo().get(
+ SlaOption.INFO_DURATION));
+ DateTime startTime = new DateTime(node.getStartTime());
+ DateTime nextCheckTime = startTime.plus(duration);
+ this.checkTime = nextCheckTime.getMillis();
+ }
+ status = node.getStatus();
+ return isJobFinished(status);
+ } else if (type.equals(SlaOption.TYPE_JOB_SUCCEED)) {
+ String jobName =
+ (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+ ExecutableNode node = flow.getExecutableNode(jobName);
+ if (node.getStartTime() < 0) {
+ return Boolean.FALSE;
+ }
+ if (checkTime < node.getStartTime()) {
+ ReadablePeriod duration =
+ Utils.parsePeriodString((String) slaOption.getInfo().get(
+ SlaOption.INFO_DURATION));
+ DateTime startTime = new DateTime(node.getStartTime());
+ DateTime nextCheckTime = startTime.plus(duration);
+ this.checkTime = nextCheckTime.getMillis();
+ }
+ status = node.getStatus();
+ return isJobSucceeded(status);
+ }
+ return Boolean.FALSE;
+ }
+
+ // return true to trigger sla action
+ @Override
+ public Object eval() {
+ logger.info("Checking sla for execution " + execId);
+ ExecutableFlow flow;
+ try {
+ flow = executorManager.getExecutableFlow(execId);
+ } catch (ExecutorManagerException e) {
+ logger.error("Can't get executable flow.", e);
+ e.printStackTrace();
+ // something wrong, send out alerts
+ return Boolean.TRUE;
+ }
+ return isSlaMissed(flow);
+ }
+
+ public Object isSlaFailed() {
+ logger.info("Testing if sla failed for execution " + execId);
+ ExecutableFlow flow;
+ try {
+ flow = executorManager.getExecutableFlow(execId);
+ } catch (ExecutorManagerException e) {
+ logger.error("Can't get executable flow.", e);
+ e.printStackTrace();
+ // something wrong, send out alerts
+ return Boolean.TRUE;
+ }
+ return isSlaMissed(flow);
+ }
+
+ public Object isSlaPassed() {
+ logger.info("Testing if sla is good for execution " + execId);
+ ExecutableFlow flow;
+ try {
+ flow = executorManager.getExecutableFlow(execId);
+ } catch (ExecutorManagerException e) {
+ logger.error("Can't get executable flow.", e);
+ e.printStackTrace();
+ // something wrong, send out alerts
+ return Boolean.TRUE;
+ }
+ return isSlaGood(flow);
+ }
+
+ @Override
+ public Object getNum() {
+ return null;
+ }
+
+ @Override
+ public void reset() {
+ }
+
+ @Override
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public String getType() {
+ return type;
+ }
+
+ @Override
+ public ConditionChecker fromJson(Object obj) throws Exception {
+ return createFromJson(obj);
+ }
+
+ @SuppressWarnings("unchecked")
+ public static SlaChecker createFromJson(Object obj) throws Exception {
+ return createFromJson((HashMap<String, Object>) obj);
+ }
+
+ public static SlaChecker createFromJson(HashMap<String, Object> obj)
+ throws Exception {
+ Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+ if (!jsonObj.get("type").equals(type)) {
+ throw new Exception("Cannot create checker of " + type + " from "
+ + jsonObj.get("type"));
+ }
+ String id = (String) jsonObj.get("id");
+ SlaOption slaOption = SlaOption.fromObject(jsonObj.get("slaOption"));
+ int execId = Integer.valueOf((String) jsonObj.get("execId"));
+ return new SlaChecker(id, slaOption, execId);
+ }
+
+ @Override
+ public Object toJson() {
+ Map<String, Object> jsonObj = new HashMap<String, Object>();
+ jsonObj.put("type", type);
+ jsonObj.put("id", id);
+ jsonObj.put("slaOption", slaOption.toObject());
+ jsonObj.put("execId", String.valueOf(execId));
+
+ return jsonObj;
+ }
+
+ @Override
+ public void stopChecker() {
+
+ }
+
+ @Override
+ public void setContext(Map<String, Object> context) {
+ }
+
+ @Override
+ public long getNextCheckTime() {
+ return checkTime;
+ }
+
+ private boolean isFlowFinished(Status status) {
+ if (status.equals(Status.FAILED) || status.equals(Status.KILLED)
+ || status.equals(Status.SUCCEEDED)) {
+ return Boolean.TRUE;
+ } else {
+ return Boolean.FALSE;
+ }
+ }
+
+ private boolean isFlowSucceeded(Status status) {
+ return status.equals(Status.SUCCEEDED);
+ }
+
+ private boolean isJobFinished(Status status) {
+ if (status.equals(Status.FAILED) || status.equals(Status.KILLED)
+ || status.equals(Status.SUCCEEDED)) {
+ return Boolean.TRUE;
+ } else {
+ return Boolean.FALSE;
+ }
+ }
+
+ private boolean isJobSucceeded(Status status) {
+ return status.equals(Status.SUCCEEDED);
+ }
}
src/main/java/azkaban/trigger/CheckerTypeLoader.java 316(+164 -152)
diff --git a/src/main/java/azkaban/trigger/CheckerTypeLoader.java b/src/main/java/azkaban/trigger/CheckerTypeLoader.java
index 7c02bed..85a024a 100644
--- a/src/main/java/azkaban/trigger/CheckerTypeLoader.java
+++ b/src/main/java/azkaban/trigger/CheckerTypeLoader.java
@@ -24,157 +24,169 @@ import org.apache.log4j.Logger;
import azkaban.utils.Props;
import azkaban.utils.Utils;
-
public class CheckerTypeLoader {
-
- private static Logger logger = Logger.getLogger(CheckerTypeLoader.class);
-
- public static final String DEFAULT_CONDITION_CHECKER_PLUGIN_DIR = "plugins/conditioncheckers";
-
- protected static Map<String, Class<? extends ConditionChecker>> checkerToClass = new HashMap<String, Class<? extends ConditionChecker>>();
-
- public void init(Props props) throws TriggerException {
-
-
- // load built-in checkers
-//
-// loadBuiltinCheckers();
-//
-// loadPluginCheckers(props);
-
- }
-
- public synchronized void registerCheckerType(String type, Class<? extends ConditionChecker> checkerClass) {
- logger.info("Registering checker " + type);
- if(!checkerToClass.containsKey(type)) {
- checkerToClass.put(type, checkerClass);
- }
- }
-
-// private void loadPluginCheckers(Props props) throws TriggerException {
-//
-// String checkerDir = props.getString("azkaban.condition.checker.plugin.dir", DEFAULT_CONDITION_CHECKER_PLUGIN_DIR);
-// File pluginDir = new File(checkerDir);
-// if(!pluginDir.exists() || !pluginDir.isDirectory() || !pluginDir.canRead()) {
-// logger.info("No conditon checker plugins to load.");
-// return;
-// }
-//
-// logger.info("Loading plugin condition checkers from " + pluginDir);
-// ClassLoader parentCl = this.getClass().getClassLoader();
-//
-// Props globalCheckerConf = null;
-// File confFile = Utils.findFilefromDir(pluginDir, COMMONCONFFILE);
-// try {
-// if(confFile != null) {
-// globalCheckerConf = new Props(null, confFile);
-// } else {
-// globalCheckerConf = new Props();
-// }
-// } catch (IOException e) {
-// throw new TriggerException("Failed to get global properties." + e);
-// }
-//
-// for(File dir : pluginDir.listFiles()) {
-// if(dir.isDirectory() && dir.canRead()) {
-// try {
-// loadPluginTypes(globalCheckerConf, pluginDir, parentCl);
-// } catch (Exception e) {
-// logger.info("Plugin checkers failed to load. " + e.getCause());
-// throw new TriggerException("Failed to load all condition checkers!", e);
-// }
-// }
-// }
-// }
-//
-// @SuppressWarnings("unchecked")
-// private void loadPluginTypes(Props globalConf, File dir, ClassLoader parentCl) throws TriggerException {
-// Props checkerConf = null;
-// File confFile = Utils.findFilefromDir(dir, CHECKERTYPECONFFILE);
-// if(confFile == null) {
-// logger.info("No checker type found in " + dir.getAbsolutePath());
-// return;
-// }
-// try {
-// checkerConf = new Props(globalConf, confFile);
-// } catch (IOException e) {
-// throw new TriggerException("Failed to load config for the checker type", e);
-// }
-//
-// String checkerName = dir.getName();
-// String checkerClass = checkerConf.getString("checker.class");
-//
-// List<URL> resources = new ArrayList<URL>();
-// for(File f : dir.listFiles()) {
-// try {
-// if(f.getName().endsWith(".jar")) {
-// resources.add(f.toURI().toURL());
-// logger.info("adding to classpath " + f.toURI().toURL());
-// }
-// } catch (MalformedURLException e) {
-// // TODO Auto-generated catch block
-// throw new TriggerException(e);
-// }
-// }
-//
-// // each job type can have a different class loader
-// ClassLoader checkerCl = new URLClassLoader(resources.toArray(new URL[resources.size()]), parentCl);
-//
-// Class<? extends ConditionChecker> clazz = null;
-// try {
-// clazz = (Class<? extends ConditionChecker>)checkerCl.loadClass(checkerClass);
-// checkerToClass.put(checkerName, clazz);
-// }
-// catch (ClassNotFoundException e) {
-// throw new TriggerException(e);
-// }
-//
-// if(checkerConf.getBoolean("need.init")) {
-// try {
-// Utils.invokeStaticMethod(checkerCl, checkerClass, "init", checkerConf);
-// } catch (Exception e) {
-// e.printStackTrace();
-// logger.error("Failed to init the checker type " + checkerName);
-// throw new TriggerException(e);
-// }
-// }
-//
-// logger.info("Loaded checker type " + checkerName + " " + checkerClass);
-// }
-
- public static void registerBuiltinCheckers(Map<String, Class<? extends ConditionChecker>> builtinCheckers) {
- checkerToClass.putAll(checkerToClass);
- for(String type : builtinCheckers.keySet()) {
- logger.info("Loaded " + type + " checker.");
- }
- }
-
-// private void loadBuiltinCheckers() {
-// checkerToClass.put("BasicTimeChecker", BasicTimeChecker.class);
-// logger.info("Loaded BasicTimeChecker type.");
-// }
-
- public ConditionChecker createCheckerFromJson(String type, Object obj) throws Exception {
- ConditionChecker checker = null;
- Class<? extends ConditionChecker> checkerClass = checkerToClass.get(type);
- if(checkerClass == null) {
- throw new Exception("Checker type " + type + " not supported!");
- }
- checker = (ConditionChecker) Utils.invokeStaticMethod(checkerClass.getClassLoader(), checkerClass.getName(), "createFromJson", obj);
-
- return checker;
- }
-
- public ConditionChecker createChecker(String type, Object ... args) {
- ConditionChecker checker = null;
- Class<? extends ConditionChecker> checkerClass = checkerToClass.get(type);
- checker = (ConditionChecker) Utils.callConstructor(checkerClass, args);
-
- return checker;
- }
-
- public Map<String, Class<? extends ConditionChecker>> getSupportedCheckers() {
- return checkerToClass;
- }
-
+
+ private static Logger logger = Logger.getLogger(CheckerTypeLoader.class);
+
+ public static final String DEFAULT_CONDITION_CHECKER_PLUGIN_DIR =
+ "plugins/conditioncheckers";
+
+ protected static Map<String, Class<? extends ConditionChecker>> checkerToClass =
+ new HashMap<String, Class<? extends ConditionChecker>>();
+
+ public void init(Props props) throws TriggerException {
+
+ // load built-in checkers
+ //
+ // loadBuiltinCheckers();
+ //
+ // loadPluginCheckers(props);
+
+ }
+
+ public synchronized void registerCheckerType(String type,
+ Class<? extends ConditionChecker> checkerClass) {
+ logger.info("Registering checker " + type);
+ if (!checkerToClass.containsKey(type)) {
+ checkerToClass.put(type, checkerClass);
+ }
+ }
+
+ // private void loadPluginCheckers(Props props) throws TriggerException {
+ //
+ // String checkerDir = props.getString("azkaban.condition.checker.plugin.dir",
+ // DEFAULT_CONDITION_CHECKER_PLUGIN_DIR);
+ // File pluginDir = new File(checkerDir);
+ // if(!pluginDir.exists() || !pluginDir.isDirectory() || !pluginDir.canRead())
+ // {
+ // logger.info("No conditon checker plugins to load.");
+ // return;
+ // }
+ //
+ // logger.info("Loading plugin condition checkers from " + pluginDir);
+ // ClassLoader parentCl = this.getClass().getClassLoader();
+ //
+ // Props globalCheckerConf = null;
+ // File confFile = Utils.findFilefromDir(pluginDir, COMMONCONFFILE);
+ // try {
+ // if(confFile != null) {
+ // globalCheckerConf = new Props(null, confFile);
+ // } else {
+ // globalCheckerConf = new Props();
+ // }
+ // } catch (IOException e) {
+ // throw new TriggerException("Failed to get global properties." + e);
+ // }
+ //
+ // for(File dir : pluginDir.listFiles()) {
+ // if(dir.isDirectory() && dir.canRead()) {
+ // try {
+ // loadPluginTypes(globalCheckerConf, pluginDir, parentCl);
+ // } catch (Exception e) {
+ // logger.info("Plugin checkers failed to load. " + e.getCause());
+ // throw new TriggerException("Failed to load all condition checkers!", e);
+ // }
+ // }
+ // }
+ // }
+ //
+ // @SuppressWarnings("unchecked")
+ // private void loadPluginTypes(Props globalConf, File dir, ClassLoader
+ // parentCl) throws TriggerException {
+ // Props checkerConf = null;
+ // File confFile = Utils.findFilefromDir(dir, CHECKERTYPECONFFILE);
+ // if(confFile == null) {
+ // logger.info("No checker type found in " + dir.getAbsolutePath());
+ // return;
+ // }
+ // try {
+ // checkerConf = new Props(globalConf, confFile);
+ // } catch (IOException e) {
+ // throw new TriggerException("Failed to load config for the checker type",
+ // e);
+ // }
+ //
+ // String checkerName = dir.getName();
+ // String checkerClass = checkerConf.getString("checker.class");
+ //
+ // List<URL> resources = new ArrayList<URL>();
+ // for(File f : dir.listFiles()) {
+ // try {
+ // if(f.getName().endsWith(".jar")) {
+ // resources.add(f.toURI().toURL());
+ // logger.info("adding to classpath " + f.toURI().toURL());
+ // }
+ // } catch (MalformedURLException e) {
+ // // TODO Auto-generated catch block
+ // throw new TriggerException(e);
+ // }
+ // }
+ //
+ // // each job type can have a different class loader
+ // ClassLoader checkerCl = new URLClassLoader(resources.toArray(new
+ // URL[resources.size()]), parentCl);
+ //
+ // Class<? extends ConditionChecker> clazz = null;
+ // try {
+ // clazz = (Class<? extends
+ // ConditionChecker>)checkerCl.loadClass(checkerClass);
+ // checkerToClass.put(checkerName, clazz);
+ // }
+ // catch (ClassNotFoundException e) {
+ // throw new TriggerException(e);
+ // }
+ //
+ // if(checkerConf.getBoolean("need.init")) {
+ // try {
+ // Utils.invokeStaticMethod(checkerCl, checkerClass, "init", checkerConf);
+ // } catch (Exception e) {
+ // e.printStackTrace();
+ // logger.error("Failed to init the checker type " + checkerName);
+ // throw new TriggerException(e);
+ // }
+ // }
+ //
+ // logger.info("Loaded checker type " + checkerName + " " + checkerClass);
+ // }
+
+ public static void registerBuiltinCheckers(
+ Map<String, Class<? extends ConditionChecker>> builtinCheckers) {
+ checkerToClass.putAll(builtinCheckers);
+ for (String type : builtinCheckers.keySet()) {
+ logger.info("Loaded " + type + " checker.");
+ }
+ }
+
+ // private void loadBuiltinCheckers() {
+ // checkerToClass.put("BasicTimeChecker", BasicTimeChecker.class);
+ // logger.info("Loaded BasicTimeChecker type.");
+ // }
+
+ public ConditionChecker createCheckerFromJson(String type, Object obj)
+ throws Exception {
+ ConditionChecker checker = null;
+ Class<? extends ConditionChecker> checkerClass = checkerToClass.get(type);
+ if (checkerClass == null) {
+ throw new Exception("Checker type " + type + " not supported!");
+ }
+ checker =
+ (ConditionChecker) Utils.invokeStaticMethod(
+ checkerClass.getClassLoader(), checkerClass.getName(),
+ "createFromJson", obj);
+
+ return checker;
+ }
+
+ public ConditionChecker createChecker(String type, Object... args) {
+ ConditionChecker checker = null;
+ Class<? extends ConditionChecker> checkerClass = checkerToClass.get(type);
+ checker = (ConditionChecker) Utils.callConstructor(checkerClass, args);
+
+ return checker;
+ }
+
+ public Map<String, Class<? extends ConditionChecker>> getSupportedCheckers() {
+ return checkerToClass;
+ }
+
}
src/main/java/azkaban/trigger/Condition.java 294(+150 -144)
diff --git a/src/main/java/azkaban/trigger/Condition.java b/src/main/java/azkaban/trigger/Condition.java
index 1bead36..c6698b2 100644
--- a/src/main/java/azkaban/trigger/Condition.java
+++ b/src/main/java/azkaban/trigger/Condition.java
@@ -28,149 +28,155 @@ import org.apache.log4j.Logger;
import org.joda.time.DateTime;
public class Condition {
-
- private static Logger logger = Logger.getLogger(Condition.class);
-
- private static JexlEngine jexl = new JexlEngine();
- private static CheckerTypeLoader checkerLoader = null;
- private Expression expression;
- private Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
- private MapContext context = new MapContext();
- private Long nextCheckTime = -1L;
-
- public Condition(Map<String, ConditionChecker> checkers, String expr) {
- setCheckers(checkers);
- this.expression = jexl.createExpression(expr);
- updateNextCheckTime();
- }
-
- public Condition(Map<String, ConditionChecker> checkers, String expr, long nextCheckTime) {
- this.nextCheckTime = nextCheckTime;
- setCheckers(checkers);
-// for(ConditionChecker ck : checkers.values()) {
-// ck.setCondition(this);
-// }
- this.expression = jexl.createExpression(expr);
- }
-
- public synchronized static void setJexlEngine(JexlEngine jexl) {
- Condition.jexl = jexl;
- }
-
- public synchronized static void setCheckerLoader(CheckerTypeLoader loader) {
- Condition.checkerLoader = loader;
- }
-
- protected static CheckerTypeLoader getCheckerLoader() {
- return checkerLoader;
- }
-
- protected void registerChecker(ConditionChecker checker) {
- checkers.put(checker.getId(), checker);
- context.set(checker.getId(), checker);
- updateNextCheckTime();
- }
-
- public long getNextCheckTime() {
- return nextCheckTime;
- }
-
- public Map<String, ConditionChecker> getCheckers() {
- return this.checkers;
- }
-
- public void setCheckers(Map<String, ConditionChecker> checkers){
- this.checkers = checkers;
- for(ConditionChecker checker : checkers.values()) {
- this.context.set(checker.getId(), checker);
-// checker.setCondition(this);
- }
- updateNextCheckTime();
- }
-
- public void updateCheckTime(Long ct) {
- if(nextCheckTime < ct) {
- nextCheckTime = ct;
- }
- }
-
- private void updateNextCheckTime() {
- long time = Long.MAX_VALUE;
- for(ConditionChecker checker : checkers.values()) {
- time = Math.min(time, checker.getNextCheckTime());
- }
- this.nextCheckTime = time;
- }
-
- public void resetCheckers() {
- for(ConditionChecker checker : checkers.values()) {
- checker.reset();
- }
- updateNextCheckTime();
- logger.info("Done resetting checkers. The next check time will be " + new DateTime(nextCheckTime));
- }
-
- public String getExpression() {
- return this.expression.getExpression();
- }
-
- public void setExpression(String expr) {
- this.expression = jexl.createExpression(expr);
- }
-
- public boolean isMet() {
- logger.info("Testing condition " + expression);
- return expression.evaluate(context).equals(Boolean.TRUE);
- }
-
- public Object toJson() {
- Map<String, Object> jsonObj = new HashMap<String, Object>();
- jsonObj.put("expression", expression.getExpression());
-
- List<Object> checkersJson = new ArrayList<Object>();
- for(ConditionChecker checker : checkers.values()) {
- Map<String, Object> oneChecker = new HashMap<String, Object>();
- oneChecker.put("type", checker.getType());
- oneChecker.put("checkerJson", checker.toJson());
- checkersJson.add(oneChecker);
- }
- jsonObj.put("checkers", checkersJson);
- jsonObj.put("nextCheckTime", String.valueOf(nextCheckTime));
-
- return jsonObj;
- }
-
- @SuppressWarnings("unchecked")
- public static Condition fromJson(Object obj) throws Exception {
- if(checkerLoader == null) {
- throw new Exception("Condition Checker loader not initialized!");
- }
-
- Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
- Condition cond = null;
-
- try {
- Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
- List<Object> checkersJson = (List<Object>) jsonObj.get("checkers");
- for(Object oneCheckerJson : checkersJson) {
- Map<String, Object> oneChecker = (HashMap<String, Object>) oneCheckerJson;
- String type = (String) oneChecker.get("type");
- ConditionChecker ck = checkerLoader.createCheckerFromJson(type, oneChecker.get("checkerJson"));
- checkers.put(ck.getId(), ck);
- }
- String expr = (String) jsonObj.get("expression");
- Long nextCheckTime = Long.valueOf((String) jsonObj.get("nextCheckTime"));
-
- cond = new Condition(checkers, expr, nextCheckTime);
-
- } catch(Exception e) {
- e.printStackTrace();
- logger.error("Failed to recreate condition from json.", e);
- throw new Exception("Failed to recreate condition from json.", e);
- }
-
- return cond;
- }
-
+
+ private static Logger logger = Logger.getLogger(Condition.class);
+
+ private static JexlEngine jexl = new JexlEngine();
+ private static CheckerTypeLoader checkerLoader = null;
+ private Expression expression;
+ private Map<String, ConditionChecker> checkers =
+ new HashMap<String, ConditionChecker>();
+ private MapContext context = new MapContext();
+ private Long nextCheckTime = -1L;
+
+ public Condition(Map<String, ConditionChecker> checkers, String expr) {
+ setCheckers(checkers);
+ this.expression = jexl.createExpression(expr);
+ updateNextCheckTime();
+ }
+
+ public Condition(Map<String, ConditionChecker> checkers, String expr,
+ long nextCheckTime) {
+ this.nextCheckTime = nextCheckTime;
+ setCheckers(checkers);
+ // for(ConditionChecker ck : checkers.values()) {
+ // ck.setCondition(this);
+ // }
+ this.expression = jexl.createExpression(expr);
+ }
+
+ public synchronized static void setJexlEngine(JexlEngine jexl) {
+ Condition.jexl = jexl;
+ }
+
+ public synchronized static void setCheckerLoader(CheckerTypeLoader loader) {
+ Condition.checkerLoader = loader;
+ }
+
+ protected static CheckerTypeLoader getCheckerLoader() {
+ return checkerLoader;
+ }
+
+ protected void registerChecker(ConditionChecker checker) {
+ checkers.put(checker.getId(), checker);
+ context.set(checker.getId(), checker);
+ updateNextCheckTime();
+ }
+
+ public long getNextCheckTime() {
+ return nextCheckTime;
+ }
+
+ public Map<String, ConditionChecker> getCheckers() {
+ return this.checkers;
+ }
+
+ public void setCheckers(Map<String, ConditionChecker> checkers) {
+ this.checkers = checkers;
+ for (ConditionChecker checker : checkers.values()) {
+ this.context.set(checker.getId(), checker);
+ // checker.setCondition(this);
+ }
+ updateNextCheckTime();
+ }
+
+ public void updateCheckTime(Long ct) {
+ if (nextCheckTime < ct) {
+ nextCheckTime = ct;
+ }
+ }
+
+ private void updateNextCheckTime() {
+ long time = Long.MAX_VALUE;
+ for (ConditionChecker checker : checkers.values()) {
+ time = Math.min(time, checker.getNextCheckTime());
+ }
+ this.nextCheckTime = time;
+ }
+
+ public void resetCheckers() {
+ for (ConditionChecker checker : checkers.values()) {
+ checker.reset();
+ }
+ updateNextCheckTime();
+ logger.info("Done resetting checkers. The next check time will be "
+ + new DateTime(nextCheckTime));
+ }
+
+ public String getExpression() {
+ return this.expression.getExpression();
+ }
+
+ public void setExpression(String expr) {
+ this.expression = jexl.createExpression(expr);
+ }
+
+ public boolean isMet() {
+ logger.info("Testing condition " + expression);
+ return expression.evaluate(context).equals(Boolean.TRUE);
+ }
+
+ public Object toJson() {
+ Map<String, Object> jsonObj = new HashMap<String, Object>();
+ jsonObj.put("expression", expression.getExpression());
+
+ List<Object> checkersJson = new ArrayList<Object>();
+ for (ConditionChecker checker : checkers.values()) {
+ Map<String, Object> oneChecker = new HashMap<String, Object>();
+ oneChecker.put("type", checker.getType());
+ oneChecker.put("checkerJson", checker.toJson());
+ checkersJson.add(oneChecker);
+ }
+ jsonObj.put("checkers", checkersJson);
+ jsonObj.put("nextCheckTime", String.valueOf(nextCheckTime));
+
+ return jsonObj;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static Condition fromJson(Object obj) throws Exception {
+ if (checkerLoader == null) {
+ throw new Exception("Condition Checker loader not initialized!");
+ }
+
+ Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+ Condition cond = null;
+
+ try {
+ Map<String, ConditionChecker> checkers =
+ new HashMap<String, ConditionChecker>();
+ List<Object> checkersJson = (List<Object>) jsonObj.get("checkers");
+ for (Object oneCheckerJson : checkersJson) {
+ Map<String, Object> oneChecker =
+ (HashMap<String, Object>) oneCheckerJson;
+ String type = (String) oneChecker.get("type");
+ ConditionChecker ck =
+ checkerLoader.createCheckerFromJson(type,
+ oneChecker.get("checkerJson"));
+ checkers.put(ck.getId(), ck);
+ }
+ String expr = (String) jsonObj.get("expression");
+ Long nextCheckTime = Long.valueOf((String) jsonObj.get("nextCheckTime"));
+
+ cond = new Condition(checkers, expr, nextCheckTime);
+
+ } catch (Exception e) {
+ e.printStackTrace();
+ logger.error("Failed to recreate condition from json.", e);
+ throw new Exception("Failed to recreate condition from json.", e);
+ }
+
+ return cond;
+ }
}
diff --git a/src/main/java/azkaban/trigger/ConditionChecker.java b/src/main/java/azkaban/trigger/ConditionChecker.java
index 94e3841..af215ea 100644
--- a/src/main/java/azkaban/trigger/ConditionChecker.java
+++ b/src/main/java/azkaban/trigger/ConditionChecker.java
@@ -19,25 +19,25 @@ package azkaban.trigger;
import java.util.Map;
public interface ConditionChecker {
-
- Object eval();
-
- Object getNum();
-
- void reset();
-
- String getId();
-
- String getType();
-
- ConditionChecker fromJson(Object obj) throws Exception;
-
- Object toJson();
-
- void stopChecker();
-
- void setContext(Map<String, Object> context);
-
- long getNextCheckTime();
-
+
+ Object eval();
+
+ Object getNum();
+
+ void reset();
+
+ String getId();
+
+ String getType();
+
+ ConditionChecker fromJson(Object obj) throws Exception;
+
+ Object toJson();
+
+ void stopChecker();
+
+ void setContext(Map<String, Object> context);
+
+ long getNextCheckTime();
+
}
src/main/java/azkaban/trigger/JdbcTriggerLoader.java 626(+318 -308)
diff --git a/src/main/java/azkaban/trigger/JdbcTriggerLoader.java b/src/main/java/azkaban/trigger/JdbcTriggerLoader.java
index b290c6e..099db6c 100644
--- a/src/main/java/azkaban/trigger/JdbcTriggerLoader.java
+++ b/src/main/java/azkaban/trigger/JdbcTriggerLoader.java
@@ -35,313 +35,323 @@ import azkaban.utils.GZIPUtils;
import azkaban.utils.JSONUtils;
import azkaban.utils.Props;
-public class JdbcTriggerLoader extends AbstractJdbcLoader implements TriggerLoader {
- private static Logger logger = Logger.getLogger(JdbcTriggerLoader.class);
-
- private EncodingType defaultEncodingType = EncodingType.GZIP;
-
- private static final String triggerTblName = "triggers";
-
- private static final String GET_UPDATED_TRIGGERS =
- "SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM " + triggerTblName + " WHERE modify_time>=?";
-
- private static String GET_ALL_TRIGGERS =
- "SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM " + triggerTblName;
-
- private static String GET_TRIGGER =
- "SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM " + triggerTblName + " WHERE trigger_id=?";
-
- private static String ADD_TRIGGER =
- "INSERT INTO " + triggerTblName + " ( modify_time) values (?)";
-
- private static String REMOVE_TRIGGER =
- "DELETE FROM " + triggerTblName + " WHERE trigger_id=?";
-
- private static String UPDATE_TRIGGER =
- "UPDATE " + triggerTblName + " SET trigger_source=?, modify_time=?, enc_type=?, data=? WHERE trigger_id=?";
-
- public EncodingType getDefaultEncodingType() {
- return defaultEncodingType;
- }
-
- public void setDefaultEncodingType(EncodingType defaultEncodingType) {
- this.defaultEncodingType = defaultEncodingType;
- }
-
- public JdbcTriggerLoader(Props props) {
- super(props);
- }
-
- @Override
- public List<Trigger> getUpdatedTriggers(long lastUpdateTime) throws TriggerLoaderException {
- logger.info("Loading triggers changed since " + new DateTime(lastUpdateTime).toString());
- Connection connection = getConnection();
-
- QueryRunner runner = new QueryRunner();
- ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
-
- List<Trigger> triggers;
-
- try {
- triggers = runner.query(connection, GET_UPDATED_TRIGGERS, handler, lastUpdateTime);
- } catch (SQLException e) {
- logger.error(GET_ALL_TRIGGERS + " failed.");
-
- throw new TriggerLoaderException("Loading triggers from db failed. ", e);
- } finally {
- DbUtils.closeQuietly(connection);
- }
-
- logger.info("Loaded " + triggers.size() + " triggers.");
-
- return triggers;
- }
-
- @Override
- public List<Trigger> loadTriggers() throws TriggerLoaderException {
- logger.info("Loading all triggers from db.");
- Connection connection = getConnection();
-
- QueryRunner runner = new QueryRunner();
- ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
-
- List<Trigger> triggers;
-
- try {
- triggers = runner.query(connection, GET_ALL_TRIGGERS, handler);
- } catch (SQLException e) {
- logger.error(GET_ALL_TRIGGERS + " failed.");
-
- throw new TriggerLoaderException("Loading triggers from db failed. ", e);
- } finally {
- DbUtils.closeQuietly(connection);
- }
-
- logger.info("Loaded " + triggers.size() + " triggers.");
-
- return triggers;
- }
-
- @Override
- public void removeTrigger(Trigger t) throws TriggerLoaderException {
- logger.info("Removing trigger " + t.toString() + " from db.");
-
- QueryRunner runner = createQueryRunner();
- try {
- int removes = runner.update(REMOVE_TRIGGER, t.getTriggerId());
- if (removes == 0) {
- throw new TriggerLoaderException("No trigger has been removed.");
- }
- } catch (SQLException e) {
- logger.error(REMOVE_TRIGGER + " failed.");
- throw new TriggerLoaderException("Remove trigger " + t.toString() + " from db failed. ", e);
- }
- }
-
- @Override
- public void addTrigger(Trigger t) throws TriggerLoaderException {
- logger.info("Inserting trigger " + t.toString() + " into db.");
- t.setLastModifyTime(System.currentTimeMillis());
- Connection connection = getConnection();
- try {
- addTrigger(connection, t, defaultEncodingType);
- }
- catch (Exception e) {
- throw new TriggerLoaderException("Error uploading trigger", e);
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
- }
-
- private synchronized void addTrigger(Connection connection, Trigger t, EncodingType encType) throws TriggerLoaderException {
-
- QueryRunner runner = new QueryRunner();
-
- long id;
-
- try {
- runner.update(connection, ADD_TRIGGER, DateTime.now().getMillis());
- connection.commit();
- id = runner.query(connection, LastInsertID.LAST_INSERT_ID, new LastInsertID());
-
- if (id == -1l) {
- logger.error("trigger id is not properly created.");
- throw new TriggerLoaderException("trigger id is not properly created.");
- }
-
- t.setTriggerId((int)id);
- updateTrigger(t);
- logger.info("uploaded trigger " + t.getDescription());
- } catch (SQLException e) {
- throw new TriggerLoaderException("Error creating trigger.", e);
- }
-
- }
-
- @Override
- public void updateTrigger(Trigger t) throws TriggerLoaderException {
- logger.info("Updating trigger " + t.getTriggerId() + " into db.");
- t.setLastModifyTime(System.currentTimeMillis());
- Connection connection = getConnection();
- try{
- updateTrigger(connection, t, defaultEncodingType);
- }
- catch(Exception e) {
- e.printStackTrace();
- throw new TriggerLoaderException("Failed to update trigger " + t.toString() + " into db!");
- }
- finally {
- DbUtils.closeQuietly(connection);
- }
- }
-
- private void updateTrigger(Connection connection, Trigger t, EncodingType encType) throws TriggerLoaderException {
-
- String json = JSONUtils.toJSON(t.toJson());
- byte[] data = null;
- try {
- byte[] stringData = json.getBytes("UTF-8");
- data = stringData;
-
- if (encType == EncodingType.GZIP) {
- data = GZIPUtils.gzipBytes(stringData);
- }
- logger.debug("NumChars: " + json.length() + " UTF-8:" + stringData.length + " Gzip:"+ data.length);
- }
- catch (IOException e) {
- throw new TriggerLoaderException("Error encoding the trigger " + t.toString());
- }
-
- QueryRunner runner = new QueryRunner();
-
- try {
- int updates = runner.update( connection,
- UPDATE_TRIGGER,
- t.getSource(),
- t.getLastModifyTime(),
- encType.getNumVal(),
- data,
- t.getTriggerId());
- connection.commit();
- if (updates == 0) {
- throw new TriggerLoaderException("No trigger has been updated.");
- //logger.error("No trigger is updated!");
- } else {
- logger.info("Updated " + updates + " records.");
- }
- } catch (SQLException e) {
- logger.error(UPDATE_TRIGGER + " failed.");
- throw new TriggerLoaderException("Update trigger " + t.toString() + " into db failed. ", e);
- }
- }
-
- private static class LastInsertID implements ResultSetHandler<Long> {
- private static String LAST_INSERT_ID = "SELECT LAST_INSERT_ID()";
-
- @Override
- public Long handle(ResultSet rs) throws SQLException {
- if (!rs.next()) {
- return -1l;
- }
-
- long id = rs.getLong(1);
- return id;
- }
-
- }
-
- public class TriggerResultHandler implements ResultSetHandler<List<Trigger>> {
-
- @Override
- public List<Trigger> handle(ResultSet rs) throws SQLException {
- if (!rs.next()) {
- return Collections.<Trigger>emptyList();
- }
-
- ArrayList<Trigger> triggers = new ArrayList<Trigger>();
- do {
- int triggerId = rs.getInt(1);
-// String triggerSource = rs.getString(2);
-// long modifyTime = rs.getLong(3);
- int encodingType = rs.getInt(4);
- byte[] data = rs.getBytes(5);
-
- Object jsonObj = null;
- if (data != null) {
- EncodingType encType = EncodingType.fromInteger(encodingType);
-
- try {
- // Convoluted way to inflate strings. Should find common package or helper function.
- if (encType == EncodingType.GZIP) {
- // Decompress the sucker.
- String jsonString = GZIPUtils.unGzipString(data, "UTF-8");
- jsonObj = JSONUtils.parseJSONFromString(jsonString);
- }
- else {
- String jsonString = new String(data, "UTF-8");
- jsonObj = JSONUtils.parseJSONFromString(jsonString);
- }
- } catch (IOException e) {
- throw new SQLException("Error reconstructing trigger data " );
- }
- }
-
- Trigger t = null;
- try {
- t = Trigger.fromJson(jsonObj);
- triggers.add(t);
- } catch (Exception e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- logger.error("Failed to load trigger " + triggerId);
- }
- } while (rs.next());
-
- return triggers;
- }
-
- }
-
- private Connection getConnection() throws TriggerLoaderException {
- Connection connection = null;
- try {
- connection = super.getDBConnection(false);
- } catch (Exception e) {
- DbUtils.closeQuietly(connection);
- throw new TriggerLoaderException("Error getting DB connection.", e);
- }
-
- return connection;
- }
-
- @Override
- public Trigger loadTrigger(int triggerId) throws TriggerLoaderException {
- logger.info("Loading trigger " + triggerId + " from db.");
- Connection connection = getConnection();
-
- QueryRunner runner = new QueryRunner();
- ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
-
- List<Trigger> triggers;
-
- try {
- triggers = runner.query(connection, GET_TRIGGER, handler, triggerId);
- } catch (SQLException e) {
- logger.error(GET_TRIGGER + " failed.");
- throw new TriggerLoaderException("Loading trigger from db failed. ", e);
- } finally {
- DbUtils.closeQuietly(connection);
- }
-
- if(triggers.size() == 0) {
- logger.error("Loaded 0 triggers. Failed to load trigger " + triggerId);
- throw new TriggerLoaderException("Loaded 0 triggers. Failed to load trigger " + triggerId);
- }
-
- return triggers.get(0);
- }
-
-
+public class JdbcTriggerLoader extends AbstractJdbcLoader implements
+ TriggerLoader {
+ private static Logger logger = Logger.getLogger(JdbcTriggerLoader.class);
+
+ private EncodingType defaultEncodingType = EncodingType.GZIP;
+
+ private static final String triggerTblName = "triggers";
+
+ private static final String GET_UPDATED_TRIGGERS =
+ "SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM "
+ + triggerTblName + " WHERE modify_time>=?";
+
+ private static String GET_ALL_TRIGGERS =
+ "SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM "
+ + triggerTblName;
+
+ private static String GET_TRIGGER =
+ "SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM "
+ + triggerTblName + " WHERE trigger_id=?";
+
+ private static String ADD_TRIGGER = "INSERT INTO " + triggerTblName
+ + " ( modify_time) values (?)";
+
+ private static String REMOVE_TRIGGER = "DELETE FROM " + triggerTblName
+ + " WHERE trigger_id=?";
+
+ private static String UPDATE_TRIGGER =
+ "UPDATE "
+ + triggerTblName
+ + " SET trigger_source=?, modify_time=?, enc_type=?, data=? WHERE trigger_id=?";
+
+ public EncodingType getDefaultEncodingType() {
+ return defaultEncodingType;
+ }
+
+ public void setDefaultEncodingType(EncodingType defaultEncodingType) {
+ this.defaultEncodingType = defaultEncodingType;
+ }
+
+ public JdbcTriggerLoader(Props props) {
+ super(props);
+ }
+
+ @Override
+ public List<Trigger> getUpdatedTriggers(long lastUpdateTime)
+ throws TriggerLoaderException {
+ logger.info("Loading triggers changed since "
+ + new DateTime(lastUpdateTime).toString());
+ Connection connection = getConnection();
+
+ QueryRunner runner = new QueryRunner();
+ ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
+
+ List<Trigger> triggers;
+
+ try {
+ triggers =
+ runner.query(connection, GET_UPDATED_TRIGGERS, handler,
+ lastUpdateTime);
+ } catch (SQLException e) {
+ logger.error(GET_ALL_TRIGGERS + " failed.");
+
+ throw new TriggerLoaderException("Loading triggers from db failed. ", e);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+
+ logger.info("Loaded " + triggers.size() + " triggers.");
+
+ return triggers;
+ }
+
+ @Override
+ public List<Trigger> loadTriggers() throws TriggerLoaderException {
+ logger.info("Loading all triggers from db.");
+ Connection connection = getConnection();
+
+ QueryRunner runner = new QueryRunner();
+ ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
+
+ List<Trigger> triggers;
+
+ try {
+ triggers = runner.query(connection, GET_ALL_TRIGGERS, handler);
+ } catch (SQLException e) {
+ logger.error(GET_ALL_TRIGGERS + " failed.");
+
+ throw new TriggerLoaderException("Loading triggers from db failed. ", e);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+
+ logger.info("Loaded " + triggers.size() + " triggers.");
+
+ return triggers;
+ }
+
+ @Override
+ public void removeTrigger(Trigger t) throws TriggerLoaderException {
+ logger.info("Removing trigger " + t.toString() + " from db.");
+
+ QueryRunner runner = createQueryRunner();
+ try {
+ int removes = runner.update(REMOVE_TRIGGER, t.getTriggerId());
+ if (removes == 0) {
+ throw new TriggerLoaderException("No trigger has been removed.");
+ }
+ } catch (SQLException e) {
+ logger.error(REMOVE_TRIGGER + " failed.");
+ throw new TriggerLoaderException("Remove trigger " + t.toString()
+ + " from db failed. ", e);
+ }
+ }
+
+ @Override
+ public void addTrigger(Trigger t) throws TriggerLoaderException {
+ logger.info("Inserting trigger " + t.toString() + " into db.");
+ t.setLastModifyTime(System.currentTimeMillis());
+ Connection connection = getConnection();
+ try {
+ addTrigger(connection, t, defaultEncodingType);
+ } catch (Exception e) {
+ throw new TriggerLoaderException("Error uploading trigger", e);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+ }
+
+ private synchronized void addTrigger(Connection connection, Trigger t,
+ EncodingType encType) throws TriggerLoaderException {
+
+ QueryRunner runner = new QueryRunner();
+
+ long id;
+
+ try {
+ runner.update(connection, ADD_TRIGGER, DateTime.now().getMillis());
+ connection.commit();
+ id =
+ runner.query(connection, LastInsertID.LAST_INSERT_ID,
+ new LastInsertID());
+
+ if (id == -1l) {
+ logger.error("trigger id is not properly created.");
+ throw new TriggerLoaderException("trigger id is not properly created.");
+ }
+
+ t.setTriggerId((int) id);
+ updateTrigger(t);
+ logger.info("uploaded trigger " + t.getDescription());
+ } catch (SQLException e) {
+ throw new TriggerLoaderException("Error creating trigger.", e);
+ }
+
+ }
+
+ @Override
+ public void updateTrigger(Trigger t) throws TriggerLoaderException {
+ logger.info("Updating trigger " + t.getTriggerId() + " into db.");
+ t.setLastModifyTime(System.currentTimeMillis());
+ Connection connection = getConnection();
+ try {
+ updateTrigger(connection, t, defaultEncodingType);
+ } catch (Exception e) {
+ e.printStackTrace();
+      throw new TriggerLoaderException("Failed to update trigger "
+          + t.toString() + " into db!", e);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+ }
+
+ private void updateTrigger(Connection connection, Trigger t,
+ EncodingType encType) throws TriggerLoaderException {
+
+ String json = JSONUtils.toJSON(t.toJson());
+ byte[] data = null;
+ try {
+ byte[] stringData = json.getBytes("UTF-8");
+ data = stringData;
+
+ if (encType == EncodingType.GZIP) {
+ data = GZIPUtils.gzipBytes(stringData);
+ }
+ logger.debug("NumChars: " + json.length() + " UTF-8:" + stringData.length
+ + " Gzip:" + data.length);
+ } catch (IOException e) {
+ throw new TriggerLoaderException("Error encoding the trigger "
+ + t.toString());
+ }
+
+ QueryRunner runner = new QueryRunner();
+
+ try {
+ int updates =
+ runner.update(connection, UPDATE_TRIGGER, t.getSource(),
+ t.getLastModifyTime(), encType.getNumVal(), data,
+ t.getTriggerId());
+ connection.commit();
+ if (updates == 0) {
+ throw new TriggerLoaderException("No trigger has been updated.");
+ // logger.error("No trigger is updated!");
+ } else {
+ logger.info("Updated " + updates + " records.");
+ }
+ } catch (SQLException e) {
+ logger.error(UPDATE_TRIGGER + " failed.");
+ throw new TriggerLoaderException("Update trigger " + t.toString()
+ + " into db failed. ", e);
+ }
+ }
+
+ private static class LastInsertID implements ResultSetHandler<Long> {
+ private static String LAST_INSERT_ID = "SELECT LAST_INSERT_ID()";
+
+ @Override
+ public Long handle(ResultSet rs) throws SQLException {
+ if (!rs.next()) {
+ return -1l;
+ }
+
+ long id = rs.getLong(1);
+ return id;
+ }
+
+ }
+
+ public class TriggerResultHandler implements ResultSetHandler<List<Trigger>> {
+
+ @Override
+ public List<Trigger> handle(ResultSet rs) throws SQLException {
+ if (!rs.next()) {
+ return Collections.<Trigger> emptyList();
+ }
+
+ ArrayList<Trigger> triggers = new ArrayList<Trigger>();
+ do {
+ int triggerId = rs.getInt(1);
+ // String triggerSource = rs.getString(2);
+ // long modifyTime = rs.getLong(3);
+ int encodingType = rs.getInt(4);
+ byte[] data = rs.getBytes(5);
+
+ Object jsonObj = null;
+ if (data != null) {
+ EncodingType encType = EncodingType.fromInteger(encodingType);
+
+ try {
+ // Convoluted way to inflate strings. Should find common package or
+ // helper function.
+ if (encType == EncodingType.GZIP) {
+ // Decompress the sucker.
+ String jsonString = GZIPUtils.unGzipString(data, "UTF-8");
+ jsonObj = JSONUtils.parseJSONFromString(jsonString);
+ } else {
+ String jsonString = new String(data, "UTF-8");
+ jsonObj = JSONUtils.parseJSONFromString(jsonString);
+ }
+ } catch (IOException e) {
+ throw new SQLException("Error reconstructing trigger data ");
+ }
+ }
+
+ Trigger t = null;
+ try {
+ t = Trigger.fromJson(jsonObj);
+ triggers.add(t);
+ } catch (Exception e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ logger.error("Failed to load trigger " + triggerId);
+ }
+ } while (rs.next());
+
+ return triggers;
+ }
+
+ }
+
+ private Connection getConnection() throws TriggerLoaderException {
+ Connection connection = null;
+ try {
+ connection = super.getDBConnection(false);
+ } catch (Exception e) {
+ DbUtils.closeQuietly(connection);
+ throw new TriggerLoaderException("Error getting DB connection.", e);
+ }
+
+ return connection;
+ }
+
+ @Override
+ public Trigger loadTrigger(int triggerId) throws TriggerLoaderException {
+ logger.info("Loading trigger " + triggerId + " from db.");
+ Connection connection = getConnection();
+
+ QueryRunner runner = new QueryRunner();
+ ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
+
+ List<Trigger> triggers;
+
+ try {
+ triggers = runner.query(connection, GET_TRIGGER, handler, triggerId);
+ } catch (SQLException e) {
+ logger.error(GET_TRIGGER + " failed.");
+ throw new TriggerLoaderException("Loading trigger from db failed. ", e);
+ } finally {
+ DbUtils.closeQuietly(connection);
+ }
+
+ if (triggers.size() == 0) {
+ logger.error("Loaded 0 triggers. Failed to load trigger " + triggerId);
+ throw new TriggerLoaderException(
+ "Loaded 0 triggers. Failed to load trigger " + triggerId);
+ }
+
+ return triggers.get(0);
+ }
}
src/main/java/azkaban/trigger/Trigger.java 853(+411 -442)
diff --git a/src/main/java/azkaban/trigger/Trigger.java b/src/main/java/azkaban/trigger/Trigger.java
index 740ce3d..968e9d8 100644
--- a/src/main/java/azkaban/trigger/Trigger.java
+++ b/src/main/java/azkaban/trigger/Trigger.java
@@ -27,446 +27,415 @@ import org.joda.time.DateTime;
import azkaban.utils.JSONUtils;
public class Trigger {
-
- private static Logger logger = Logger.getLogger(Trigger.class);
-
- private int triggerId = -1;
- private long lastModifyTime;
- private long submitTime;
- private String submitUser;
- private String source;
- private TriggerStatus status = TriggerStatus.READY;
-
- private Condition triggerCondition;
- private Condition expireCondition;
- private List<TriggerAction> actions;
- private List<TriggerAction> expireActions;
-
- private Map<String, Object> info = new HashMap<String, Object>();
- private Map<String, Object> context = new HashMap<String, Object>();
-
- private static ActionTypeLoader actionTypeLoader;
-
- private boolean resetOnTrigger = true;
- private boolean resetOnExpire = true;
-
- private long nextCheckTime = -1;
-
- @SuppressWarnings("unused")
- private Trigger() throws TriggerManagerException {
- throw new TriggerManagerException("Triggers should always be specified");
- }
-
- public void updateNextCheckTime() {
- this.nextCheckTime = Math.min(triggerCondition.getNextCheckTime(), expireCondition.getNextCheckTime());
- }
-
- public long getNextCheckTime() {
- return nextCheckTime;
- }
-
- public void setNextCheckTime(long nct) {
- this.nextCheckTime = nct;
- }
-
- public long getSubmitTime() {
- return submitTime;
- }
-
- public String getSubmitUser() {
- return submitUser;
- }
-
- public TriggerStatus getStatus() {
- return status;
- }
-
- public void setStatus(TriggerStatus status) {
- this.status = status;
- }
-
- public Condition getTriggerCondition() {
- return triggerCondition;
- }
-
- public Condition getExpireCondition() {
- return expireCondition;
- }
-
- public List<TriggerAction> getActions() {
- return actions;
- }
-
- public List<TriggerAction> getExpireActions() {
- return expireActions;
- }
-
- public Map<String, Object> getInfo() {
- return info;
- }
-
- public void setInfo(Map<String, Object> info) {
- this.info = info;
- }
-
- public Map<String, Object> getContext() {
- return context;
- }
-
- public void setContext(Map<String, Object> context) {
- this.context = context;
- }
-
- public Trigger(
- long lastModifyTime,
- long submitTime,
- String submitUser,
- String source,
- Condition triggerCondition,
- Condition expireCondition,
- List<TriggerAction> actions,
- List<TriggerAction> expireActions,
- Map<String, Object> info,
- Map<String, Object> context) {
- this.lastModifyTime = lastModifyTime;
- this.submitTime = submitTime;
- this.submitUser = submitUser;
- this.source = source;
- this.triggerCondition = triggerCondition;
- this.expireCondition = expireCondition;
- this.actions = actions;
- this.expireActions = expireActions;
- this.info = info;
- this.context = context;
- }
-
- public Trigger(
- long lastModifyTime,
- long submitTime,
- String submitUser,
- String source,
- Condition triggerCondition,
- Condition expireCondition,
- List<TriggerAction> actions,
- List<TriggerAction> expireActions) {
- this.lastModifyTime = lastModifyTime;
- this.submitTime = submitTime;
- this.submitUser = submitUser;
- this.source = source;
- this.triggerCondition = triggerCondition;
- this.expireCondition = expireCondition;
- this.actions = actions;
- this.expireActions = expireActions;
- }
-
- public Trigger(
- String submitUser,
- String source,
- Condition triggerCondition,
- Condition expireCondition,
- List<TriggerAction> actions,
- List<TriggerAction> expireActions) {
- this.lastModifyTime = DateTime.now().getMillis();
- this.submitTime = DateTime.now().getMillis();
- this.submitUser = submitUser;
- this.source = source;
- this.triggerCondition = triggerCondition;
- this.expireCondition = expireCondition;
- this.actions = actions;
- this.expireActions = expireActions;
- }
-
- public Trigger(
- String submitUser,
- String source,
- Condition triggerCondition,
- Condition expireCondition,
- List<TriggerAction> actions) {
- this.lastModifyTime = DateTime.now().getMillis();
- this.submitTime = DateTime.now().getMillis();
- this.submitUser = submitUser;
- this.source = source;
- this.triggerCondition = triggerCondition;
- this.expireCondition = expireCondition;
- this.actions = actions;
- this.expireActions = new ArrayList<TriggerAction>();
- }
-
- public Trigger(
- long lastModifyTime,
- long submitTime,
- String submitUser,
- String source,
- Condition triggerCondition,
- Condition expireCondition,
- List<TriggerAction> actions) {
- this.lastModifyTime = lastModifyTime;
- this.submitTime = submitTime;
- this.submitUser = submitUser;
- this.source = source;
- this.triggerCondition = triggerCondition;
- this.expireCondition = expireCondition;
- this.actions = actions;
- this.expireActions = new ArrayList<TriggerAction>();
- }
-
- public Trigger(
- int triggerId,
- long lastModifyTime,
- long submitTime,
- String submitUser,
- String source,
- Condition triggerCondition,
- Condition expireCondition,
- List<TriggerAction> actions,
- List<TriggerAction> expireActions,
- Map<String, Object> info,
- Map<String, Object> context) {
- this.triggerId = triggerId;
- this.lastModifyTime = lastModifyTime;
- this.submitTime = submitTime;
- this.submitUser = submitUser;
- this.source = source;
- this.triggerCondition = triggerCondition;
- this.expireCondition = expireCondition;
- this.actions = actions;
- this.expireActions = expireActions;
- this.info = info;
- this.context = context;
- }
-
- public Trigger(
- int triggerId,
- long lastModifyTime,
- long submitTime,
- String submitUser,
- String source,
- Condition triggerCondition,
- Condition expireCondition,
- List<TriggerAction> actions,
- List<TriggerAction> expireActions) {
- this.triggerId = triggerId;
- this.lastModifyTime = lastModifyTime;
- this.submitTime = submitTime;
- this.submitUser = submitUser;
- this.source = source;
- this.triggerCondition = triggerCondition;
- this.expireCondition = expireCondition;
- this.actions = actions;
- this.expireActions = expireActions;
- }
-
- public Trigger(
- int triggerId,
- long lastModifyTime,
- long submitTime,
- String submitUser,
- String source,
- Condition triggerCondition,
- Condition expireCondition,
- List<TriggerAction> actions) {
- this.triggerId = triggerId;
- this.lastModifyTime = lastModifyTime;
- this.submitTime = submitTime;
- this.submitUser = submitUser;
- this.source = source;
- this.triggerCondition = triggerCondition;
- this.expireCondition = expireCondition;
- this.actions = actions;
- this.expireActions = new ArrayList<TriggerAction>();
- }
-
- public static synchronized void setActionTypeLoader(ActionTypeLoader loader) {
- Trigger.actionTypeLoader = loader;
- }
-
- public static ActionTypeLoader getActionTypeLoader() {
- return actionTypeLoader;
- }
-
- public boolean isResetOnTrigger() {
- return resetOnTrigger;
- }
-
- public void setResetOnTrigger(boolean resetOnTrigger) {
- this.resetOnTrigger = resetOnTrigger;
- }
-
- public boolean isResetOnExpire() {
- return resetOnExpire;
- }
-
- public void setResetOnExpire(boolean resetOnExpire) {
- this.resetOnExpire = resetOnExpire;
- }
-
- public long getLastModifyTime() {
- return lastModifyTime;
- }
-
- public void setLastModifyTime(long lastModifyTime) {
- this.lastModifyTime = lastModifyTime;
- }
-
- public void setTriggerId(int id) {
- this.triggerId = id;
- }
-
- public int getTriggerId() {
- return triggerId;
- }
-
- public boolean triggerConditionMet(){
- return triggerCondition.isMet();
- }
-
- public boolean expireConditionMet(){
- return expireCondition.isMet();
- }
-
- public void resetTriggerConditions() {
- triggerCondition.resetCheckers();
- updateNextCheckTime();
- }
-
- public void resetExpireCondition() {
- expireCondition.resetCheckers();
- updateNextCheckTime();
- }
-
- public List<TriggerAction> getTriggerActions () {
- return actions;
- }
-
- public Map<String, Object> toJson() {
- Map<String, Object> jsonObj = new HashMap<String, Object>();
- jsonObj.put("triggerCondition", triggerCondition.toJson());
- jsonObj.put("expireCondition", expireCondition.toJson());
- List<Object> actionsJson = new ArrayList<Object>();
- for(TriggerAction action : actions) {
- Map<String, Object> oneActionJson = new HashMap<String, Object>();
- oneActionJson.put("type", action.getType());
- oneActionJson.put("actionJson", action.toJson());
- actionsJson.add(oneActionJson);
- }
- jsonObj.put("actions", actionsJson);
- List<Object> expireActionsJson = new ArrayList<Object>();
- for(TriggerAction expireAction : expireActions) {
- Map<String, Object> oneExpireActionJson = new HashMap<String, Object>();
- oneExpireActionJson.put("type", expireAction.getType());
- oneExpireActionJson.put("actionJson", expireAction.toJson());
- expireActionsJson.add(oneExpireActionJson);
- }
- jsonObj.put("expireActions", expireActionsJson);
-
- jsonObj.put("resetOnTrigger", String.valueOf(resetOnTrigger));
- jsonObj.put("resetOnExpire", String.valueOf(resetOnExpire));
- jsonObj.put("submitUser", submitUser);
- jsonObj.put("source", source);
- jsonObj.put("submitTime", String.valueOf(submitTime));
- jsonObj.put("lastModifyTime", String.valueOf(lastModifyTime));
- jsonObj.put("triggerId", String.valueOf(triggerId));
- jsonObj.put("status", status.toString());
- jsonObj.put("info", info);
- jsonObj.put("context", context);
- return jsonObj;
- }
-
-
- public String getSource() {
- return source;
- }
-
- @SuppressWarnings("unchecked")
- public static Trigger fromJson(Object obj) throws Exception {
-
- if(actionTypeLoader == null) {
- throw new Exception("Trigger Action Type loader not initialized.");
- }
-
- Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
-
- Trigger trigger = null;
- try{
- logger.info("Decoding for " + JSONUtils.toJSON(obj));
- Condition triggerCond = Condition.fromJson(jsonObj.get("triggerCondition"));
- Condition expireCond = Condition.fromJson(jsonObj.get("expireCondition"));
- List<TriggerAction> actions = new ArrayList<TriggerAction>();
- List<Object> actionsJson = (List<Object>) jsonObj.get("actions");
- for(Object actObj : actionsJson) {
- Map<String, Object> oneActionJson = (HashMap<String, Object>) actObj;
- String type = (String) oneActionJson.get("type");
- TriggerAction act = actionTypeLoader.createActionFromJson(type, oneActionJson.get("actionJson"));
- actions.add(act);
- }
- List<TriggerAction> expireActions = new ArrayList<TriggerAction>();
- List<Object> expireActionsJson = (List<Object>) jsonObj.get("expireActions");
- for(Object expireActObj : expireActionsJson) {
- Map<String, Object> oneExpireActionJson = (HashMap<String, Object>) expireActObj;
- String type = (String) oneExpireActionJson.get("type");
- TriggerAction expireAct = actionTypeLoader.createActionFromJson(type, oneExpireActionJson.get("actionJson"));
- expireActions.add(expireAct);
- }
- boolean resetOnTrigger = Boolean.valueOf((String) jsonObj.get("resetOnTrigger"));
- boolean resetOnExpire = Boolean.valueOf((String) jsonObj.get("resetOnExpire"));
- String submitUser = (String) jsonObj.get("submitUser");
- String source = (String) jsonObj.get("source");
- long submitTime = Long.valueOf((String) jsonObj.get("submitTime"));
- long lastModifyTime = Long.valueOf((String) jsonObj.get("lastModifyTime"));
- int triggerId = Integer.valueOf((String) jsonObj.get("triggerId"));
- TriggerStatus status = TriggerStatus.valueOf((String)jsonObj.get("status"));
- Map<String, Object> info = (Map<String, Object>) jsonObj.get("info");
- Map<String, Object> context = (Map<String, Object>) jsonObj.get("context");
- if(context == null) {
- context = new HashMap<String, Object>();
- }
- for(ConditionChecker checker : triggerCond.getCheckers().values()) {
- checker.setContext(context);
- }
- for(ConditionChecker checker : expireCond.getCheckers().values()) {
- checker.setContext(context);
- }
- for(TriggerAction action : actions) {
- action.setContext(context);
- }
- for(TriggerAction action : expireActions) {
- action.setContext(context);
- }
-
- trigger = new Trigger(triggerId, lastModifyTime, submitTime, submitUser, source, triggerCond, expireCond, actions, expireActions, info, context);
- trigger.setResetOnExpire(resetOnExpire);
- trigger.setResetOnTrigger(resetOnTrigger);
- trigger.setStatus(status);
- }catch(Exception e) {
- e.printStackTrace();
- logger.error("Failed to decode the trigger.", e);
- throw new Exception("Failed to decode the trigger.", e);
- }
-
- return trigger;
- }
-
- public String getDescription() {
- StringBuffer actionsString = new StringBuffer();
- for(TriggerAction act : actions) {
- actionsString.append(", ");
- actionsString.append(act.getDescription());
- }
- return "Trigger from " + getSource() +
- " with trigger condition of " + triggerCondition.getExpression() +
- " and expire condition of " + expireCondition.getExpression() +
- actionsString;
- }
-
- public void stopCheckers() {
- for(ConditionChecker checker : triggerCondition.getCheckers().values()) {
- checker.stopChecker();
- }
- for(ConditionChecker checker : expireCondition.getCheckers().values()) {
- checker.stopChecker();
- }
-
- }
-
-
+
+ private static Logger logger = Logger.getLogger(Trigger.class);
+
+ private int triggerId = -1;
+ private long lastModifyTime;
+ private long submitTime;
+ private String submitUser;
+ private String source;
+ private TriggerStatus status = TriggerStatus.READY;
+
+ private Condition triggerCondition;
+ private Condition expireCondition;
+ private List<TriggerAction> actions;
+ private List<TriggerAction> expireActions;
+
+ private Map<String, Object> info = new HashMap<String, Object>();
+ private Map<String, Object> context = new HashMap<String, Object>();
+
+ private static ActionTypeLoader actionTypeLoader;
+
+ private boolean resetOnTrigger = true;
+ private boolean resetOnExpire = true;
+
+ private long nextCheckTime = -1;
+
+ @SuppressWarnings("unused")
+ private Trigger() throws TriggerManagerException {
+ throw new TriggerManagerException("Triggers should always be specified");
+ }
+
+ public void updateNextCheckTime() {
+ this.nextCheckTime =
+ Math.min(triggerCondition.getNextCheckTime(),
+ expireCondition.getNextCheckTime());
+ }
+
+ public long getNextCheckTime() {
+ return nextCheckTime;
+ }
+
+ public void setNextCheckTime(long nct) {
+ this.nextCheckTime = nct;
+ }
+
+ public long getSubmitTime() {
+ return submitTime;
+ }
+
+ public String getSubmitUser() {
+ return submitUser;
+ }
+
+ public TriggerStatus getStatus() {
+ return status;
+ }
+
+ public void setStatus(TriggerStatus status) {
+ this.status = status;
+ }
+
+ public Condition getTriggerCondition() {
+ return triggerCondition;
+ }
+
+ public Condition getExpireCondition() {
+ return expireCondition;
+ }
+
+ public List<TriggerAction> getActions() {
+ return actions;
+ }
+
+ public List<TriggerAction> getExpireActions() {
+ return expireActions;
+ }
+
+ public Map<String, Object> getInfo() {
+ return info;
+ }
+
+ public void setInfo(Map<String, Object> info) {
+ this.info = info;
+ }
+
+ public Map<String, Object> getContext() {
+ return context;
+ }
+
+ public void setContext(Map<String, Object> context) {
+ this.context = context;
+ }
+
+ public Trigger(long lastModifyTime, long submitTime, String submitUser,
+ String source, Condition triggerCondition, Condition expireCondition,
+ List<TriggerAction> actions, List<TriggerAction> expireActions,
+ Map<String, Object> info, Map<String, Object> context) {
+ this.lastModifyTime = lastModifyTime;
+ this.submitTime = submitTime;
+ this.submitUser = submitUser;
+ this.source = source;
+ this.triggerCondition = triggerCondition;
+ this.expireCondition = expireCondition;
+ this.actions = actions;
+ this.expireActions = expireActions;
+ this.info = info;
+ this.context = context;
+ }
+
+ public Trigger(long lastModifyTime, long submitTime, String submitUser,
+ String source, Condition triggerCondition, Condition expireCondition,
+ List<TriggerAction> actions, List<TriggerAction> expireActions) {
+ this.lastModifyTime = lastModifyTime;
+ this.submitTime = submitTime;
+ this.submitUser = submitUser;
+ this.source = source;
+ this.triggerCondition = triggerCondition;
+ this.expireCondition = expireCondition;
+ this.actions = actions;
+ this.expireActions = expireActions;
+ }
+
+ public Trigger(String submitUser, String source, Condition triggerCondition,
+ Condition expireCondition, List<TriggerAction> actions,
+ List<TriggerAction> expireActions) {
+ this.lastModifyTime = DateTime.now().getMillis();
+ this.submitTime = DateTime.now().getMillis();
+ this.submitUser = submitUser;
+ this.source = source;
+ this.triggerCondition = triggerCondition;
+ this.expireCondition = expireCondition;
+ this.actions = actions;
+ this.expireActions = expireActions;
+ }
+
+ public Trigger(String submitUser, String source, Condition triggerCondition,
+ Condition expireCondition, List<TriggerAction> actions) {
+ this.lastModifyTime = DateTime.now().getMillis();
+ this.submitTime = DateTime.now().getMillis();
+ this.submitUser = submitUser;
+ this.source = source;
+ this.triggerCondition = triggerCondition;
+ this.expireCondition = expireCondition;
+ this.actions = actions;
+ this.expireActions = new ArrayList<TriggerAction>();
+ }
+
+ public Trigger(long lastModifyTime, long submitTime, String submitUser,
+ String source, Condition triggerCondition, Condition expireCondition,
+ List<TriggerAction> actions) {
+ this.lastModifyTime = lastModifyTime;
+ this.submitTime = submitTime;
+ this.submitUser = submitUser;
+ this.source = source;
+ this.triggerCondition = triggerCondition;
+ this.expireCondition = expireCondition;
+ this.actions = actions;
+ this.expireActions = new ArrayList<TriggerAction>();
+ }
+
+ public Trigger(int triggerId, long lastModifyTime, long submitTime,
+ String submitUser, String source, Condition triggerCondition,
+ Condition expireCondition, List<TriggerAction> actions,
+ List<TriggerAction> expireActions, Map<String, Object> info,
+ Map<String, Object> context) {
+ this.triggerId = triggerId;
+ this.lastModifyTime = lastModifyTime;
+ this.submitTime = submitTime;
+ this.submitUser = submitUser;
+ this.source = source;
+ this.triggerCondition = triggerCondition;
+ this.expireCondition = expireCondition;
+ this.actions = actions;
+ this.expireActions = expireActions;
+ this.info = info;
+ this.context = context;
+ }
+
+ public Trigger(int triggerId, long lastModifyTime, long submitTime,
+ String submitUser, String source, Condition triggerCondition,
+ Condition expireCondition, List<TriggerAction> actions,
+ List<TriggerAction> expireActions) {
+ this.triggerId = triggerId;
+ this.lastModifyTime = lastModifyTime;
+ this.submitTime = submitTime;
+ this.submitUser = submitUser;
+ this.source = source;
+ this.triggerCondition = triggerCondition;
+ this.expireCondition = expireCondition;
+ this.actions = actions;
+ this.expireActions = expireActions;
+ }
+
+ public Trigger(int triggerId, long lastModifyTime, long submitTime,
+ String submitUser, String source, Condition triggerCondition,
+ Condition expireCondition, List<TriggerAction> actions) {
+ this.triggerId = triggerId;
+ this.lastModifyTime = lastModifyTime;
+ this.submitTime = submitTime;
+ this.submitUser = submitUser;
+ this.source = source;
+ this.triggerCondition = triggerCondition;
+ this.expireCondition = expireCondition;
+ this.actions = actions;
+ this.expireActions = new ArrayList<TriggerAction>();
+ }
+
+ public static synchronized void setActionTypeLoader(ActionTypeLoader loader) {
+ Trigger.actionTypeLoader = loader;
+ }
+
+ public static ActionTypeLoader getActionTypeLoader() {
+ return actionTypeLoader;
+ }
+
+ public boolean isResetOnTrigger() {
+ return resetOnTrigger;
+ }
+
+ public void setResetOnTrigger(boolean resetOnTrigger) {
+ this.resetOnTrigger = resetOnTrigger;
+ }
+
+ public boolean isResetOnExpire() {
+ return resetOnExpire;
+ }
+
+ public void setResetOnExpire(boolean resetOnExpire) {
+ this.resetOnExpire = resetOnExpire;
+ }
+
+ public long getLastModifyTime() {
+ return lastModifyTime;
+ }
+
+ public void setLastModifyTime(long lastModifyTime) {
+ this.lastModifyTime = lastModifyTime;
+ }
+
+ public void setTriggerId(int id) {
+ this.triggerId = id;
+ }
+
+ public int getTriggerId() {
+ return triggerId;
+ }
+
+ public boolean triggerConditionMet() {
+ return triggerCondition.isMet();
+ }
+
+ public boolean expireConditionMet() {
+ return expireCondition.isMet();
+ }
+
+ public void resetTriggerConditions() {
+ triggerCondition.resetCheckers();
+ updateNextCheckTime();
+ }
+
+ public void resetExpireCondition() {
+ expireCondition.resetCheckers();
+ updateNextCheckTime();
+ }
+
+ public List<TriggerAction> getTriggerActions() {
+ return actions;
+ }
+
+ public Map<String, Object> toJson() {
+ Map<String, Object> jsonObj = new HashMap<String, Object>();
+ jsonObj.put("triggerCondition", triggerCondition.toJson());
+ jsonObj.put("expireCondition", expireCondition.toJson());
+ List<Object> actionsJson = new ArrayList<Object>();
+ for (TriggerAction action : actions) {
+ Map<String, Object> oneActionJson = new HashMap<String, Object>();
+ oneActionJson.put("type", action.getType());
+ oneActionJson.put("actionJson", action.toJson());
+ actionsJson.add(oneActionJson);
+ }
+ jsonObj.put("actions", actionsJson);
+ List<Object> expireActionsJson = new ArrayList<Object>();
+ for (TriggerAction expireAction : expireActions) {
+ Map<String, Object> oneExpireActionJson = new HashMap<String, Object>();
+ oneExpireActionJson.put("type", expireAction.getType());
+ oneExpireActionJson.put("actionJson", expireAction.toJson());
+ expireActionsJson.add(oneExpireActionJson);
+ }
+ jsonObj.put("expireActions", expireActionsJson);
+
+ jsonObj.put("resetOnTrigger", String.valueOf(resetOnTrigger));
+ jsonObj.put("resetOnExpire", String.valueOf(resetOnExpire));
+ jsonObj.put("submitUser", submitUser);
+ jsonObj.put("source", source);
+ jsonObj.put("submitTime", String.valueOf(submitTime));
+ jsonObj.put("lastModifyTime", String.valueOf(lastModifyTime));
+ jsonObj.put("triggerId", String.valueOf(triggerId));
+ jsonObj.put("status", status.toString());
+ jsonObj.put("info", info);
+ jsonObj.put("context", context);
+ return jsonObj;
+ }
+
+ public String getSource() {
+ return source;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static Trigger fromJson(Object obj) throws Exception {
+
+ if (actionTypeLoader == null) {
+ throw new Exception("Trigger Action Type loader not initialized.");
+ }
+
+ Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+
+ Trigger trigger = null;
+ try {
+ logger.info("Decoding for " + JSONUtils.toJSON(obj));
+ Condition triggerCond =
+ Condition.fromJson(jsonObj.get("triggerCondition"));
+ Condition expireCond = Condition.fromJson(jsonObj.get("expireCondition"));
+ List<TriggerAction> actions = new ArrayList<TriggerAction>();
+ List<Object> actionsJson = (List<Object>) jsonObj.get("actions");
+ for (Object actObj : actionsJson) {
+ Map<String, Object> oneActionJson = (HashMap<String, Object>) actObj;
+ String type = (String) oneActionJson.get("type");
+ TriggerAction act =
+ actionTypeLoader.createActionFromJson(type,
+ oneActionJson.get("actionJson"));
+ actions.add(act);
+ }
+ List<TriggerAction> expireActions = new ArrayList<TriggerAction>();
+ List<Object> expireActionsJson =
+ (List<Object>) jsonObj.get("expireActions");
+ for (Object expireActObj : expireActionsJson) {
+ Map<String, Object> oneExpireActionJson =
+ (HashMap<String, Object>) expireActObj;
+ String type = (String) oneExpireActionJson.get("type");
+ TriggerAction expireAct =
+ actionTypeLoader.createActionFromJson(type,
+ oneExpireActionJson.get("actionJson"));
+ expireActions.add(expireAct);
+ }
+ boolean resetOnTrigger =
+ Boolean.valueOf((String) jsonObj.get("resetOnTrigger"));
+ boolean resetOnExpire =
+ Boolean.valueOf((String) jsonObj.get("resetOnExpire"));
+ String submitUser = (String) jsonObj.get("submitUser");
+ String source = (String) jsonObj.get("source");
+ long submitTime = Long.valueOf((String) jsonObj.get("submitTime"));
+ long lastModifyTime =
+ Long.valueOf((String) jsonObj.get("lastModifyTime"));
+ int triggerId = Integer.valueOf((String) jsonObj.get("triggerId"));
+ TriggerStatus status =
+ TriggerStatus.valueOf((String) jsonObj.get("status"));
+ Map<String, Object> info = (Map<String, Object>) jsonObj.get("info");
+ Map<String, Object> context =
+ (Map<String, Object>) jsonObj.get("context");
+ if (context == null) {
+ context = new HashMap<String, Object>();
+ }
+ for (ConditionChecker checker : triggerCond.getCheckers().values()) {
+ checker.setContext(context);
+ }
+ for (ConditionChecker checker : expireCond.getCheckers().values()) {
+ checker.setContext(context);
+ }
+ for (TriggerAction action : actions) {
+ action.setContext(context);
+ }
+ for (TriggerAction action : expireActions) {
+ action.setContext(context);
+ }
+
+ trigger =
+ new Trigger(triggerId, lastModifyTime, submitTime, submitUser,
+ source, triggerCond, expireCond, actions, expireActions, info,
+ context);
+ trigger.setResetOnExpire(resetOnExpire);
+ trigger.setResetOnTrigger(resetOnTrigger);
+ trigger.setStatus(status);
+ } catch (Exception e) {
+ e.printStackTrace();
+ logger.error("Failed to decode the trigger.", e);
+ throw new Exception("Failed to decode the trigger.", e);
+ }
+
+ return trigger;
+ }
+
+ public String getDescription() {
+ StringBuffer actionsString = new StringBuffer();
+ for (TriggerAction act : actions) {
+ actionsString.append(", ");
+ actionsString.append(act.getDescription());
+ }
+ return "Trigger from " + getSource() + " with trigger condition of "
+ + triggerCondition.getExpression() + " and expire condition of "
+ + expireCondition.getExpression() + actionsString;
+ }
+
+ public void stopCheckers() {
+ for (ConditionChecker checker : triggerCondition.getCheckers().values()) {
+ checker.stopChecker();
+ }
+ for (ConditionChecker checker : expireCondition.getCheckers().values()) {
+ checker.stopChecker();
+ }
+
+ }
+
}
src/main/java/azkaban/trigger/TriggerAction.java 30(+15 -15)
diff --git a/src/main/java/azkaban/trigger/TriggerAction.java b/src/main/java/azkaban/trigger/TriggerAction.java
index e496080..367b84c 100644
--- a/src/main/java/azkaban/trigger/TriggerAction.java
+++ b/src/main/java/azkaban/trigger/TriggerAction.java
@@ -19,19 +19,19 @@ package azkaban.trigger;
import java.util.Map;
public interface TriggerAction {
-
- String getId();
-
- String getType();
-
- TriggerAction fromJson(Object obj) throws Exception;
-
- Object toJson();
-
- void doAction() throws Exception;
-
- void setContext(Map<String, Object> context);
-
- String getDescription();
-
+
+ String getId();
+
+ String getType();
+
+ TriggerAction fromJson(Object obj) throws Exception;
+
+ Object toJson();
+
+ void doAction() throws Exception;
+
+ void setContext(Map<String, Object> context);
+
+ String getDescription();
+
}
diff --git a/src/main/java/azkaban/trigger/TriggerAgent.java b/src/main/java/azkaban/trigger/TriggerAgent.java
index 91b27b3..b467a1c 100644
--- a/src/main/java/azkaban/trigger/TriggerAgent.java
+++ b/src/main/java/azkaban/trigger/TriggerAgent.java
@@ -19,12 +19,12 @@ package azkaban.trigger;
import azkaban.utils.Props;
public interface TriggerAgent {
- public void loadTriggerFromProps(Props props) throws Exception;
+ public void loadTriggerFromProps(Props props) throws Exception;
- public String getTriggerSource();
-
- public void start() throws Exception;
-
- public void shutdown();
+ public String getTriggerSource();
+
+ public void start() throws Exception;
+
+ public void shutdown();
}
diff --git a/src/main/java/azkaban/trigger/TriggerException.java b/src/main/java/azkaban/trigger/TriggerException.java
index dbe8283..a4539bb 100644
--- a/src/main/java/azkaban/trigger/TriggerException.java
+++ b/src/main/java/azkaban/trigger/TriggerException.java
@@ -17,18 +17,17 @@
package azkaban.trigger;
public class TriggerException extends Exception {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 1L;
- public TriggerException(String message) {
- super(message);
- }
-
- public TriggerException(String message, Throwable cause) {
- super(message, cause);
- }
+ public TriggerException(String message) {
+ super(message);
+ }
- public TriggerException(Throwable e) {
- super(e);
- }
-}
+ public TriggerException(String message, Throwable cause) {
+ super(message, cause);
+ }
+ public TriggerException(Throwable e) {
+ super(e);
+ }
+}
diff --git a/src/main/java/azkaban/trigger/TriggerLoader.java b/src/main/java/azkaban/trigger/TriggerLoader.java
index 5af0954..662bbfd 100644
--- a/src/main/java/azkaban/trigger/TriggerLoader.java
+++ b/src/main/java/azkaban/trigger/TriggerLoader.java
@@ -20,16 +20,17 @@ import java.util.List;
public interface TriggerLoader {
- public void addTrigger(Trigger t) throws TriggerLoaderException;
+ public void addTrigger(Trigger t) throws TriggerLoaderException;
- public void removeTrigger(Trigger s) throws TriggerLoaderException;
-
- public void updateTrigger(Trigger t) throws TriggerLoaderException;
-
- public List<Trigger> loadTriggers() throws TriggerLoaderException;
+ public void removeTrigger(Trigger s) throws TriggerLoaderException;
- public Trigger loadTrigger(int triggerId) throws TriggerLoaderException;
+ public void updateTrigger(Trigger t) throws TriggerLoaderException;
+
+ public List<Trigger> loadTriggers() throws TriggerLoaderException;
+
+ public Trigger loadTrigger(int triggerId) throws TriggerLoaderException;
+
+ public List<Trigger> getUpdatedTriggers(long lastUpdateTime)
+ throws TriggerLoaderException;
- public List<Trigger> getUpdatedTriggers(long lastUpdateTime) throws TriggerLoaderException;
-
}
diff --git a/src/main/java/azkaban/trigger/TriggerLoaderException.java b/src/main/java/azkaban/trigger/TriggerLoaderException.java
index 0265984..b45d916 100644
--- a/src/main/java/azkaban/trigger/TriggerLoaderException.java
+++ b/src/main/java/azkaban/trigger/TriggerLoaderException.java
@@ -16,18 +16,18 @@
package azkaban.trigger;
-public class TriggerLoaderException extends Exception{
- private static final long serialVersionUID = 1L;
+public class TriggerLoaderException extends Exception {
+ private static final long serialVersionUID = 1L;
- public TriggerLoaderException(String message) {
- super(message);
- }
-
- public TriggerLoaderException(String message, Throwable cause) {
- super(message, cause);
- }
-
- public TriggerLoaderException(Throwable e) {
- super(e);
- }
+ public TriggerLoaderException(String message) {
+ super(message);
+ }
+
+ public TriggerLoaderException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+ public TriggerLoaderException(Throwable e) {
+ super(e);
+ }
}
src/main/java/azkaban/trigger/TriggerManager.java 964(+495 -469)
diff --git a/src/main/java/azkaban/trigger/TriggerManager.java b/src/main/java/azkaban/trigger/TriggerManager.java
index 5d85be6..ea058f0 100644
--- a/src/main/java/azkaban/trigger/TriggerManager.java
+++ b/src/main/java/azkaban/trigger/TriggerManager.java
@@ -36,473 +36,499 @@ import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutorManager;
import azkaban.utils.Props;
-public class TriggerManager extends EventHandler implements TriggerManagerAdapter{
- private static Logger logger = Logger.getLogger(TriggerManager.class);
- public static final long DEFAULT_SCANNER_INTERVAL_MS = 60000;
-
- private static Map<Integer, Trigger> triggerIdMap = new ConcurrentHashMap<Integer, Trigger>();
-
- private CheckerTypeLoader checkerTypeLoader;
- private ActionTypeLoader actionTypeLoader;
- private TriggerLoader triggerLoader;
-
- private final TriggerScannerThread runnerThread;
- private long lastRunnerThreadCheckTime = -1;
- private long runnerThreadIdleTime = -1;
- private LocalTriggerJMX jmxStats = new LocalTriggerJMX();
-
- private ExecutorManagerEventListener listener = new ExecutorManagerEventListener();
-
- private final Object syncObj = new Object();
-
- private String scannerStage = "";
-
- public TriggerManager(Props props, TriggerLoader triggerLoader, ExecutorManager executorManager) throws TriggerManagerException {
-
- this.triggerLoader = triggerLoader;
-
- long scannerInterval = props.getLong("trigger.scan.interval", DEFAULT_SCANNER_INTERVAL_MS);
- runnerThread = new TriggerScannerThread(scannerInterval);
-
- checkerTypeLoader = new CheckerTypeLoader();
- actionTypeLoader = new ActionTypeLoader();
-
- try {
- checkerTypeLoader.init(props);
- actionTypeLoader.init(props);
- } catch (Exception e) {
- throw new TriggerManagerException(e);
- }
-
- Condition.setCheckerLoader(checkerTypeLoader);
- Trigger.setActionTypeLoader(actionTypeLoader);
-
- executorManager.addListener(listener);
-
- logger.info("TriggerManager loaded.");
- }
-
- @Override
- public void start() throws TriggerManagerException{
-
- try {
- // expect loader to return valid triggers
- List<Trigger> triggers = triggerLoader.loadTriggers();
- for(Trigger t : triggers) {
- runnerThread.addTrigger(t);
- triggerIdMap.put(t.getTriggerId(), t);
- }
- }catch(Exception e) {
- e.printStackTrace();
- throw new TriggerManagerException(e);
- }
-
- runnerThread.start();
- }
-
- protected CheckerTypeLoader getCheckerLoader() {
- return checkerTypeLoader;
- }
-
- protected ActionTypeLoader getActionLoader() {
- return actionTypeLoader;
- }
-
- public void insertTrigger(Trigger t) throws TriggerManagerException {
- synchronized (syncObj) {
- try {
- triggerLoader.addTrigger(t);
- } catch (TriggerLoaderException e) {
- throw new TriggerManagerException(e);
- }
- runnerThread.addTrigger(t);
- triggerIdMap.put(t.getTriggerId(), t);
- }
- }
-
- public void removeTrigger(int id) throws TriggerManagerException {
- synchronized (syncObj) {
- Trigger t = triggerIdMap.get(id);
- if(t != null) {
- removeTrigger(triggerIdMap.get(id));
- }
- }
- }
-
- public void updateTrigger(int id) throws TriggerManagerException {
- synchronized (syncObj) {
- if(! triggerIdMap.containsKey(id)) {
- throw new TriggerManagerException("The trigger to update " + id + " doesn't exist!");
- }
-
- Trigger t;
- try {
- t = triggerLoader.loadTrigger(id);
- } catch (TriggerLoaderException e) {
- throw new TriggerManagerException(e);
- }
- updateTrigger(t);
- }
- }
-
- public void updateTrigger(Trigger t) throws TriggerManagerException {
- synchronized (syncObj) {
- runnerThread.deleteTrigger(triggerIdMap.get(t.getTriggerId()));
- runnerThread.addTrigger(t);
- triggerIdMap.put(t.getTriggerId(), t);
- }
- }
-
- public void removeTrigger(Trigger t) throws TriggerManagerException {
- synchronized (syncObj) {
- runnerThread.deleteTrigger(t);
- triggerIdMap.remove(t.getTriggerId());
- try {
- t.stopCheckers();
- triggerLoader.removeTrigger(t);
- } catch (TriggerLoaderException e) {
- throw new TriggerManagerException(e);
- }
- }
- }
-
- public List<Trigger> getTriggers() {
- return new ArrayList<Trigger>(triggerIdMap.values());
- }
-
- public Map<String, Class<? extends ConditionChecker>> getSupportedCheckers() {
- return checkerTypeLoader.getSupportedCheckers();
- }
-
- private class TriggerScannerThread extends Thread {
- private BlockingQueue<Trigger> triggers;
- private Map<Integer, ExecutableFlow> justFinishedFlows;
- private boolean shutdown = false;
- //private AtomicBoolean stillAlive = new AtomicBoolean(true);
- private final long scannerInterval;
-
- public TriggerScannerThread(long scannerInterval) {
- triggers = new PriorityBlockingQueue<Trigger>(1, new TriggerComparator());
- justFinishedFlows = new ConcurrentHashMap<Integer, ExecutableFlow>();
- this.setName("TriggerRunnerManager-Trigger-Scanner-Thread");
- this.scannerInterval = scannerInterval;
- }
-
- public void shutdown() {
- logger.error("Shutting down trigger manager thread " + this.getName());
- shutdown = true;
- //stillAlive.set(false);
- this.interrupt();
- }
-
- public void addJustFinishedFlow(ExecutableFlow flow) {
- synchronized (syncObj) {
- justFinishedFlows.put(flow.getExecutionId(), flow);
- }
- }
-
- public void addTrigger(Trigger t) {
- synchronized (syncObj) {
- t.updateNextCheckTime();
- triggers.add(t);
- }
- }
-
- public void deleteTrigger(Trigger t) {
- triggers.remove(t);
- }
-
- public void run() {
- //while(stillAlive.get()) {
- while(!shutdown) {
- synchronized (syncObj) {
- try {
- lastRunnerThreadCheckTime = System.currentTimeMillis();
-
- scannerStage = "Ready to start a new scan cycle at " + lastRunnerThreadCheckTime;
-
- try {
- checkAllTriggers();
- justFinishedFlows.clear();
- } catch(Exception e) {
- e.printStackTrace();
- logger.error(e.getMessage());
- } catch(Throwable t) {
- t.printStackTrace();
- logger.error(t.getMessage());
- }
-
- scannerStage = "Done flipping all triggers.";
-
- runnerThreadIdleTime = scannerInterval - (System.currentTimeMillis() - lastRunnerThreadCheckTime);
-
- if(runnerThreadIdleTime < 0) {
- logger.error("Trigger manager thread " + this.getName() + " is too busy!");
- } else {
- syncObj.wait(runnerThreadIdleTime);
- }
- } catch(InterruptedException e) {
- logger.info("Interrupted. Probably to shut down.");
- }
- }
- }
- }
-
- private void checkAllTriggers() throws TriggerManagerException {
- long now = System.currentTimeMillis();
-
- // sweep through the rest of them
- for(Trigger t : triggers) {
- scannerStage = "Checking for trigger " + t.getTriggerId();
-
- boolean shouldSkip = true;
- if(shouldSkip && t.getInfo() != null && t.getInfo().containsKey("monitored.finished.execution")) {
- int execId = Integer.valueOf((String) t.getInfo().get("monitored.finished.execution"));
- if(justFinishedFlows.containsKey(execId)) {
- logger.info("Monitored execution has finished. Checking trigger earlier " + t.getTriggerId());
- shouldSkip = false;
- }
- }
- if(shouldSkip && t.getNextCheckTime() > now) {
- shouldSkip = false;
- }
-
- if(shouldSkip) {
- logger.info("Skipping trigger" + t.getTriggerId() + " until " + t.getNextCheckTime());
- }
-
- logger.info("Checking trigger " + t.getTriggerId());
- if(t.getStatus().equals(TriggerStatus.READY)) {
- if(t.triggerConditionMet()) {
- onTriggerTrigger(t);
- } else if (t.expireConditionMet()) {
- onTriggerExpire(t);
- }
- }
- if(t.getStatus().equals(TriggerStatus.EXPIRED) && t.getSource().equals("azkaban")) {
- removeTrigger(t);
- } else {
- t.updateNextCheckTime();
- }
- }
- }
-
- private void onTriggerTrigger(Trigger t) throws TriggerManagerException {
- List<TriggerAction> actions = t.getTriggerActions();
- for(TriggerAction action : actions) {
- try {
- logger.info("Doing trigger actions");
- action.doAction();
- } catch (Exception e) {
- logger.error("Failed to do action " + action.getDescription(), e);
- } catch (Throwable th) {
- logger.error("Failed to do action " + action.getDescription(), th);
- }
- }
- if(t.isResetOnTrigger()) {
- t.resetTriggerConditions();
- t.resetExpireCondition();
- } else {
- t.setStatus(TriggerStatus.EXPIRED);
- }
- try {
- triggerLoader.updateTrigger(t);
- }
- catch (TriggerLoaderException e) {
- throw new TriggerManagerException(e);
- }
- }
-
- private void onTriggerExpire(Trigger t) throws TriggerManagerException {
- List<TriggerAction> expireActions = t.getExpireActions();
- for(TriggerAction action : expireActions) {
- try {
- logger.info("Doing expire actions");
- action.doAction();
- } catch (Exception e) {
- logger.error("Failed to do expire action " + action.getDescription(), e);
- } catch (Throwable th) {
- logger.error("Failed to do expire action " + action.getDescription(), th);
- }
- }
- if(t.isResetOnExpire()) {
- t.resetTriggerConditions();
- t.resetExpireCondition();
-// updateTrigger(t);
- } else {
- t.setStatus(TriggerStatus.EXPIRED);
- }
- try {
- triggerLoader.updateTrigger(t);
- } catch (TriggerLoaderException e) {
- throw new TriggerManagerException(e);
- }
- }
-
- private class TriggerComparator implements Comparator<Trigger> {
- @Override
- public int compare(Trigger arg0, Trigger arg1) {
- long first = arg1.getNextCheckTime();
- long second = arg0.getNextCheckTime();
-
- if(first == second) {
- return 0;
- } else if (first < second) {
- return 1;
- }
- return -1;
- }
- }
- }
-
- public Trigger getTrigger(int triggerId) {
- synchronized (syncObj) {
- return triggerIdMap.get(triggerId);
- }
- }
-
- public void expireTrigger(int triggerId) {
- Trigger t = getTrigger(triggerId);
- t.setStatus(TriggerStatus.EXPIRED);
-// updateAgent(t);
- }
-
- @Override
- public List<Trigger> getTriggers(String triggerSource) {
- List<Trigger> triggers = new ArrayList<Trigger>();
- for(Trigger t : triggerIdMap.values()) {
- if(t.getSource().equals(triggerSource)) {
- triggers.add(t);
- }
- }
- return triggers;
- }
-
- @Override
- public List<Trigger> getTriggerUpdates(String triggerSource, long lastUpdateTime) throws TriggerManagerException{
- List<Trigger> triggers = new ArrayList<Trigger>();
- for(Trigger t : triggerIdMap.values()) {
- if(t.getSource().equals(triggerSource) && t.getLastModifyTime() > lastUpdateTime) {
- triggers.add(t);
- }
- }
- return triggers;
- }
-
- @Override
- public List<Trigger> getAllTriggerUpdates(long lastUpdateTime) throws TriggerManagerException {
- List<Trigger> triggers = new ArrayList<Trigger>();
- for(Trigger t : triggerIdMap.values()) {
- if(t.getLastModifyTime() > lastUpdateTime) {
- triggers.add(t);
- }
- }
- return triggers;
- }
-
- @Override
- public void insertTrigger(Trigger t, String user) throws TriggerManagerException {
- insertTrigger(t);
- }
-
- @Override
- public void removeTrigger(int id, String user) throws TriggerManagerException {
- removeTrigger(id);
- }
-
- @Override
- public void updateTrigger(Trigger t, String user) throws TriggerManagerException {
- updateTrigger(t);
- }
-
- @Override
- public void shutdown() {
- runnerThread.shutdown();
- }
-
- @Override
- public TriggerJMX getJMX() {
- return this.jmxStats;
- }
-
- private class LocalTriggerJMX implements TriggerJMX {
-
- @Override
- public long getLastRunnerThreadCheckTime() {
- return lastRunnerThreadCheckTime;
- }
-
- @Override
- public boolean isRunnerThreadActive() {
- return runnerThread.isAlive();
- }
-
- @Override
- public String getPrimaryServerHost() {
- return "local";
- }
-
- @Override
- public int getNumTriggers() {
- return triggerIdMap.size();
- }
-
- @Override
- public String getTriggerSources() {
- Set<String> sources = new HashSet<String>();
- for(Trigger t : triggerIdMap.values()) {
- sources.add(t.getSource());
- }
- return sources.toString();
- }
-
- @Override
- public String getTriggerIds() {
- return triggerIdMap.keySet().toString();
- }
-
- @Override
- public long getScannerIdleTime() {
- return runnerThreadIdleTime;
- }
-
- @Override
- public Map<String, Object> getAllJMXMbeans() {
- return new HashMap<String, Object>();
- }
-
- @Override
- public String getScannerThreadStage() {
- return scannerStage;
- }
-
- }
-
- @Override
- public void registerCheckerType(String name, Class<? extends ConditionChecker> checker) {
- checkerTypeLoader.registerCheckerType(name, checker);
- }
-
- @Override
- public void registerActionType(String name, Class<? extends TriggerAction> action) {
- actionTypeLoader.registerActionType(name, action);
- }
-
- private class ExecutorManagerEventListener implements EventListener {
- public ExecutorManagerEventListener() {
- }
-
- @Override
- public void handleEvent(Event event) {
- // this needs to be fixed for perf
- synchronized (syncObj) {
- ExecutableFlow flow = (ExecutableFlow) event.getRunner();
- if (event.getType() == Type.FLOW_FINISHED) {
- logger.info("Flow finish event received. " + flow.getExecutionId() );
- runnerThread.addJustFinishedFlow(flow);
- }
- }
- }
- }
-
+public class TriggerManager extends EventHandler implements
+ TriggerManagerAdapter {
+ private static Logger logger = Logger.getLogger(TriggerManager.class);
+ public static final long DEFAULT_SCANNER_INTERVAL_MS = 60000;
+
+ private static Map<Integer, Trigger> triggerIdMap =
+ new ConcurrentHashMap<Integer, Trigger>();
+
+ private CheckerTypeLoader checkerTypeLoader;
+ private ActionTypeLoader actionTypeLoader;
+ private TriggerLoader triggerLoader;
+
+ private final TriggerScannerThread runnerThread;
+ private long lastRunnerThreadCheckTime = -1;
+ private long runnerThreadIdleTime = -1;
+ private LocalTriggerJMX jmxStats = new LocalTriggerJMX();
+
+ private ExecutorManagerEventListener listener =
+ new ExecutorManagerEventListener();
+
+ private final Object syncObj = new Object();
+
+ private String scannerStage = "";
+
+ public TriggerManager(Props props, TriggerLoader triggerLoader,
+ ExecutorManager executorManager) throws TriggerManagerException {
+
+ this.triggerLoader = triggerLoader;
+
+ long scannerInterval =
+ props.getLong("trigger.scan.interval", DEFAULT_SCANNER_INTERVAL_MS);
+ runnerThread = new TriggerScannerThread(scannerInterval);
+
+ checkerTypeLoader = new CheckerTypeLoader();
+ actionTypeLoader = new ActionTypeLoader();
+
+ try {
+ checkerTypeLoader.init(props);
+ actionTypeLoader.init(props);
+ } catch (Exception e) {
+ throw new TriggerManagerException(e);
+ }
+
+ Condition.setCheckerLoader(checkerTypeLoader);
+ Trigger.setActionTypeLoader(actionTypeLoader);
+
+ executorManager.addListener(listener);
+
+ logger.info("TriggerManager loaded.");
+ }
+
+ @Override
+ public void start() throws TriggerManagerException {
+
+ try {
+ // expect loader to return valid triggers
+ List<Trigger> triggers = triggerLoader.loadTriggers();
+ for (Trigger t : triggers) {
+ runnerThread.addTrigger(t);
+ triggerIdMap.put(t.getTriggerId(), t);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ throw new TriggerManagerException(e);
+ }
+
+ runnerThread.start();
+ }
+
+ protected CheckerTypeLoader getCheckerLoader() {
+ return checkerTypeLoader;
+ }
+
+ protected ActionTypeLoader getActionLoader() {
+ return actionTypeLoader;
+ }
+
+ public void insertTrigger(Trigger t) throws TriggerManagerException {
+ synchronized (syncObj) {
+ try {
+ triggerLoader.addTrigger(t);
+ } catch (TriggerLoaderException e) {
+ throw new TriggerManagerException(e);
+ }
+ runnerThread.addTrigger(t);
+ triggerIdMap.put(t.getTriggerId(), t);
+ }
+ }
+
+ public void removeTrigger(int id) throws TriggerManagerException {
+ synchronized (syncObj) {
+ Trigger t = triggerIdMap.get(id);
+ if (t != null) {
+ removeTrigger(triggerIdMap.get(id));
+ }
+ }
+ }
+
+ public void updateTrigger(int id) throws TriggerManagerException {
+ synchronized (syncObj) {
+ if (!triggerIdMap.containsKey(id)) {
+ throw new TriggerManagerException("The trigger to update " + id
+ + " doesn't exist!");
+ }
+
+ Trigger t;
+ try {
+ t = triggerLoader.loadTrigger(id);
+ } catch (TriggerLoaderException e) {
+ throw new TriggerManagerException(e);
+ }
+ updateTrigger(t);
+ }
+ }
+
+ public void updateTrigger(Trigger t) throws TriggerManagerException {
+ synchronized (syncObj) {
+ runnerThread.deleteTrigger(triggerIdMap.get(t.getTriggerId()));
+ runnerThread.addTrigger(t);
+ triggerIdMap.put(t.getTriggerId(), t);
+ }
+ }
+
+ public void removeTrigger(Trigger t) throws TriggerManagerException {
+ synchronized (syncObj) {
+ runnerThread.deleteTrigger(t);
+ triggerIdMap.remove(t.getTriggerId());
+ try {
+ t.stopCheckers();
+ triggerLoader.removeTrigger(t);
+ } catch (TriggerLoaderException e) {
+ throw new TriggerManagerException(e);
+ }
+ }
+ }
+
+ public List<Trigger> getTriggers() {
+ return new ArrayList<Trigger>(triggerIdMap.values());
+ }
+
+ public Map<String, Class<? extends ConditionChecker>> getSupportedCheckers() {
+ return checkerTypeLoader.getSupportedCheckers();
+ }
+
+ private class TriggerScannerThread extends Thread {
+ private BlockingQueue<Trigger> triggers;
+ private Map<Integer, ExecutableFlow> justFinishedFlows;
+ private boolean shutdown = false;
+ // private AtomicBoolean stillAlive = new AtomicBoolean(true);
+ private final long scannerInterval;
+
+ public TriggerScannerThread(long scannerInterval) {
+ triggers = new PriorityBlockingQueue<Trigger>(1, new TriggerComparator());
+ justFinishedFlows = new ConcurrentHashMap<Integer, ExecutableFlow>();
+ this.setName("TriggerRunnerManager-Trigger-Scanner-Thread");
+ this.scannerInterval = scannerInterval;
+ }
+
+ public void shutdown() {
+ logger.error("Shutting down trigger manager thread " + this.getName());
+ shutdown = true;
+ // stillAlive.set(false);
+ this.interrupt();
+ }
+
+ public void addJustFinishedFlow(ExecutableFlow flow) {
+ synchronized (syncObj) {
+ justFinishedFlows.put(flow.getExecutionId(), flow);
+ }
+ }
+
+ public void addTrigger(Trigger t) {
+ synchronized (syncObj) {
+ t.updateNextCheckTime();
+ triggers.add(t);
+ }
+ }
+
+ public void deleteTrigger(Trigger t) {
+ triggers.remove(t);
+ }
+
+ public void run() {
+ // while(stillAlive.get()) {
+ while (!shutdown) {
+ synchronized (syncObj) {
+ try {
+ lastRunnerThreadCheckTime = System.currentTimeMillis();
+
+ scannerStage =
+ "Ready to start a new scan cycle at "
+ + lastRunnerThreadCheckTime;
+
+ try {
+ checkAllTriggers();
+ justFinishedFlows.clear();
+ } catch (Exception e) {
+ e.printStackTrace();
+ logger.error(e.getMessage());
+ } catch (Throwable t) {
+ t.printStackTrace();
+ logger.error(t.getMessage());
+ }
+
+ scannerStage = "Done flipping all triggers.";
+
+ runnerThreadIdleTime =
+ scannerInterval
+ - (System.currentTimeMillis() - lastRunnerThreadCheckTime);
+
+ if (runnerThreadIdleTime < 0) {
+ logger.error("Trigger manager thread " + this.getName()
+ + " is too busy!");
+ } else {
+ syncObj.wait(runnerThreadIdleTime);
+ }
+ } catch (InterruptedException e) {
+ logger.info("Interrupted. Probably to shut down.");
+ }
+ }
+ }
+ }
+
+ private void checkAllTriggers() throws TriggerManagerException {
+ long now = System.currentTimeMillis();
+
+ // sweep through the rest of them
+ for (Trigger t : triggers) {
+ scannerStage = "Checking for trigger " + t.getTriggerId();
+
+ boolean shouldSkip = true;
+ if (shouldSkip && t.getInfo() != null
+ && t.getInfo().containsKey("monitored.finished.execution")) {
+ int execId =
+ Integer.valueOf((String) t.getInfo().get(
+ "monitored.finished.execution"));
+ if (justFinishedFlows.containsKey(execId)) {
+ logger
+ .info("Monitored execution has finished. Checking trigger earlier "
+ + t.getTriggerId());
+ shouldSkip = false;
+ }
+ }
+ if (shouldSkip && t.getNextCheckTime() > now) {
+ shouldSkip = false;
+ }
+
+ if (shouldSkip) {
+ logger.info("Skipping trigger" + t.getTriggerId() + " until "
+ + t.getNextCheckTime());
+ }
+
+ logger.info("Checking trigger " + t.getTriggerId());
+ if (t.getStatus().equals(TriggerStatus.READY)) {
+ if (t.triggerConditionMet()) {
+ onTriggerTrigger(t);
+ } else if (t.expireConditionMet()) {
+ onTriggerExpire(t);
+ }
+ }
+ if (t.getStatus().equals(TriggerStatus.EXPIRED)
+ && t.getSource().equals("azkaban")) {
+ removeTrigger(t);
+ } else {
+ t.updateNextCheckTime();
+ }
+ }
+ }
+
+ private void onTriggerTrigger(Trigger t) throws TriggerManagerException {
+ List<TriggerAction> actions = t.getTriggerActions();
+ for (TriggerAction action : actions) {
+ try {
+ logger.info("Doing trigger actions");
+ action.doAction();
+ } catch (Exception e) {
+ logger.error("Failed to do action " + action.getDescription(), e);
+ } catch (Throwable th) {
+ logger.error("Failed to do action " + action.getDescription(), th);
+ }
+ }
+ if (t.isResetOnTrigger()) {
+ t.resetTriggerConditions();
+ t.resetExpireCondition();
+ } else {
+ t.setStatus(TriggerStatus.EXPIRED);
+ }
+ try {
+ triggerLoader.updateTrigger(t);
+ } catch (TriggerLoaderException e) {
+ throw new TriggerManagerException(e);
+ }
+ }
+
+ private void onTriggerExpire(Trigger t) throws TriggerManagerException {
+ List<TriggerAction> expireActions = t.getExpireActions();
+ for (TriggerAction action : expireActions) {
+ try {
+ logger.info("Doing expire actions");
+ action.doAction();
+ } catch (Exception e) {
+ logger.error("Failed to do expire action " + action.getDescription(),
+ e);
+ } catch (Throwable th) {
+ logger.error("Failed to do expire action " + action.getDescription(),
+ th);
+ }
+ }
+ if (t.isResetOnExpire()) {
+ t.resetTriggerConditions();
+ t.resetExpireCondition();
+ // updateTrigger(t);
+ } else {
+ t.setStatus(TriggerStatus.EXPIRED);
+ }
+ try {
+ triggerLoader.updateTrigger(t);
+ } catch (TriggerLoaderException e) {
+ throw new TriggerManagerException(e);
+ }
+ }
+
+ private class TriggerComparator implements Comparator<Trigger> {
+ @Override
+ public int compare(Trigger arg0, Trigger arg1) {
+ long first = arg1.getNextCheckTime();
+ long second = arg0.getNextCheckTime();
+
+ if (first == second) {
+ return 0;
+ } else if (first < second) {
+ return 1;
+ }
+ return -1;
+ }
+ }
+ }
+
+ public Trigger getTrigger(int triggerId) {
+ synchronized (syncObj) {
+ return triggerIdMap.get(triggerId);
+ }
+ }
+
+ public void expireTrigger(int triggerId) {
+ Trigger t = getTrigger(triggerId);
+ t.setStatus(TriggerStatus.EXPIRED);
+ // updateAgent(t);
+ }
+
+ @Override
+ public List<Trigger> getTriggers(String triggerSource) {
+ List<Trigger> triggers = new ArrayList<Trigger>();
+ for (Trigger t : triggerIdMap.values()) {
+ if (t.getSource().equals(triggerSource)) {
+ triggers.add(t);
+ }
+ }
+ return triggers;
+ }
+
+ @Override
+ public List<Trigger> getTriggerUpdates(String triggerSource,
+ long lastUpdateTime) throws TriggerManagerException {
+ List<Trigger> triggers = new ArrayList<Trigger>();
+ for (Trigger t : triggerIdMap.values()) {
+ if (t.getSource().equals(triggerSource)
+ && t.getLastModifyTime() > lastUpdateTime) {
+ triggers.add(t);
+ }
+ }
+ return triggers;
+ }
+
+ @Override
+ public List<Trigger> getAllTriggerUpdates(long lastUpdateTime)
+ throws TriggerManagerException {
+ List<Trigger> triggers = new ArrayList<Trigger>();
+ for (Trigger t : triggerIdMap.values()) {
+ if (t.getLastModifyTime() > lastUpdateTime) {
+ triggers.add(t);
+ }
+ }
+ return triggers;
+ }
+
+ @Override
+ public void insertTrigger(Trigger t, String user)
+ throws TriggerManagerException {
+ insertTrigger(t);
+ }
+
+ @Override
+ public void removeTrigger(int id, String user) throws TriggerManagerException {
+ removeTrigger(id);
+ }
+
+ @Override
+ public void updateTrigger(Trigger t, String user)
+ throws TriggerManagerException {
+ updateTrigger(t);
+ }
+
+ @Override
+ public void shutdown() {
+ runnerThread.shutdown();
+ }
+
+ @Override
+ public TriggerJMX getJMX() {
+ return this.jmxStats;
+ }
+
+ private class LocalTriggerJMX implements TriggerJMX {
+
+ @Override
+ public long getLastRunnerThreadCheckTime() {
+ return lastRunnerThreadCheckTime;
+ }
+
+ @Override
+ public boolean isRunnerThreadActive() {
+ return runnerThread.isAlive();
+ }
+
+ @Override
+ public String getPrimaryServerHost() {
+ return "local";
+ }
+
+ @Override
+ public int getNumTriggers() {
+ return triggerIdMap.size();
+ }
+
+ @Override
+ public String getTriggerSources() {
+ Set<String> sources = new HashSet<String>();
+ for (Trigger t : triggerIdMap.values()) {
+ sources.add(t.getSource());
+ }
+ return sources.toString();
+ }
+
+ @Override
+ public String getTriggerIds() {
+ return triggerIdMap.keySet().toString();
+ }
+
+ @Override
+ public long getScannerIdleTime() {
+ return runnerThreadIdleTime;
+ }
+
+ @Override
+ public Map<String, Object> getAllJMXMbeans() {
+ return new HashMap<String, Object>();
+ }
+
+ @Override
+ public String getScannerThreadStage() {
+ return scannerStage;
+ }
+
+ }
+
+ @Override
+ public void registerCheckerType(String name,
+ Class<? extends ConditionChecker> checker) {
+ checkerTypeLoader.registerCheckerType(name, checker);
+ }
+
+ @Override
+ public void registerActionType(String name,
+ Class<? extends TriggerAction> action) {
+ actionTypeLoader.registerActionType(name, action);
+ }
+
+ private class ExecutorManagerEventListener implements EventListener {
+ public ExecutorManagerEventListener() {
+ }
+
+ @Override
+ public void handleEvent(Event event) {
+ // this needs to be fixed for perf
+ synchronized (syncObj) {
+ ExecutableFlow flow = (ExecutableFlow) event.getRunner();
+ if (event.getType() == Type.FLOW_FINISHED) {
+ logger.info("Flow finish event received. " + flow.getExecutionId());
+ runnerThread.addJustFinishedFlow(flow);
+ }
+ }
+ }
+ }
+
}
diff --git a/src/main/java/azkaban/trigger/TriggerManagerAdapter.java b/src/main/java/azkaban/trigger/TriggerManagerAdapter.java
index 79d4626..a747a34 100644
--- a/src/main/java/azkaban/trigger/TriggerManagerAdapter.java
+++ b/src/main/java/azkaban/trigger/TriggerManagerAdapter.java
@@ -20,39 +20,53 @@ import java.util.List;
import java.util.Map;
public interface TriggerManagerAdapter {
-
- public void insertTrigger(Trigger t, String user) throws TriggerManagerException;
-
- public void removeTrigger(int id, String user) throws TriggerManagerException;
-
- public void updateTrigger(Trigger t, String user) throws TriggerManagerException;
-
- public List<Trigger> getAllTriggerUpdates(long lastUpdateTime) throws TriggerManagerException;
-
- public List<Trigger> getTriggerUpdates(String triggerSource, long lastUpdateTime) throws TriggerManagerException;
-
- public List<Trigger> getTriggers(String trigegerSource);
-
- public void start() throws TriggerManagerException;
-
- public void shutdown();
-
- public void registerCheckerType(String name, Class<? extends ConditionChecker> checker);
-
- public void registerActionType(String name, Class<? extends TriggerAction> action);
-
- public TriggerJMX getJMX();
-
- public interface TriggerJMX {
- public long getLastRunnerThreadCheckTime();
- public boolean isRunnerThreadActive();
- public String getPrimaryServerHost();
- public int getNumTriggers();
- public String getTriggerSources();
- public String getTriggerIds();
- public long getScannerIdleTime();
- public Map<String, Object> getAllJMXMbeans();
- public String getScannerThreadStage();
- }
-
+
+ public void insertTrigger(Trigger t, String user)
+ throws TriggerManagerException;
+
+ public void removeTrigger(int id, String user) throws TriggerManagerException;
+
+ public void updateTrigger(Trigger t, String user)
+ throws TriggerManagerException;
+
+ public List<Trigger> getAllTriggerUpdates(long lastUpdateTime)
+ throws TriggerManagerException;
+
+ public List<Trigger> getTriggerUpdates(String triggerSource,
+ long lastUpdateTime) throws TriggerManagerException;
+
+  public List<Trigger> getTriggers(String triggerSource);
+
+ public void start() throws TriggerManagerException;
+
+ public void shutdown();
+
+ public void registerCheckerType(String name,
+ Class<? extends ConditionChecker> checker);
+
+ public void registerActionType(String name,
+ Class<? extends TriggerAction> action);
+
+ public TriggerJMX getJMX();
+
+ public interface TriggerJMX {
+ public long getLastRunnerThreadCheckTime();
+
+ public boolean isRunnerThreadActive();
+
+ public String getPrimaryServerHost();
+
+ public int getNumTriggers();
+
+ public String getTriggerSources();
+
+ public String getTriggerIds();
+
+ public long getScannerIdleTime();
+
+ public Map<String, Object> getAllJMXMbeans();
+
+ public String getScannerThreadStage();
+ }
+
}
diff --git a/src/main/java/azkaban/trigger/TriggerManagerException.java b/src/main/java/azkaban/trigger/TriggerManagerException.java
index bff0f81..e8cc823 100644
--- a/src/main/java/azkaban/trigger/TriggerManagerException.java
+++ b/src/main/java/azkaban/trigger/TriggerManagerException.java
@@ -16,19 +16,18 @@
package azkaban.trigger;
-public class TriggerManagerException extends Exception{
- private static final long serialVersionUID = 1L;
+public class TriggerManagerException extends Exception {
+ private static final long serialVersionUID = 1L;
- public TriggerManagerException(String message) {
- super(message);
- }
-
- public TriggerManagerException(String message, Throwable cause) {
- super(message, cause);
- }
-
- public TriggerManagerException(Throwable e) {
- super(e);
- }
-}
+ public TriggerManagerException(String message) {
+ super(message);
+ }
+
+ public TriggerManagerException(String message, Throwable cause) {
+ super(message, cause);
+  }
+
+  public TriggerManagerException(Throwable e) {
+ super(e);
+ }
+}
src/main/java/azkaban/trigger/TriggerStatus.java 44(+22 -22)
diff --git a/src/main/java/azkaban/trigger/TriggerStatus.java b/src/main/java/azkaban/trigger/TriggerStatus.java
index 349c3ff..3281941 100644
--- a/src/main/java/azkaban/trigger/TriggerStatus.java
+++ b/src/main/java/azkaban/trigger/TriggerStatus.java
@@ -17,29 +17,29 @@
package azkaban.trigger;
public enum TriggerStatus {
- READY(10), PAUSED(20), EXPIRED(30);
-
- private int numVal;
+ READY(10), PAUSED(20), EXPIRED(30);
- TriggerStatus(int numVal) {
- this.numVal = numVal;
- }
+ private int numVal;
- public int getNumVal() {
- return numVal;
- }
-
- public static TriggerStatus fromInteger(int x) {
- switch (x) {
- case 10:
- return READY;
- case 20:
- return PAUSED;
- case 30:
- return EXPIRED;
- default:
- return READY;
- }
- }
+ TriggerStatus(int numVal) {
+ this.numVal = numVal;
+ }
+
+ public int getNumVal() {
+ return numVal;
+ }
+
+ public static TriggerStatus fromInteger(int x) {
+ switch (x) {
+ case 10:
+ return READY;
+ case 20:
+ return PAUSED;
+ case 30:
+ return EXPIRED;
+ default:
+ return READY;
+ }
+ }
}
src/main/java/azkaban/user/Permission.java 350(+176 -174)
diff --git a/src/main/java/azkaban/user/Permission.java b/src/main/java/azkaban/user/Permission.java
index 42158f8..d0d123a 100644
--- a/src/main/java/azkaban/user/Permission.java
+++ b/src/main/java/azkaban/user/Permission.java
@@ -24,178 +24,180 @@ import java.util.Set;
import azkaban.utils.Utils;
public class Permission {
- public enum Type {
- READ(0x0000001),
- WRITE(0x0000002),
- EXECUTE(0x0000004),
- SCHEDULE(0x0000008),
- METRICS(0x0000010),
- CREATEPROJECTS(0x40000000), // Only used for roles
- ADMIN(0x8000000);
-
- private int numVal;
-
- Type(int numVal) {
- this.numVal = numVal;
- }
-
- public int getFlag() {
- return numVal;
- }
- }
-
- private Set<Type> permissions = new HashSet<Type>();
-
- public Permission() {
- }
-
- public Permission(int flags) {
- setPermissions(flags);
- }
-
- public Permission(Type ... list) {
- addPermission(list);
- }
-
- public void addPermissions(Permission perm) {
- this.permissions.addAll(perm.getTypes());
- }
-
- public void setPermission(Type type, boolean set) {
- if (set) {
- addPermission(type);
- }
- else {
- removePermissions(type);
- }
- }
-
- public void setPermissions(int flags) {
- permissions.clear();
- if((flags & Type.ADMIN.getFlag()) != 0) {
- addPermission(Type.ADMIN);
- }
- else {
- for (Type type: Type.values()) {
- if ((flags & type.getFlag()) != 0) {
- addPermission(type);
- }
- }
- }
- }
-
- public void addPermission(Type ... list) {
- // Admin is all encompassing permission. No need to add other types
- if (!permissions.contains(Type.ADMIN)) {
- for (Type perm: list) {
- permissions.add(perm);
- }
- // We add everything, and if there's Admin left, we make sure that only Admin is remaining.
- if (permissions.contains(Type.ADMIN)) {
- permissions.clear();
- permissions.add(Type.ADMIN);
- }
- }
- }
-
- public void addPermissionsByName(String ... list) {
- for (String perm: list) {
- Type type = Type.valueOf(perm);
- if (type != null) {
- addPermission(type);
- };
- }
- }
-
- public void addPermissions(Collection<Type> list) {
- for (Type perm: list) {
- addPermission(perm);
- }
- }
-
- public void addPermissionsByName(Collection<String> list) {
- for (String perm: list) {
- Type type = Type.valueOf(perm);
- if (type != null) {
- addPermission(type);
- };
- }
- }
-
- public Set<Type> getTypes() {
- return permissions;
- }
-
- public void removePermissions(Type ... list) {
- for (Type perm: list) {
- permissions.remove(perm);
- }
- }
-
- public void removePermissionsByName(String ... list) {
- for (String perm: list) {
- Type type = Type.valueOf(perm);
- if (type != null) {
- permissions.remove(type);
- };
- }
- }
-
- public boolean isPermissionSet(Type permission) {
- return permissions.contains(permission);
- }
-
- public boolean isPermissionNameSet(String permission) {
- return permissions.contains(Type.valueOf(permission));
- }
-
- public String[] toStringArray() {
- ArrayList<String> list = new ArrayList<String>();
- int count = 0;
- for (Type type: permissions) {
- list.add(type.toString());
- count++;
- }
-
- return list.toArray(new String[count]);
- }
-
- public String toString() {
- return Utils.flattenToString(permissions, ",");
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result
- + ((permissions == null) ? 0 : permissions.hashCode());
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- Permission other = (Permission) obj;
- if (permissions == null) {
- if (other.permissions != null)
- return false;
- } else if (!permissions.equals(other.permissions))
- return false;
- return true;
- }
-
- public int toFlags() {
- int flag = 0;
- for (Type type: permissions) {
- flag |= type.getFlag();
- }
-
- return flag;
- }
+ public enum Type {
+ READ(0x0000001),
+ WRITE(0x0000002),
+ EXECUTE(0x0000004),
+ SCHEDULE(0x0000008),
+ METRICS(0x0000010),
+ CREATEPROJECTS(0x40000000), // Only used for roles
+ ADMIN(0x8000000);
+
+ private int numVal;
+
+ Type(int numVal) {
+ this.numVal = numVal;
+ }
+
+ public int getFlag() {
+ return numVal;
+ }
+ }
+
+ private Set<Type> permissions = new HashSet<Type>();
+
+ public Permission() {
+ }
+
+ public Permission(int flags) {
+ setPermissions(flags);
+ }
+
+ public Permission(Type... list) {
+ addPermission(list);
+ }
+
+ public void addPermissions(Permission perm) {
+ this.permissions.addAll(perm.getTypes());
+ }
+
+ public void setPermission(Type type, boolean set) {
+ if (set) {
+ addPermission(type);
+ } else {
+ removePermissions(type);
+ }
+ }
+
+ public void setPermissions(int flags) {
+ permissions.clear();
+ if ((flags & Type.ADMIN.getFlag()) != 0) {
+ addPermission(Type.ADMIN);
+ } else {
+ for (Type type : Type.values()) {
+ if ((flags & type.getFlag()) != 0) {
+ addPermission(type);
+ }
+ }
+ }
+ }
+
+ public void addPermission(Type... list) {
+ // Admin is all encompassing permission. No need to add other types
+ if (!permissions.contains(Type.ADMIN)) {
+ for (Type perm : list) {
+ permissions.add(perm);
+ }
+ // We add everything, and if there's Admin left, we make sure that only
+ // Admin is remaining.
+ if (permissions.contains(Type.ADMIN)) {
+ permissions.clear();
+ permissions.add(Type.ADMIN);
+ }
+ }
+ }
+
+ public void addPermissionsByName(String... list) {
+ for (String perm : list) {
+ Type type = Type.valueOf(perm);
+ if (type != null) {
+ addPermission(type);
+ }
+ ;
+ }
+ }
+
+ public void addPermissions(Collection<Type> list) {
+ for (Type perm : list) {
+ addPermission(perm);
+ }
+ }
+
+ public void addPermissionsByName(Collection<String> list) {
+ for (String perm : list) {
+ Type type = Type.valueOf(perm);
+ if (type != null) {
+ addPermission(type);
+ }
+ ;
+ }
+ }
+
+ public Set<Type> getTypes() {
+ return permissions;
+ }
+
+ public void removePermissions(Type... list) {
+ for (Type perm : list) {
+ permissions.remove(perm);
+ }
+ }
+
+ public void removePermissionsByName(String... list) {
+ for (String perm : list) {
+ Type type = Type.valueOf(perm);
+ if (type != null) {
+ permissions.remove(type);
+ }
+ ;
+ }
+ }
+
+ public boolean isPermissionSet(Type permission) {
+ return permissions.contains(permission);
+ }
+
+ public boolean isPermissionNameSet(String permission) {
+ return permissions.contains(Type.valueOf(permission));
+ }
+
+ public String[] toStringArray() {
+ ArrayList<String> list = new ArrayList<String>();
+ int count = 0;
+ for (Type type : permissions) {
+ list.add(type.toString());
+ count++;
+ }
+
+ return list.toArray(new String[count]);
+ }
+
+ public String toString() {
+ return Utils.flattenToString(permissions, ",");
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result =
+ prime * result + ((permissions == null) ? 0 : permissions.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ Permission other = (Permission) obj;
+ if (permissions == null) {
+ if (other.permissions != null)
+ return false;
+ } else if (!permissions.equals(other.permissions))
+ return false;
+ return true;
+ }
+
+ public int toFlags() {
+ int flag = 0;
+ for (Type type : permissions) {
+ flag |= type.getFlag();
+ }
+
+ return flag;
+ }
}
src/main/java/azkaban/user/Role.java 34(+17 -17)
diff --git a/src/main/java/azkaban/user/Role.java b/src/main/java/azkaban/user/Role.java
index 64123ff..1954959 100644
--- a/src/main/java/azkaban/user/Role.java
+++ b/src/main/java/azkaban/user/Role.java
@@ -17,23 +17,23 @@
package azkaban.user;
public class Role {
- private final String name;
- private final Permission globalPermission;
-
- public Role(String name, Permission permission) {
- this.name = name;
- this.globalPermission = permission;
- }
+ private final String name;
+ private final Permission globalPermission;
- public Permission getPermission() {
- return globalPermission;
- }
+ public Role(String name, Permission permission) {
+ this.name = name;
+ this.globalPermission = permission;
+ }
- public String getName() {
- return name;
- }
-
- public String toString() {
- return "Role " + name;
- }
+ public Permission getPermission() {
+ return globalPermission;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public String toString() {
+ return "Role " + name;
+ }
}
src/main/java/azkaban/user/User.java 264(+132 -132)
diff --git a/src/main/java/azkaban/user/User.java b/src/main/java/azkaban/user/User.java
index 8ea3f59..a78a22f 100644
--- a/src/main/java/azkaban/user/User.java
+++ b/src/main/java/azkaban/user/User.java
@@ -23,136 +23,136 @@ import java.util.List;
import java.util.Set;
public class User {
- private final String userid;
- private String email = "";
- private Set<String> roles = new HashSet<String>();
- private Set<String> groups = new HashSet<String>();
- private UserPermissions userPermissions;
-
- private HashMap<String,String> properties = new HashMap<String,String>();
-
- public User(String userid) {
- this.userid = userid;
- }
-
- public String getUserId() {
- return userid;
- }
-
- public void setEmail(String email) {
- this.email = email;
- }
-
- public String getEmail() {
- return email;
- }
-
- public void setPermissions(UserPermissions checker) {
- this.userPermissions = checker;
- }
-
- public UserPermissions getPermissions() {
- return userPermissions;
- }
-
- public boolean hasPermission(String permission) {
- if (userPermissions == null) {
- return false;
- }
- return this.userPermissions.hasPermission(permission);
- }
-
- public List<String> getGroups() {
- return new ArrayList<String>(groups);
- }
-
- public void clearGroup() {
- groups.clear();
- }
-
- public void addGroup(String name) {
- groups.add(name);
- }
-
- public boolean isInGroup(String group) {
- return this.groups.contains(group);
- }
-
- public List<String> getRoles() {
- return new ArrayList<String>(roles);
- }
-
- public void addRole(String role) {
- this.roles.add(role);
- }
-
- public boolean hasRole(String role) {
- return roles.contains(role);
- }
-
- public String getProperty(String name) {
- return properties.get(name);
- }
-
- public String toString() {
- String groupStr = "[";
- for (String group: groups) {
- groupStr += group + ",";
- }
- groupStr += "]";
- return userid + ": " + groupStr;
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((userid == null) ? 0 : userid.hashCode());
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- User other = (User)obj;
- if (userid == null) {
- if (other.userid != null)
- return false;
- }
- else if (!userid.equals(other.userid))
- return false;
- return true;
- }
-
- public static interface UserPermissions {
- public boolean hasPermission(String permission);
- public void addPermission(String permission);
- }
-
- public static class DefaultUserPermission implements UserPermissions {
- Set<String> permissions;
-
- public DefaultUserPermission() {
- this(new HashSet<String>());
- }
-
- public DefaultUserPermission(Set<String> permissions) {
- this.permissions = permissions;
- }
-
- @Override
- public boolean hasPermission(String permission) {
- return permissions.contains(permission);
- }
-
- @Override
- public void addPermission(String permission) {
- permissions.add(permission);
- }
- }
+ private final String userid;
+ private String email = "";
+ private Set<String> roles = new HashSet<String>();
+ private Set<String> groups = new HashSet<String>();
+ private UserPermissions userPermissions;
+
+ private HashMap<String, String> properties = new HashMap<String, String>();
+
+ public User(String userid) {
+ this.userid = userid;
+ }
+
+ public String getUserId() {
+ return userid;
+ }
+
+ public void setEmail(String email) {
+ this.email = email;
+ }
+
+ public String getEmail() {
+ return email;
+ }
+
+ public void setPermissions(UserPermissions checker) {
+ this.userPermissions = checker;
+ }
+
+ public UserPermissions getPermissions() {
+ return userPermissions;
+ }
+
+ public boolean hasPermission(String permission) {
+ if (userPermissions == null) {
+ return false;
+ }
+ return this.userPermissions.hasPermission(permission);
+ }
+
+ public List<String> getGroups() {
+ return new ArrayList<String>(groups);
+ }
+
+ public void clearGroup() {
+ groups.clear();
+ }
+
+ public void addGroup(String name) {
+ groups.add(name);
+ }
+
+ public boolean isInGroup(String group) {
+ return this.groups.contains(group);
+ }
+
+ public List<String> getRoles() {
+ return new ArrayList<String>(roles);
+ }
+
+ public void addRole(String role) {
+ this.roles.add(role);
+ }
+
+ public boolean hasRole(String role) {
+ return roles.contains(role);
+ }
+
+ public String getProperty(String name) {
+ return properties.get(name);
+ }
+
+ public String toString() {
+ String groupStr = "[";
+ for (String group : groups) {
+ groupStr += group + ",";
+ }
+ groupStr += "]";
+ return userid + ": " + groupStr;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((userid == null) ? 0 : userid.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ User other = (User) obj;
+ if (userid == null) {
+ if (other.userid != null)
+ return false;
+ } else if (!userid.equals(other.userid))
+ return false;
+ return true;
+ }
+
+ public static interface UserPermissions {
+ public boolean hasPermission(String permission);
+
+ public void addPermission(String permission);
+ }
+
+ public static class DefaultUserPermission implements UserPermissions {
+ Set<String> permissions;
+
+ public DefaultUserPermission() {
+ this(new HashSet<String>());
+ }
+
+ public DefaultUserPermission(Set<String> permissions) {
+ this.permissions = permissions;
+ }
+
+ @Override
+ public boolean hasPermission(String permission) {
+ return permissions.contains(permission);
+ }
+
+ @Override
+ public void addPermission(String permission) {
+ permissions.add(permission);
+ }
+ }
}
src/main/java/azkaban/user/UserManager.java 74(+39 -35)
diff --git a/src/main/java/azkaban/user/UserManager.java b/src/main/java/azkaban/user/UserManager.java
index 253e817..1d5e6f1 100644
--- a/src/main/java/azkaban/user/UserManager.java
+++ b/src/main/java/azkaban/user/UserManager.java
@@ -20,44 +20,48 @@ package azkaban.user;
* Interface for the UserManager. Implementors will have to handle the retrieval
* of the User object given the username and password.
*
- * The constructor will be called with a azkaban.utils.Props object passed as the only
- * parameter. If such a constructor doesn't exist, than the UserManager instantiation may
- * fail.
+ * The constructor will be called with a azkaban.utils.Props object passed as
+ * the only parameter. If such a constructor doesn't exist, than the UserManager
+ * instantiation may fail.
*/
public interface UserManager {
- /**
- * Retrieves the user given the username and password to authenticate against.
- *
- * @param username
- * @param password
- * @return
- * @throws UserManagerException If the username/password combination doesn't exist.
- */
- public User getUser(String username, String password) throws UserManagerException;
+ /**
+ * Retrieves the user given the username and password to authenticate against.
+ *
+ * @param username
+ * @param password
+ * @return
+ * @throws UserManagerException If the username/password combination doesn't
+ * exist.
+ */
+ public User getUser(String username, String password)
+ throws UserManagerException;
- /**
- * Returns true if the user is valid. This is used when adding permissions for users
- *
- * @param username
- * @return
- */
- public boolean validateUser(String username);
+ /**
+ * Returns true if the user is valid. This is used when adding permissions for
+ * users
+ *
+ * @param username
+ * @return
+ */
+ public boolean validateUser(String username);
- /**
- * Returns true if the group is valid. This is used when adding permissions for groups.
- *
- * @param group
- * @return
- */
- public boolean validateGroup(String group);
-
- /**
- * Returns the user role. This may return null.
- *
- * @param roleName
- * @return
- */
- public Role getRole(String roleName);
+ /**
+ * Returns true if the group is valid. This is used when adding permissions
+ * for groups.
+ *
+ * @param group
+ * @return
+ */
+ public boolean validateGroup(String group);
- public boolean validateProxyUser(String proxyUser, User realUser);
+ /**
+ * Returns the user role. This may return null.
+ *
+ * @param roleName
+ * @return
+ */
+ public Role getRole(String roleName);
+
+ public boolean validateProxyUser(String proxyUser, User realUser);
}
diff --git a/src/main/java/azkaban/user/UserManagerException.java b/src/main/java/azkaban/user/UserManagerException.java
index 0943c16..e73ac25 100644
--- a/src/main/java/azkaban/user/UserManagerException.java
+++ b/src/main/java/azkaban/user/UserManagerException.java
@@ -21,13 +21,13 @@ package azkaban.user;
*
*/
public class UserManagerException extends Exception {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 1L;
- public UserManagerException(String message) {
- super(message);
- }
-
- public UserManagerException(String message, Throwable cause) {
- super(message, cause);
- }
+ public UserManagerException(String message) {
+ super(message);
+ }
+
+ public UserManagerException(String message, Throwable cause) {
+ super(message, cause);
+ }
}
src/main/java/azkaban/user/XmlUserManager.java 606(+308 -298)
diff --git a/src/main/java/azkaban/user/XmlUserManager.java b/src/main/java/azkaban/user/XmlUserManager.java
index 7bcfe26..46b9ff7 100644
--- a/src/main/java/azkaban/user/XmlUserManager.java
+++ b/src/main/java/azkaban/user/XmlUserManager.java
@@ -45,302 +45,312 @@ import azkaban.utils.Props;
* </azkaban-users>
*/
public class XmlUserManager implements UserManager {
- private static final Logger logger = Logger.getLogger(XmlUserManager.class.getName());
-
- public static final String XML_FILE_PARAM = "user.manager.xml.file";
- public static final String AZKABAN_USERS_TAG = "azkaban-users";
- public static final String USER_TAG = "user";
- public static final String ROLE_TAG = "role";
- public static final String GROUP_TAG = "group";
- public static final String ROLENAME_ATTR = "name";
- public static final String ROLEPERMISSIONS_ATTR = "permissions";
- public static final String USERNAME_ATTR = "username";
- public static final String PASSWORD_ATTR = "password";
- public static final String EMAIL_ATTR = "email";
- public static final String ROLES_ATTR = "roles";
- public static final String PROXY_ATTR = "proxy";
- public static final String GROUPS_ATTR = "groups";
- public static final String GROUPNAME_ATTR = "name";
-
- private String xmlPath;
-
- private HashMap<String, User> users;
- private HashMap<String, String> userPassword;
- private HashMap<String, Role> roles;
- private HashMap<String, Set<String>> groupRoles;
- private HashMap<String, Set<String>> proxyUserMap;
-
- /**
- * The constructor.
- *
- * @param props
- */
- public XmlUserManager(Props props) {
- xmlPath = props.getString(XML_FILE_PARAM);
-
- parseXMLFile();
- }
-
- private void parseXMLFile() {
- File file = new File(xmlPath);
- if (!file.exists()) {
- throw new IllegalArgumentException("User xml file " + xmlPath + " doesn't exist.");
- }
-
- HashMap<String, User> users = new HashMap<String, User>();
- HashMap<String, String> userPassword = new HashMap<String, String>();
- HashMap<String, Role> roles = new HashMap<String, Role>();
- HashMap<String, Set<String>> groupRoles = new HashMap<String, Set<String>>();
- HashMap<String, Set<String>> proxyUserMap = new HashMap<String, Set<String>>();
-
- // Creating the document builder to parse xml.
- DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory
- .newInstance();
- DocumentBuilder builder = null;
- try {
- builder = docBuilderFactory.newDocumentBuilder();
- } catch (ParserConfigurationException e) {
- throw new IllegalArgumentException("Exception while parsing user xml. Document builder not created.",e);
- }
-
- Document doc = null;
- try {
- doc = builder.parse(file);
- } catch (SAXException e) {
- throw new IllegalArgumentException("Exception while parsing "
- + xmlPath + ". Invalid XML.", e);
- } catch (IOException e) {
- throw new IllegalArgumentException("Exception while parsing "
- + xmlPath + ". Error reading file.", e);
- }
-
- // Only look at first item, because we should only be seeing
- // azkaban-users tag.
- NodeList tagList = doc.getChildNodes();
- Node azkabanUsers = tagList.item(0);
-
- NodeList azkabanUsersList = azkabanUsers.getChildNodes();
- for (int i = 0; i < azkabanUsersList.getLength(); ++i) {
- Node node = azkabanUsersList.item(i);
- if (node.getNodeType() == Node.ELEMENT_NODE) {
- if (node.getNodeName().equals(USER_TAG)) {
- parseUserTag(node, users, userPassword, proxyUserMap);
- }
- else if (node.getNodeName().equals(ROLE_TAG)) {
- parseRoleTag(node, roles);
- }
- else if (node.getNodeName().equals(GROUP_TAG)) {
- parseGroupRoleTag(node, groupRoles);
- }
- }
- }
-
- // Synchronize the swap. Similarly, the gets are synchronized to this.
- synchronized (this) {
- this.users = users;
- this.userPassword = userPassword;
- this.roles = roles;
- this.proxyUserMap = proxyUserMap;
- this.groupRoles = groupRoles;
- }
- }
-
- private void parseUserTag(Node node, HashMap<String, User> users, HashMap<String, String> userPassword, HashMap<String, Set<String>> proxyUserMap) {
- NamedNodeMap userAttrMap = node.getAttributes();
- Node userNameAttr = userAttrMap.getNamedItem(USERNAME_ATTR);
- if (userNameAttr == null) {
- throw new RuntimeException("Error loading user. The '" + USERNAME_ATTR + "' attribute doesn't exist");
- }
-
- Node passwordAttr = userAttrMap.getNamedItem(PASSWORD_ATTR);
- if (passwordAttr == null) {
- throw new RuntimeException("Error loading user. The '" + PASSWORD_ATTR + "' attribute doesn't exist");
- }
-
- // Add user to the user/password map
- String username = userNameAttr.getNodeValue();
- String password = passwordAttr.getNodeValue();
- userPassword.put(username, password);
- // Add the user to the node
- User user = new User(userNameAttr.getNodeValue());
- users.put(username, user);
- logger.info("Loading user " + user.getUserId());
-
- Node roles = userAttrMap.getNamedItem(ROLES_ATTR);
- if (roles != null) {
- String value = roles.getNodeValue();
- String[] roleSplit = value.split("\\s*,\\s*");
- for (String role : roleSplit) {
- user.addRole(role);
- }
- }
-
- Node proxy = userAttrMap.getNamedItem(PROXY_ATTR);
- if (proxy != null) {
- String value = proxy.getNodeValue();
- String[] proxySplit = value.split("\\s*,\\s*");
- for (String proxyUser : proxySplit) {
- Set<String> proxySet = proxyUserMap.get(username);
- if (proxySet == null) {
- proxySet = new HashSet<String>();
- proxyUserMap.put(username, proxySet);
- }
-
- proxySet.add(proxyUser);
- }
- }
-
- Node groups = userAttrMap.getNamedItem(GROUPS_ATTR);
- if (groups != null) {
- String value = groups.getNodeValue();
- String[] groupSplit = value.split("\\s*,\\s*");
- for (String group : groupSplit) {
- user.addGroup(group);
- }
- }
-
- Node emailAttr = userAttrMap.getNamedItem(EMAIL_ATTR);
- if (emailAttr != null) {
- user.setEmail(emailAttr.getNodeValue());
- }
- }
-
- private void parseRoleTag(Node node, HashMap<String, Role> roles) {
- NamedNodeMap roleAttrMap = node.getAttributes();
- Node roleNameAttr = roleAttrMap.getNamedItem(ROLENAME_ATTR);
- if (roleNameAttr == null) {
- throw new RuntimeException(
- "Error loading role. The role 'name' attribute doesn't exist");
- }
- Node permissionAttr = roleAttrMap.getNamedItem(ROLEPERMISSIONS_ATTR);
- if (permissionAttr == null) {
- throw new RuntimeException(
- "Error loading role. The role 'permissions' attribute doesn't exist");
- }
-
- String roleName = roleNameAttr.getNodeValue();
- String permissions = permissionAttr.getNodeValue();
-
- String[] permissionSplit = permissions.split("\\s*,\\s*");
-
- Permission perm = new Permission();
- for (String permString: permissionSplit) {
- try {
- Permission.Type type = Permission.Type.valueOf(permString);
- perm.addPermission(type);
- } catch (IllegalArgumentException e) {
- logger.error("Error adding type " + permString + ". Permission doesn't exist.", e);
- }
- }
-
- Role role = new Role(roleName, perm);
- roles.put(roleName, role);
- }
-
- @Override
- public User getUser(String username, String password) throws UserManagerException {
- if (username == null || username.trim().isEmpty()) {
- throw new UserManagerException("Username is empty.");
- } else if (password == null || password.trim().isEmpty()) {
- throw new UserManagerException("Password is empty.");
- }
-
- // Minimize the synchronization of the get. Shouldn't matter if it
- // doesn't exist.
- String foundPassword = null;
- User user = null;
- synchronized (this) {
- foundPassword = userPassword.get(username);
- if (foundPassword != null) {
- user = users.get(username);
- }
- }
-
- if (foundPassword == null || !foundPassword.equals(password)) {
- throw new UserManagerException("Username/Password not found.");
- }
- // Once it gets to this point, no exception has been thrown. User
- // shoudn't be
- // null, but adding this check for if user and user/password hash tables
- // go
- // out of sync.
- if (user == null) {
- throw new UserManagerException("Internal error: User not found.");
- }
-
- // Add all the roles the group has to the user
- resolveGroupRoles(user);
- user.setPermissions(new UserPermissions() {
- @Override
- public boolean hasPermission(String permission) {
- return true;
- }
-
- @Override
- public void addPermission(String permission) {
- }
- });
- return user;
- }
-
- private void resolveGroupRoles(User user) {
- for (String group: user.getGroups()) {
- Set<String> groupRoleSet = groupRoles.get(group);
- if (groupRoleSet != null) {
- for (String role: groupRoleSet) {
- user.addRole(role);
- }
- }
- }
- }
-
- private void parseGroupRoleTag(Node node, HashMap<String, Set<String>> groupRoles) {
- NamedNodeMap groupAttrMap = node.getAttributes();
- Node groupNameAttr = groupAttrMap.getNamedItem(GROUPNAME_ATTR);
- if (groupNameAttr == null) {
- throw new RuntimeException(
- "Error loading role. The role 'name' attribute doesn't exist");
- }
-
- String groupName = groupNameAttr.getNodeValue();
- Set<String> roleSet = new HashSet<String>();
-
- Node roles = groupAttrMap.getNamedItem(ROLES_ATTR);
- if (roles != null) {
- String value = roles.getNodeValue();
- String[] roleSplit = value.split("\\s*,\\s*");
- for (String role : roleSplit) {
- roleSet.add(role);
- }
- }
-
- groupRoles.put( groupName, roleSet );
- logger.info("Group roles " + groupName + " added.");
- }
-
- @Override
- public boolean validateUser(String username) {
- return users.containsKey(username);
- }
-
- @Override
- public Role getRole(String roleName) {
- return roles.get(roleName);
- }
-
- @Override
- public boolean validateGroup(String group) {
- // Return true. Validation should be added when groups are added to the xml.
- return true;
- }
-
- @Override
- public boolean validateProxyUser(String proxyUser, User realUser) {
- if(proxyUserMap.containsKey(realUser.getUserId()) && proxyUserMap.get(realUser.getUserId()).contains(proxyUser)) {
- return true;
- }
- else {
- return false;
- }
- }
+ private static final Logger logger = Logger.getLogger(XmlUserManager.class
+ .getName());
+
+ public static final String XML_FILE_PARAM = "user.manager.xml.file";
+ public static final String AZKABAN_USERS_TAG = "azkaban-users";
+ public static final String USER_TAG = "user";
+ public static final String ROLE_TAG = "role";
+ public static final String GROUP_TAG = "group";
+ public static final String ROLENAME_ATTR = "name";
+ public static final String ROLEPERMISSIONS_ATTR = "permissions";
+ public static final String USERNAME_ATTR = "username";
+ public static final String PASSWORD_ATTR = "password";
+ public static final String EMAIL_ATTR = "email";
+ public static final String ROLES_ATTR = "roles";
+ public static final String PROXY_ATTR = "proxy";
+ public static final String GROUPS_ATTR = "groups";
+ public static final String GROUPNAME_ATTR = "name";
+
+ private String xmlPath;
+
+ private HashMap<String, User> users;
+ private HashMap<String, String> userPassword;
+ private HashMap<String, Role> roles;
+ private HashMap<String, Set<String>> groupRoles;
+ private HashMap<String, Set<String>> proxyUserMap;
+
+ /**
+ * The constructor.
+ *
+ * @param props
+ */
+ public XmlUserManager(Props props) {
+ xmlPath = props.getString(XML_FILE_PARAM);
+
+ parseXMLFile();
+ }
+
+ private void parseXMLFile() {
+ File file = new File(xmlPath);
+ if (!file.exists()) {
+ throw new IllegalArgumentException("User xml file " + xmlPath
+ + " doesn't exist.");
+ }
+
+ HashMap<String, User> users = new HashMap<String, User>();
+ HashMap<String, String> userPassword = new HashMap<String, String>();
+ HashMap<String, Role> roles = new HashMap<String, Role>();
+ HashMap<String, Set<String>> groupRoles =
+ new HashMap<String, Set<String>>();
+ HashMap<String, Set<String>> proxyUserMap =
+ new HashMap<String, Set<String>>();
+
+ // Creating the document builder to parse xml.
+ DocumentBuilderFactory docBuilderFactory =
+ DocumentBuilderFactory.newInstance();
+ DocumentBuilder builder = null;
+ try {
+ builder = docBuilderFactory.newDocumentBuilder();
+ } catch (ParserConfigurationException e) {
+ throw new IllegalArgumentException(
+ "Exception while parsing user xml. Document builder not created.", e);
+ }
+
+ Document doc = null;
+ try {
+ doc = builder.parse(file);
+ } catch (SAXException e) {
+ throw new IllegalArgumentException("Exception while parsing " + xmlPath
+ + ". Invalid XML.", e);
+ } catch (IOException e) {
+ throw new IllegalArgumentException("Exception while parsing " + xmlPath
+ + ". Error reading file.", e);
+ }
+
+ // Only look at first item, because we should only be seeing
+ // azkaban-users tag.
+ NodeList tagList = doc.getChildNodes();
+ Node azkabanUsers = tagList.item(0);
+
+ NodeList azkabanUsersList = azkabanUsers.getChildNodes();
+ for (int i = 0; i < azkabanUsersList.getLength(); ++i) {
+ Node node = azkabanUsersList.item(i);
+ if (node.getNodeType() == Node.ELEMENT_NODE) {
+ if (node.getNodeName().equals(USER_TAG)) {
+ parseUserTag(node, users, userPassword, proxyUserMap);
+ } else if (node.getNodeName().equals(ROLE_TAG)) {
+ parseRoleTag(node, roles);
+ } else if (node.getNodeName().equals(GROUP_TAG)) {
+ parseGroupRoleTag(node, groupRoles);
+ }
+ }
+ }
+
+ // Synchronize the swap. Similarly, the gets are synchronized to this.
+ synchronized (this) {
+ this.users = users;
+ this.userPassword = userPassword;
+ this.roles = roles;
+ this.proxyUserMap = proxyUserMap;
+ this.groupRoles = groupRoles;
+ }
+ }
+
+ private void parseUserTag(Node node, HashMap<String, User> users,
+ HashMap<String, String> userPassword,
+ HashMap<String, Set<String>> proxyUserMap) {
+ NamedNodeMap userAttrMap = node.getAttributes();
+ Node userNameAttr = userAttrMap.getNamedItem(USERNAME_ATTR);
+ if (userNameAttr == null) {
+ throw new RuntimeException("Error loading user. The '" + USERNAME_ATTR
+ + "' attribute doesn't exist");
+ }
+
+ Node passwordAttr = userAttrMap.getNamedItem(PASSWORD_ATTR);
+ if (passwordAttr == null) {
+ throw new RuntimeException("Error loading user. The '" + PASSWORD_ATTR
+ + "' attribute doesn't exist");
+ }
+
+ // Add user to the user/password map
+ String username = userNameAttr.getNodeValue();
+ String password = passwordAttr.getNodeValue();
+ userPassword.put(username, password);
+ // Add the user to the node
+ User user = new User(userNameAttr.getNodeValue());
+ users.put(username, user);
+ logger.info("Loading user " + user.getUserId());
+
+ Node roles = userAttrMap.getNamedItem(ROLES_ATTR);
+ if (roles != null) {
+ String value = roles.getNodeValue();
+ String[] roleSplit = value.split("\\s*,\\s*");
+ for (String role : roleSplit) {
+ user.addRole(role);
+ }
+ }
+
+ Node proxy = userAttrMap.getNamedItem(PROXY_ATTR);
+ if (proxy != null) {
+ String value = proxy.getNodeValue();
+ String[] proxySplit = value.split("\\s*,\\s*");
+ for (String proxyUser : proxySplit) {
+ Set<String> proxySet = proxyUserMap.get(username);
+ if (proxySet == null) {
+ proxySet = new HashSet<String>();
+ proxyUserMap.put(username, proxySet);
+ }
+
+ proxySet.add(proxyUser);
+ }
+ }
+
+ Node groups = userAttrMap.getNamedItem(GROUPS_ATTR);
+ if (groups != null) {
+ String value = groups.getNodeValue();
+ String[] groupSplit = value.split("\\s*,\\s*");
+ for (String group : groupSplit) {
+ user.addGroup(group);
+ }
+ }
+
+ Node emailAttr = userAttrMap.getNamedItem(EMAIL_ATTR);
+ if (emailAttr != null) {
+ user.setEmail(emailAttr.getNodeValue());
+ }
+ }
+
+ private void parseRoleTag(Node node, HashMap<String, Role> roles) {
+ NamedNodeMap roleAttrMap = node.getAttributes();
+ Node roleNameAttr = roleAttrMap.getNamedItem(ROLENAME_ATTR);
+ if (roleNameAttr == null) {
+ throw new RuntimeException(
+ "Error loading role. The role 'name' attribute doesn't exist");
+ }
+ Node permissionAttr = roleAttrMap.getNamedItem(ROLEPERMISSIONS_ATTR);
+ if (permissionAttr == null) {
+ throw new RuntimeException(
+ "Error loading role. The role 'permissions' attribute doesn't exist");
+ }
+
+ String roleName = roleNameAttr.getNodeValue();
+ String permissions = permissionAttr.getNodeValue();
+
+ String[] permissionSplit = permissions.split("\\s*,\\s*");
+
+ Permission perm = new Permission();
+ for (String permString : permissionSplit) {
+ try {
+ Permission.Type type = Permission.Type.valueOf(permString);
+ perm.addPermission(type);
+ } catch (IllegalArgumentException e) {
+ logger.error("Error adding type " + permString
+ + ". Permission doesn't exist.", e);
+ }
+ }
+
+ Role role = new Role(roleName, perm);
+ roles.put(roleName, role);
+ }
+
  /**
   * Authenticates a user by username and password.
   *
   * @param username login name; must be non-blank.
   * @param password plain-text password; must be non-blank.
   * @return the authenticated User with group-granted roles resolved.
   * @throws UserManagerException if either argument is blank, the
   *           credentials do not match, or the user record is missing.
   */
  @Override
  public User getUser(String username, String password)
      throws UserManagerException {
    if (username == null || username.trim().isEmpty()) {
      throw new UserManagerException("Username is empty.");
    } else if (password == null || password.trim().isEmpty()) {
      throw new UserManagerException("Password is empty.");
    }

    // Hold the lock only for the map reads; the loader swaps the whole
    // maps under the same monitor.
    String foundPassword = null;
    User user = null;
    synchronized (this) {
      foundPassword = userPassword.get(username);
      if (foundPassword != null) {
        user = users.get(username);
      }
    }

    if (foundPassword == null || !foundPassword.equals(password)) {
      throw new UserManagerException("Username/Password not found.");
    }
    // At this point user shouldn't be null, but guard against the user and
    // user/password tables going out of sync.
    if (user == null) {
      throw new UserManagerException("Internal error: User not found.");
    }

    // Add all the roles the user's groups grant.
    resolveGroupRoles(user);
    // NOTE(review): this permission object grants every permission, so
    // per-permission checks are effectively disabled for XML-managed
    // users — confirm this is intended.
    user.setPermissions(new UserPermissions() {
      @Override
      public boolean hasPermission(String permission) {
        return true;
      }

      @Override
      public void addPermission(String permission) {
      }
    });
    return user;
  }
+
+ private void resolveGroupRoles(User user) {
+ for (String group : user.getGroups()) {
+ Set<String> groupRoleSet = groupRoles.get(group);
+ if (groupRoleSet != null) {
+ for (String role : groupRoleSet) {
+ user.addRole(role);
+ }
+ }
+ }
+ }
+
+ private void parseGroupRoleTag(Node node,
+ HashMap<String, Set<String>> groupRoles) {
+ NamedNodeMap groupAttrMap = node.getAttributes();
+ Node groupNameAttr = groupAttrMap.getNamedItem(GROUPNAME_ATTR);
+ if (groupNameAttr == null) {
+ throw new RuntimeException(
+ "Error loading role. The role 'name' attribute doesn't exist");
+ }
+
+ String groupName = groupNameAttr.getNodeValue();
+ Set<String> roleSet = new HashSet<String>();
+
+ Node roles = groupAttrMap.getNamedItem(ROLES_ATTR);
+ if (roles != null) {
+ String value = roles.getNodeValue();
+ String[] roleSplit = value.split("\\s*,\\s*");
+ for (String role : roleSplit) {
+ roleSet.add(role);
+ }
+ }
+
+ groupRoles.put(groupName, roleSet);
+ logger.info("Group roles " + groupName + " added.");
+ }
+
  /**
   * Returns true if a user with this username is loaded.
   * NOTE(review): reads the users map without the monitor used for swaps in
   * getUser — confirm the race with concurrent reloads is acceptable.
   */
  @Override
  public boolean validateUser(String username) {
    return users.containsKey(username);
  }
+
  /** Returns the Role for roleName, or null if no such role is defined. */
  @Override
  public Role getRole(String roleName) {
    return roles.get(roleName);
  }
+
  /**
   * Always returns true; group validation is not implemented for this
   * manager (see inline note).
   */
  @Override
  public boolean validateGroup(String group) {
    // Return true. Validation should be added when groups are added to the xml.
    return true;
  }
+
+ @Override
+ public boolean validateProxyUser(String proxyUser, User realUser) {
+ if (proxyUserMap.containsKey(realUser.getUserId())
+ && proxyUserMap.get(realUser.getUserId()).contains(proxyUser)) {
+ return true;
+ } else {
+ return false;
+ }
+ }
}
src/main/java/azkaban/utils/AbstractMailer.java 141(+73 -68)
diff --git a/src/main/java/azkaban/utils/AbstractMailer.java b/src/main/java/azkaban/utils/AbstractMailer.java
index 8d91456..01136cd 100644
--- a/src/main/java/azkaban/utils/AbstractMailer.java
+++ b/src/main/java/azkaban/utils/AbstractMailer.java
@@ -19,72 +19,77 @@ package azkaban.utils;
import java.util.Collection;
public class AbstractMailer {
- private String clientHostname;
- private int clientPort;
- private boolean usesSSL;
-
- private String mailHost;
- private String mailUser;
- private String mailPassword;
- private String mailSender;
- private String azkabanName;
-
- private String referenceURL;
-
- public AbstractMailer(Props props) {
- this.azkabanName = props.getString("azkaban.name", "azkaban");
- this.mailHost = props.getString("mail.host", "localhost");
- this.mailUser = props.getString("mail.user", "");
- this.mailPassword = props.getString("mail.password", "");
- this.mailSender = props.getString("mail.sender", "");
-
- this.clientHostname = props.get("server.hostname");
- this.clientPort = props.getInt("server.port");
- this.usesSSL = props.getBoolean("server.useSSL");
-
- if (usesSSL) {
- referenceURL = "https://" + clientHostname + (clientPort==443 ? "/" : ":" + clientPort + "/");
- }
- else {
- referenceURL = "http://" + clientHostname + (clientPort==80 ? "/" : ":" + clientPort + "/");
- }
- }
-
- public String getReferenceURL() {
- return referenceURL;
- }
-
- protected EmailMessage createEmailMessage(String subject, String mimetype, Collection<String> emailList) {
- EmailMessage message = new EmailMessage(mailHost, mailUser, mailPassword);
- message.setFromAddress(mailSender);
- message.addAllToAddress(emailList);
- message.setMimeType(mimetype);
- message.setSubject(subject);
-
- return message;
- }
-
- public EmailMessage prepareEmailMessage(String subject, String mimetype, Collection<String> emailList) {
- return createEmailMessage(subject, mimetype, emailList);
- }
-
- public String getAzkabanName() {
- return azkabanName;
- }
-
- public String getMailHost() {
- return mailHost;
- }
-
- public String getMailUser() {
- return mailUser;
- }
-
- public String getMailPassword() {
- return mailPassword;
- }
-
- public String getMailSender() {
- return mailSender;
- }
+ private String clientHostname;
+ private int clientPort;
+ private boolean usesSSL;
+
+ private String mailHost;
+ private String mailUser;
+ private String mailPassword;
+ private String mailSender;
+ private String azkabanName;
+
+ private String referenceURL;
+
+ public AbstractMailer(Props props) {
+ this.azkabanName = props.getString("azkaban.name", "azkaban");
+ this.mailHost = props.getString("mail.host", "localhost");
+ this.mailUser = props.getString("mail.user", "");
+ this.mailPassword = props.getString("mail.password", "");
+ this.mailSender = props.getString("mail.sender", "");
+
+ this.clientHostname = props.get("server.hostname");
+ this.clientPort = props.getInt("server.port");
+ this.usesSSL = props.getBoolean("server.useSSL");
+
+ if (usesSSL) {
+ referenceURL =
+ "https://" + clientHostname
+ + (clientPort == 443 ? "/" : ":" + clientPort + "/");
+ } else {
+ referenceURL =
+ "http://" + clientHostname
+ + (clientPort == 80 ? "/" : ":" + clientPort + "/");
+ }
+ }
+
  /** @return the precomputed base URL of this Azkaban web instance. */
  public String getReferenceURL() {
    return referenceURL;
  }
+
+ protected EmailMessage createEmailMessage(String subject, String mimetype,
+ Collection<String> emailList) {
+ EmailMessage message = new EmailMessage(mailHost, mailUser, mailPassword);
+ message.setFromAddress(mailSender);
+ message.addAllToAddress(emailList);
+ message.setMimeType(mimetype);
+ message.setSubject(subject);
+
+ return message;
+ }
+
  /** Public wrapper around {@link #createEmailMessage}. */
  public EmailMessage prepareEmailMessage(String subject, String mimetype,
      Collection<String> emailList) {
    return createEmailMessage(subject, mimetype, emailList);
  }
+
  /** @return the configured instance name ("azkaban.name"). */
  public String getAzkabanName() {
    return azkabanName;
  }
+
  /** @return the SMTP host ("mail.host"). */
  public String getMailHost() {
    return mailHost;
  }
+
  /** @return the SMTP user ("mail.user"). */
  public String getMailUser() {
    return mailUser;
  }
+
  /** @return the SMTP password ("mail.password"). */
  public String getMailPassword() {
    return mailPassword;
  }
+
  /** @return the From address used for outgoing mail ("mail.sender"). */
  public String getMailSender() {
    return mailSender;
  }
}
src/main/java/azkaban/utils/cache/Cache.java 336(+169 -167)
diff --git a/src/main/java/azkaban/utils/cache/Cache.java b/src/main/java/azkaban/utils/cache/Cache.java
index 5ba5e99..d08bf38 100644
--- a/src/main/java/azkaban/utils/cache/Cache.java
+++ b/src/main/java/azkaban/utils/cache/Cache.java
@@ -21,171 +21,173 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public class Cache {
- private long nextUpdateTime = 0;
- private long updateFrequency = 1 * 60 * 1000;
- private int maxCacheSize = -1;
-
- private long expireTimeToLive = -1; // Never expires
- private long expireTimeToIdle = -1;
-
- private EjectionPolicy ejectionPolicy = EjectionPolicy.LRU;
- private CacheManager manager = null;
-
- private Map<Object, Element<?>> elementMap = new ConcurrentHashMap<Object, Element<?>>();
-
- public enum EjectionPolicy {
- LRU, FIFO
- }
-
- /* package */Cache(CacheManager manager) {
- this.manager = manager;
- }
-
- @SuppressWarnings("unchecked")
- public <T> T get(Object key) {
- Element<?> element = elementMap.get(key);
- if (element == null) {
- return null;
- }
- return (T) element.getElement();
- }
-
- public <T> void put(Object key, T item) {
- Element<T> elem = new Element<T>(key, item);
- elementMap.put(key, elem);
- }
-
- public boolean remove(Object key) {
- Element<?> elem = elementMap.remove(key);
- if (elem == null) {
- return false;
- }
-
- return true;
- }
-
- public Cache setMaxCacheSize(int size) {
- maxCacheSize = size;
- return this;
- }
-
- public Cache setEjectionPolicy(EjectionPolicy policy) {
- ejectionPolicy = policy;
- return this;
- }
-
- public Cache setUpdateFrequencyMs(long updateFrequencyMs) {
- this.updateFrequency = updateFrequencyMs;
- return this;
- }
-
- public Cache setExpiryTimeToLiveMs(long time) {
- this.expireTimeToLive = time;
- if (time > 0) {
- manager.update();
- }
-
- return this;
- }
-
- public Cache setExpiryIdleTimeMs(long time) {
- this.expireTimeToIdle = time;
- if (time > 0) {
- manager.update();
- }
- return this;
- }
-
- public int getSize() {
- return elementMap.size();
- }
-
- public long getExpireTimeToLive() {
- return expireTimeToLive;
- }
-
- public long getExpireTimeToIdle() {
- return expireTimeToIdle;
- }
-
- public synchronized <T> void insertElement(Object key, T item) {
- if (maxCacheSize < 0 || elementMap.size() < maxCacheSize) {
- Element<T> elem = new Element<T>(key, item);
- elementMap.put(key, elem);
- } else {
- internalExpireCache();
-
- Element<T> elem = new Element<T>(key, item);
- if (elementMap.size() < maxCacheSize) {
- elementMap.put(key, elem);
- } else {
- Element<?> element = getNextExpiryElement();
- if (element != null) {
- elementMap.remove(element.getKey());
- }
-
- elementMap.put(key, elem);
- }
- }
- }
-
- private Element<?> getNextExpiryElement() {
- if (ejectionPolicy == EjectionPolicy.LRU) {
- long latestAccessTime = Long.MAX_VALUE;
- Element<?> ejectionCandidate = null;
- for (Element<?> elem : elementMap.values()) {
- if (latestAccessTime > elem.getLastUpdateTime()) {
- latestAccessTime = elem.getLastUpdateTime();
- ejectionCandidate = elem;
- }
- }
-
- return ejectionCandidate;
- } else if (ejectionPolicy == EjectionPolicy.FIFO) {
- long earliestCreateTime = Long.MAX_VALUE;
- Element<?> ejectionCandidate = null;
- for (Element<?> elem : elementMap.values()) {
- if (earliestCreateTime > elem.getCreationTime()) {
- earliestCreateTime = elem.getCreationTime();
- ejectionCandidate = elem;
- }
- }
- return ejectionCandidate;
- }
-
- return null;
- }
-
- public synchronized void expireCache() {
- long currentTime = System.currentTimeMillis();
- if (nextUpdateTime < currentTime) {
- internalExpireCache();
- nextUpdateTime = currentTime + updateFrequency;
- }
- }
-
- private synchronized void internalExpireCache() {
- ArrayList<Element<?>> elems = new ArrayList<Element<?>>(elementMap.values());
-
- for (Element<?> elem : elems) {
- if (shouldExpire(elem)) {
- elementMap.remove(elem.getKey());
- }
- }
- }
-
- private boolean shouldExpire(Element<?> elem) {
- if (expireTimeToLive > -1) {
- if (System.currentTimeMillis() - elem.getCreationTime() > expireTimeToLive) {
- return true;
- }
- }
- if (expireTimeToIdle > -1) {
- if (System.currentTimeMillis() - elem.getLastUpdateTime() > expireTimeToIdle) {
- return true;
- }
- }
-
- return false;
- }
+ private long nextUpdateTime = 0;
+ private long updateFrequency = 1 * 60 * 1000;
+ private int maxCacheSize = -1;
+
+ private long expireTimeToLive = -1; // Never expires
+ private long expireTimeToIdle = -1;
+
+ private EjectionPolicy ejectionPolicy = EjectionPolicy.LRU;
+ private CacheManager manager = null;
+
+ private Map<Object, Element<?>> elementMap =
+ new ConcurrentHashMap<Object, Element<?>>();
+
  /** Eviction strategies: LRU evicts least-recently-read, FIFO oldest-created. */
  public enum EjectionPolicy {
    LRU, FIFO
  }
+
  // Package-private on purpose: caches must be created through
  // CacheManager.createCache() so the manager can track them.
  /* package */Cache(CacheManager manager) {
    this.manager = manager;
  }
+
  /**
   * Returns the cached value for key, or null if absent. Reading refreshes
   * the element's last-access time (via Element.getElement), which matters
   * for LRU eviction.
   */
  @SuppressWarnings("unchecked")
  public <T> T get(Object key) {
    Element<?> element = elementMap.get(key);
    if (element == null) {
      return null;
    }
    // Unchecked: the caller must request the type that was stored.
    return (T) element.getElement();
  }
+
+ public <T> void put(Object key, T item) {
+ Element<T> elem = new Element<T>(key, item);
+ elementMap.put(key, elem);
+ }
+
+ public boolean remove(Object key) {
+ Element<?> elem = elementMap.remove(key);
+ if (elem == null) {
+ return false;
+ }
+
+ return true;
+ }
+
  /** Sets the maximum entry count; negative means unbounded. Fluent. */
  public Cache setMaxCacheSize(int size) {
    maxCacheSize = size;
    return this;
  }
+
  /** Sets the eviction strategy used when the cache is full. Fluent. */
  public Cache setEjectionPolicy(EjectionPolicy policy) {
    ejectionPolicy = policy;
    return this;
  }
+
  /** Sets the minimum interval between expireCache() sweeps. Fluent. */
  public Cache setUpdateFrequencyMs(long updateFrequencyMs) {
    this.updateFrequency = updateFrequencyMs;
    return this;
  }
+
  /**
   * Sets the time-to-live in ms; a positive value notifies the manager so
   * the background expiry thread starts running. Fluent.
   */
  public Cache setExpiryTimeToLiveMs(long time) {
    this.expireTimeToLive = time;
    if (time > 0) {
      manager.update();
    }

    return this;
  }
+
  /**
   * Sets the idle-expiry time in ms; a positive value notifies the manager
   * so the background expiry thread starts running. Fluent.
   */
  public Cache setExpiryIdleTimeMs(long time) {
    this.expireTimeToIdle = time;
    if (time > 0) {
      manager.update();
    }
    return this;
  }
+
  /** @return the current number of cached entries. */
  public int getSize() {
    return elementMap.size();
  }
+
  /** @return the TTL in ms; -1 means entries never expire by age. */
  public long getExpireTimeToLive() {
    return expireTimeToLive;
  }
+
  /** @return the idle limit in ms; -1 means entries never expire by idleness. */
  public long getExpireTimeToIdle() {
    return expireTimeToIdle;
  }
+
  /**
   * Inserts item while honoring maxCacheSize: when at capacity, expired
   * entries are purged first; if still full, one victim chosen by the
   * ejection policy is evicted to make room.
   */
  public synchronized <T> void insertElement(Object key, T item) {
    if (maxCacheSize < 0 || elementMap.size() < maxCacheSize) {
      Element<T> elem = new Element<T>(key, item);
      elementMap.put(key, elem);
    } else {
      // At capacity: first drop anything already expired.
      internalExpireCache();

      Element<T> elem = new Element<T>(key, item);
      if (elementMap.size() < maxCacheSize) {
        elementMap.put(key, elem);
      } else {
        // Still full: evict one entry per the ejection policy.
        Element<?> element = getNextExpiryElement();
        if (element != null) {
          elementMap.remove(element.getKey());
        }

        elementMap.put(key, elem);
      }
    }
  }
+
+ private Element<?> getNextExpiryElement() {
+ if (ejectionPolicy == EjectionPolicy.LRU) {
+ long latestAccessTime = Long.MAX_VALUE;
+ Element<?> ejectionCandidate = null;
+ for (Element<?> elem : elementMap.values()) {
+ if (latestAccessTime > elem.getLastUpdateTime()) {
+ latestAccessTime = elem.getLastUpdateTime();
+ ejectionCandidate = elem;
+ }
+ }
+
+ return ejectionCandidate;
+ } else if (ejectionPolicy == EjectionPolicy.FIFO) {
+ long earliestCreateTime = Long.MAX_VALUE;
+ Element<?> ejectionCandidate = null;
+ for (Element<?> elem : elementMap.values()) {
+ if (earliestCreateTime > elem.getCreationTime()) {
+ earliestCreateTime = elem.getCreationTime();
+ ejectionCandidate = elem;
+ }
+ }
+ return ejectionCandidate;
+ }
+
+ return null;
+ }
+
  /**
   * Purges expired entries, but at most once per updateFrequency window so
   * frequent calls stay cheap.
   */
  public synchronized void expireCache() {
    long currentTime = System.currentTimeMillis();
    if (nextUpdateTime < currentTime) {
      internalExpireCache();
      nextUpdateTime = currentTime + updateFrequency;
    }
  }
+
+ private synchronized void internalExpireCache() {
+ ArrayList<Element<?>> elems =
+ new ArrayList<Element<?>>(elementMap.values());
+
+ for (Element<?> elem : elems) {
+ if (shouldExpire(elem)) {
+ elementMap.remove(elem.getKey());
+ }
+ }
+ }
+
+ private boolean shouldExpire(Element<?> elem) {
+ if (expireTimeToLive > -1) {
+ if (System.currentTimeMillis() - elem.getCreationTime() > expireTimeToLive) {
+ return true;
+ }
+ }
+ if (expireTimeToIdle > -1) {
+ if (System.currentTimeMillis() - elem.getLastUpdateTime() > expireTimeToIdle) {
+ return true;
+ }
+ }
+
+ return false;
+ }
}
src/main/java/azkaban/utils/cache/CacheManager.java 218(+109 -109)
diff --git a/src/main/java/azkaban/utils/cache/CacheManager.java b/src/main/java/azkaban/utils/cache/CacheManager.java
index 139af88..724eaee 100644
--- a/src/main/java/azkaban/utils/cache/CacheManager.java
+++ b/src/main/java/azkaban/utils/cache/CacheManager.java
@@ -20,113 +20,113 @@ import java.util.HashSet;
import java.util.Set;
public class CacheManager {
- // Thread that expires caches at
- private static final long UPDATE_FREQUENCY = 30000; // Every 30 sec by default.
-
- private long updateFrequency = UPDATE_FREQUENCY;
- private Set<Cache> caches;
- private static CacheManager manager = null;
- private final CacheManagerThread updaterThread;
-
- private boolean activeExpiry = false;
-
- public static CacheManager getInstance() {
- if (manager == null) {
- manager = new CacheManager();
- }
-
- return manager;
- }
-
- private CacheManager() {
- updaterThread = new CacheManagerThread();
- caches = new HashSet<Cache>();
-
- updaterThread.start();
- }
-
- public static void setUpdateFrequency(long updateFreqMs) {
- manager.internalUpdateFrequency(updateFreqMs);
- }
-
- public static void shutdown() {
- manager.internalShutdown();
- }
-
- public Cache createCache() {
- Cache cache = new Cache(manager);
- manager.internalAddCache(cache);
- return cache;
- }
-
- public void removeCache(Cache cache) {
- manager.internalRemoveCache(cache);
- }
-
- private void internalUpdateFrequency(long updateFreq) {
- updateFrequency = updateFreq;
- updaterThread.interrupt();
- }
-
- private void internalAddCache(Cache cache) {
- caches.add(cache);
- updaterThread.interrupt();
- }
-
- private void internalRemoveCache(Cache cache) {
- caches.remove(cache);
- }
-
- private synchronized void internalShutdown() {
- updaterThread.shutdown();
- }
-
- /* package */synchronized void update() {
- boolean activeExpiry = false;
- for (Cache cache : caches) {
- if (cache.getExpireTimeToIdle() > 0
- || cache.getExpireTimeToLive() > 0) {
- activeExpiry = true;
- break;
- }
- }
-
- if (this.activeExpiry != activeExpiry && activeExpiry) {
- this.activeExpiry = activeExpiry;
- updaterThread.interrupt();
- }
- }
-
- private class CacheManagerThread extends Thread {
- private boolean shutdown = false;
-
- public void run() {
- while (!shutdown) {
- if (activeExpiry) {
- for (Cache cache : caches) {
- cache.expireCache();
- }
-
- synchronized (this) {
- try {
- wait(updateFrequency);
- } catch (InterruptedException e) {
- }
- }
- } else {
- synchronized (this) {
- try {
- wait();
- } catch (InterruptedException e) {
- }
- }
- }
- }
- }
-
- public void shutdown() {
- this.shutdown = true;
- updaterThread.interrupt();
- }
- }
+ // Thread that expires caches at
+ // Every 30 sec by default.
+ private static final long UPDATE_FREQUENCY = 30000;
+
+ private long updateFrequency = UPDATE_FREQUENCY;
+ private Set<Cache> caches;
+ private static CacheManager manager = null;
+ private final CacheManagerThread updaterThread;
+
+ private boolean activeExpiry = false;
+
+ public static CacheManager getInstance() {
+ if (manager == null) {
+ manager = new CacheManager();
+ }
+
+ return manager;
+ }
+
  /**
   * Starts the background expiry thread.
   * NOTE(review): starting a thread in the constructor lets 'this' escape
   * before construction completes — confirm acceptable for this singleton.
   */
  private CacheManager() {
    updaterThread = new CacheManagerThread();
    caches = new HashSet<Cache>();

    updaterThread.start();
  }
+
  /**
   * Changes how often the expiry thread runs.
   * NOTE(review): throws NPE if called before getInstance() — confirm
   * callers always initialize the singleton first.
   */
  public static void setUpdateFrequency(long updateFreqMs) {
    manager.internalUpdateFrequency(updateFreqMs);
  }
+
  /**
   * Stops the background expiry thread.
   * NOTE(review): throws NPE if called before getInstance() — confirm
   * callers always initialize the singleton first.
   */
  public static void shutdown() {
    manager.internalShutdown();
  }
+
+ public Cache createCache() {
+ Cache cache = new Cache(manager);
+ manager.internalAddCache(cache);
+ return cache;
+ }
+
+ public void removeCache(Cache cache) {
+ manager.internalRemoveCache(cache);
+ }
+
  // Applies the new frequency and wakes the updater so it takes effect now.
  private void internalUpdateFrequency(long updateFreq) {
    updateFrequency = updateFreq;
    updaterThread.interrupt();
  }
+
  // Registers the cache and wakes the updater to pick it up.
  // NOTE(review): 'caches' is a plain HashSet mutated here while the updater
  // thread iterates it without a shared lock — possible
  // ConcurrentModificationException; confirm.
  private void internalAddCache(Cache cache) {
    caches.add(cache);
    updaterThread.interrupt();
  }
+
  // Deregisters the cache from expiry processing.
  private void internalRemoveCache(Cache cache) {
    caches.remove(cache);
  }
+
  // Signals the updater thread to exit its loop.
  private synchronized void internalShutdown() {
    updaterThread.shutdown();
  }
+
  /**
   * Called by caches when an expiry setting changes: if any cache now has
   * a positive TTL or idle limit, wake the updater thread so it begins
   * actively expiring.
   */
  /* package */synchronized void update() {
    boolean activeExpiry = false;
    for (Cache cache : caches) {
      if (cache.getExpireTimeToIdle() > 0 || cache.getExpireTimeToLive() > 0) {
        activeExpiry = true;
        break;
      }
    }

    // Only the false -> true transition wakes the thread; the flag is
    // never cleared here.
    if (this.activeExpiry != activeExpiry && activeExpiry) {
      this.activeExpiry = activeExpiry;
      updaterThread.interrupt();
    }
  }
+
  /**
   * Background thread that periodically expires all registered caches.
   * Interrupt is used as a wakeup signal (settings changed, cache added,
   * or shutdown), so InterruptedException is deliberately swallowed.
   */
  private class CacheManagerThread extends Thread {
    // Set by shutdown(); checked at the top of every loop iteration.
    private boolean shutdown = false;

    public void run() {
      while (!shutdown) {
        if (activeExpiry) {
          for (Cache cache : caches) {
            cache.expireCache();
          }

          synchronized (this) {
            try {
              wait(updateFrequency);
            } catch (InterruptedException e) {
              // Wakeup signal; loop re-checks state.
            }
          }
        } else {
          // No cache has expiry enabled; sleep until interrupted.
          synchronized (this) {
            try {
              wait();
            } catch (InterruptedException e) {
              // Wakeup signal; loop re-checks state.
            }
          }
        }
      }
    }

    public void shutdown() {
      this.shutdown = true;
      updaterThread.interrupt();
    }
  }
}
src/main/java/azkaban/utils/cache/Element.java 46(+23 -23)
diff --git a/src/main/java/azkaban/utils/cache/Element.java b/src/main/java/azkaban/utils/cache/Element.java
index e064ac7..e1eb69d 100644
--- a/src/main/java/azkaban/utils/cache/Element.java
+++ b/src/main/java/azkaban/utils/cache/Element.java
@@ -17,32 +17,32 @@
package azkaban.utils.cache;
public class Element<T> {
- private Object key;
- private T element;
- private long creationTime = 0;
- private long lastAccessTime = 0;
+ private Object key;
+ private T element;
+ private long creationTime = 0;
+ private long lastAccessTime = 0;
- public Element(Object key, T element) {
- this.key = key;
- creationTime = System.currentTimeMillis();
- lastAccessTime = creationTime;
- this.element = element;
- }
+ public Element(Object key, T element) {
+ this.key = key;
+ creationTime = System.currentTimeMillis();
+ lastAccessTime = creationTime;
+ this.element = element;
+ }
- public Object getKey() {
- return key;
- }
+ public Object getKey() {
+ return key;
+ }
- public T getElement() {
- lastAccessTime = System.currentTimeMillis();
- return element;
- }
+ public T getElement() {
+ lastAccessTime = System.currentTimeMillis();
+ return element;
+ }
- public long getCreationTime() {
- return creationTime;
- }
+ public long getCreationTime() {
+ return creationTime;
+ }
- public long getLastUpdateTime() {
- return lastAccessTime;
- }
+ public long getLastUpdateTime() {
+ return lastAccessTime;
+ }
}
src/main/java/azkaban/utils/CircularBuffer.java 85(+43 -42)
diff --git a/src/main/java/azkaban/utils/CircularBuffer.java b/src/main/java/azkaban/utils/CircularBuffer.java
index c8e1934..39e30c9 100644
--- a/src/main/java/azkaban/utils/CircularBuffer.java
+++ b/src/main/java/azkaban/utils/CircularBuffer.java
@@ -24,50 +24,51 @@ import com.google.common.base.Joiner;
import com.google.common.collect.Iterators;
/**
- * A circular buffer of items of a given length. It will grow up to the give size as items are appended, then
- * it will begin to overwrite older items.
- *
+ * A circular buffer of items of a given length. It will grow up to the give
+ * size as items are appended, then it will begin to overwrite older items.
+ *
* @param <T> The type of the item contained.
*/
public class CircularBuffer<T> implements Iterable<T> {
-
- private final List<T> lines;
- private final int size;
- private int start;
-
- public CircularBuffer(int size) {
- this.lines = new ArrayList<T>();
- this.size = size;
- this.start = 0;
- }
-
- public void append(T line) {
- if(lines.size() < size) {
- lines.add(line);
- } else {
- lines.set(start, line);
- start = (start + 1) % size;
- }
- }
-
- @Override
- public String toString() {
- return "[" + Joiner.on(", ").join(lines) + "]";
- }
- public Iterator<T> iterator() {
- if(start == 0)
- return lines.iterator();
- else
- return Iterators.concat(lines.subList(start, lines.size()).iterator(), lines.subList(0, start).iterator());
- }
-
- public int getMaxSize() {
- return this.size;
- }
-
- public int getSize() {
- return this.lines.size();
- }
-
+ private final List<T> lines;
+ private final int size;
+ private int start;
+
  /** Creates an empty buffer that holds at most size items. */
  public CircularBuffer(int size) {
    this.lines = new ArrayList<T>();
    this.size = size;
    this.start = 0;
  }
+
+ public void append(T line) {
+ if (lines.size() < size) {
+ lines.add(line);
+ } else {
+ lines.set(start, line);
+ start = (start + 1) % size;
+ }
+ }
+
  /**
   * Renders the backing list in storage order.
   * NOTE(review): joins the list directly, so after the buffer wraps the
   * output is not oldest-first (unlike iterator()) — confirm intended.
   */
  @Override
  public String toString() {
    return "[" + Joiner.on(", ").join(lines) + "]";
  }
+
+ public Iterator<T> iterator() {
+ if (start == 0)
+ return lines.iterator();
+ else
+ return Iterators.concat(lines.subList(start, lines.size()).iterator(),
+ lines.subList(0, start).iterator());
+ }
+
  /** @return the fixed capacity of the buffer. */
  public int getMaxSize() {
    return this.size;
  }
+
  /** @return the number of items currently held (≤ capacity). */
  public int getSize() {
    return this.lines.size();
  }
+
}
src/main/java/azkaban/utils/DirectoryFlowLoader.java 706(+358 -348)
diff --git a/src/main/java/azkaban/utils/DirectoryFlowLoader.java b/src/main/java/azkaban/utils/DirectoryFlowLoader.java
index 2bcd069..86bfd07 100644
--- a/src/main/java/azkaban/utils/DirectoryFlowLoader.java
+++ b/src/main/java/azkaban/utils/DirectoryFlowLoader.java
@@ -38,352 +38,362 @@ import azkaban.flow.Node;
import azkaban.flow.SpecialJobTypes;
public class DirectoryFlowLoader {
- private static final DirFilter DIR_FILTER = new DirFilter();
- private static final String PROPERTY_SUFFIX = ".properties";
- private static final String JOB_SUFFIX = ".job";
-
- private final Logger logger;
- private HashSet<String> rootNodes;
- private HashMap<String, Flow> flowMap;
- private HashMap<String, Node> nodeMap;
- private HashMap<String, Map<String, Edge>> nodeDependencies;
- private HashMap<String, Props> jobPropsMap;
-
- // Flow dependencies for embedded flows.
- private HashMap<String, Set<String>> flowDependencies;
-
- private ArrayList<FlowProps> flowPropsList;
- private ArrayList<Props> propsList;
- private Set<String> errors;
- private Set<String> duplicateJobs;
-
- public DirectoryFlowLoader(Logger logger) {
- this.logger = logger;
- }
-
- public Map<String, Flow> getFlowMap() {
- return flowMap;
- }
-
- public Set<String> getErrors() {
- return errors;
- }
-
- public Map<String, Props> getJobProps() {
- return jobPropsMap;
- }
-
- public List<Props> getProps() {
- return propsList;
- }
-
- public void loadProjectFlow(File baseDirectory) {
- propsList = new ArrayList<Props>();
- flowPropsList = new ArrayList<FlowProps>();
- jobPropsMap = new HashMap<String, Props>();
- nodeMap = new HashMap<String, Node>();
- flowMap = new HashMap<String, Flow>();
- errors = new HashSet<String>();
- duplicateJobs = new HashSet<String>();
- nodeDependencies = new HashMap<String, Map<String, Edge>>();
- rootNodes = new HashSet<String>();
- flowDependencies = new HashMap<String, Set<String>>();
-
- // Load all the props files and create the Node objects
- loadProjectFromDir(baseDirectory.getPath(), baseDirectory, null);
-
- // Create edges and find missing dependencies
- resolveDependencies();
-
- // Create the flows.
- buildFlowsFromDependencies();
-
- // Resolve embedded flows
- resolveEmbeddedFlows();
- }
-
- private void loadProjectFromDir(String base, File dir, Props parent) {
- File[] propertyFiles = dir.listFiles(new SuffixFilter(PROPERTY_SUFFIX));
- Arrays.sort(propertyFiles);
-
- for (File file: propertyFiles) {
- String relative = getRelativeFilePath(base, file.getPath());
- try {
- parent = new Props(parent, file);
- parent.setSource(relative);
-
- FlowProps flowProps = new FlowProps(parent);
- flowPropsList.add(flowProps);
- } catch (IOException e) {
- errors.add("Error loading properties " + file.getName() + ":" + e.getMessage());
- }
-
- logger.info("Adding " + relative);
- propsList.add(parent);
- }
-
- // Load all Job files. If there's a duplicate name, then we don't load
- File[] jobFiles = dir.listFiles(new SuffixFilter(JOB_SUFFIX));
- for (File file: jobFiles) {
- String jobName = getNameWithoutExtension(file);
- try {
- if (!duplicateJobs.contains(jobName)) {
- if (jobPropsMap.containsKey(jobName)) {
- errors.add("Duplicate job names found '" + jobName + "'.");
- duplicateJobs.add(jobName);
- jobPropsMap.remove(jobName);
- nodeMap.remove(jobName);
- }
- else {
- Props prop = new Props(parent, file);
- String relative = getRelativeFilePath(base, file.getPath());
- prop.setSource(relative);
-
- Node node = new Node(jobName);
- String type = prop.getString("type", null);
- if(type == null) {
- errors.add("Job doesn't have type set '" + jobName + "'.");
- }
-
- node.setType(type);
-
- node.setJobSource(relative);
- if (parent != null) {
- node.setPropsSource(parent.getSource());
- }
-
- // Force root node
- if(prop.getBoolean(CommonJobProperties.ROOT_NODE, false)) {
- rootNodes.add(jobName);
- }
-
- jobPropsMap.put(jobName, prop);
- nodeMap.put(jobName, node);
- }
- }
- } catch (IOException e) {
- errors.add("Error loading job file " + file.getName() + ":" + e.getMessage());
- }
- }
-
- File[] subDirs = dir.listFiles(DIR_FILTER);
- for (File file: subDirs) {
- loadProjectFromDir(base, file, parent);
- }
- }
-
- private void resolveEmbeddedFlows() {
- for (String flowId: flowDependencies.keySet()) {
- HashSet<String> visited = new HashSet<String>();
- resolveEmbeddedFlow(flowId, visited);
- }
- }
-
- private void resolveEmbeddedFlow(String flowId, Set<String> visited) {
- Set<String> embeddedFlow = flowDependencies.get(flowId);
- if (embeddedFlow == null) {
- return;
- }
-
- visited.add(flowId);
- for (String embeddedFlowId: embeddedFlow) {
- if (visited.contains(embeddedFlowId)) {
- errors.add("Embedded flow cycle found in " + flowId + "->" + embeddedFlowId);
- return;
- }
- else if (!flowMap.containsKey(embeddedFlowId)) {
- errors.add("Flow " + flowId + " depends on " + embeddedFlowId + " but can't be found.");
- return;
- }
- else {
- resolveEmbeddedFlow(embeddedFlowId, visited);
- }
- }
-
- visited.remove(flowId);
- }
-
- private void resolveDependencies() {
- // Add all the in edges and out edges. Catch bad dependencies and self referrals. Also collect list of nodes who are parents.
- for (Node node: nodeMap.values()) {
- Props props = jobPropsMap.get(node.getId());
-
- if (props == null) {
- logger.error("Job props not found!! For some reason.");
- continue;
- }
-
- List<String> dependencyList = props.getStringList(CommonJobProperties.DEPENDENCIES, (List<String>)null);
-
- if (dependencyList != null) {
- Map<String, Edge> dependencies = nodeDependencies.get(node.getId());
- if (dependencies == null) {
- dependencies = new HashMap<String, Edge>();
-
- for (String dependencyName : dependencyList) {
- dependencyName = dependencyName == null ? null : dependencyName.trim();
- if (dependencyName == null || dependencyName.isEmpty()) {
- continue;
- }
-
- Edge edge = new Edge(dependencyName, node.getId());
- Node dependencyNode = nodeMap.get(dependencyName);
- if (dependencyNode == null) {
- if (duplicateJobs.contains(dependencyName)) {
- edge.setError("Ambiguous Dependency. Duplicates found.");
- dependencies.put(dependencyName, edge);
- errors.add(node.getId() + " has ambiguous dependency " + dependencyName);
- }
- else {
- edge.setError("Dependency not found.");
- dependencies.put(dependencyName, edge);
- errors.add(node.getId() + " cannot find dependency " + dependencyName);
- }
- }
- else if (dependencyNode == node) {
- // We have a self cycle
- edge.setError("Self cycle found.");
- dependencies.put(dependencyName, edge);
- errors.add(node.getId() + " has a self cycle");
- }
- else {
- dependencies.put(dependencyName, edge);
- }
- }
-
- if (!dependencies.isEmpty()) {
- nodeDependencies.put(node.getId(), dependencies);
- }
- }
- }
- }
- }
-
- private void buildFlowsFromDependencies() {
- // Find all root nodes by finding ones without dependents.
- HashSet<String> nonRootNodes = new HashSet<String>();
- for (Map<String, Edge> edges: nodeDependencies.values()) {
- for (String sourceId: edges.keySet()) {
- nonRootNodes.add(sourceId);
- }
- }
-
- // Now create flows. Bad flows are marked invalid
- Set<String> visitedNodes = new HashSet<String>();
- for (Node base: nodeMap.values()) {
- // Root nodes can be discovered when parsing jobs
- if (rootNodes.contains(base.getId()) || !nonRootNodes.contains(base.getId())) {
- rootNodes.add(base.getId());
- Flow flow = new Flow(base.getId());
- Props jobProp = jobPropsMap.get(base.getId());
-
- // Dedup with sets
- @SuppressWarnings("unchecked")
- List<String> successEmailList = jobProp.getStringList(CommonJobProperties.SUCCESS_EMAILS, Collections.EMPTY_LIST);
- Set<String> successEmail = new HashSet<String>();
- for (String email: successEmailList) {
- successEmail.add(email.toLowerCase());
- }
-
- @SuppressWarnings("unchecked")
- List<String> failureEmailList = jobProp.getStringList(CommonJobProperties.FAILURE_EMAILS, Collections.EMPTY_LIST);
- Set<String> failureEmail = new HashSet<String>();
- for (String email: failureEmailList) {
- failureEmail.add(email.toLowerCase());
- }
-
- @SuppressWarnings("unchecked")
- List<String> notifyEmailList = jobProp.getStringList(CommonJobProperties.NOTIFY_EMAILS, Collections.EMPTY_LIST);
- for (String email: notifyEmailList) {
- email = email.toLowerCase();
- successEmail.add(email);
- failureEmail.add(email);
- }
-
- flow.addFailureEmails(failureEmail);
- flow.addSuccessEmails(successEmail);
-
- flow.addAllFlowProperties(flowPropsList);
- constructFlow(flow, base, visitedNodes);
- flow.initialize();
- flowMap.put(base.getId(), flow);
- }
- }
- }
-
- private void constructFlow(Flow flow, Node node, Set<String> visited) {
- visited.add(node.getId());
-
- flow.addNode(node);
- if (SpecialJobTypes.EMBEDDED_FLOW_TYPE.equals(node.getType())) {
- Props props = jobPropsMap.get(node.getId());
- String embeddedFlow = props.get(SpecialJobTypes.FLOW_NAME);
-
- Set<String> embeddedFlows = flowDependencies.get(flow.getId());
- if (embeddedFlows == null) {
- embeddedFlows = new HashSet<String>();
- flowDependencies.put(flow.getId(), embeddedFlows);
- }
-
- node.setEmbeddedFlowId(embeddedFlow);
- embeddedFlows.add(embeddedFlow);
- }
- Map<String, Edge> dependencies = nodeDependencies.get(node.getId());
-
- if (dependencies != null) {
- for (Edge edge: dependencies.values()) {
- if (edge.hasError()) {
- flow.addEdge(edge);
- }
- else if (visited.contains(edge.getSourceId())){
- // We have a cycle. We set it as an error edge
- edge = new Edge(edge.getSourceId(), node.getId());
- edge.setError("Cyclical dependencies found.");
- errors.add("Cyclical dependency found at " + edge.getId());
- flow.addEdge(edge);
- }
- else {
- // This should not be null
- flow.addEdge(edge);
- Node sourceNode = nodeMap.get(edge.getSourceId());
- constructFlow(flow, sourceNode, visited);
- }
- }
- }
-
- visited.remove(node.getId());
- }
-
- private String getNameWithoutExtension(File file) {
- String filename = file.getName();
- int index = filename.lastIndexOf('.');
-
- return index < 0 ? filename : filename.substring(0, index);
- }
-
- private String getRelativeFilePath(String basePath, String filePath) {
- return filePath.substring(basePath.length() + 1);
- }
-
- private static class DirFilter implements FileFilter {
- @Override
- public boolean accept(File pathname) {
- return pathname.isDirectory();
- }
- }
-
- private static class SuffixFilter implements FileFilter {
- private String suffix;
-
- public SuffixFilter(String suffix) {
- this.suffix = suffix;
- }
-
- @Override
- public boolean accept(File pathname) {
- String name = pathname.getName();
-
- return pathname.isFile() && !pathname.isHidden() && name.length() > suffix.length() && name.endsWith(suffix);
- }
- }
+ private static final DirFilter DIR_FILTER = new DirFilter();
+ private static final String PROPERTY_SUFFIX = ".properties";
+ private static final String JOB_SUFFIX = ".job";
+
+ private final Logger logger;
+ private HashSet<String> rootNodes;
+ private HashMap<String, Flow> flowMap;
+ private HashMap<String, Node> nodeMap;
+ private HashMap<String, Map<String, Edge>> nodeDependencies;
+ private HashMap<String, Props> jobPropsMap;
+
+ // Flow dependencies for embedded flows.
+ private HashMap<String, Set<String>> flowDependencies;
+
+ private ArrayList<FlowProps> flowPropsList;
+ private ArrayList<Props> propsList;
+ private Set<String> errors;
+ private Set<String> duplicateJobs;
+
+ public DirectoryFlowLoader(Logger logger) {
+ this.logger = logger;
+ }
+
+ public Map<String, Flow> getFlowMap() {
+ return flowMap;
+ }
+
+ public Set<String> getErrors() {
+ return errors;
+ }
+
+ public Map<String, Props> getJobProps() {
+ return jobPropsMap;
+ }
+
+ public List<Props> getProps() {
+ return propsList;
+ }
+
+ public void loadProjectFlow(File baseDirectory) {
+ propsList = new ArrayList<Props>();
+ flowPropsList = new ArrayList<FlowProps>();
+ jobPropsMap = new HashMap<String, Props>();
+ nodeMap = new HashMap<String, Node>();
+ flowMap = new HashMap<String, Flow>();
+ errors = new HashSet<String>();
+ duplicateJobs = new HashSet<String>();
+ nodeDependencies = new HashMap<String, Map<String, Edge>>();
+ rootNodes = new HashSet<String>();
+ flowDependencies = new HashMap<String, Set<String>>();
+
+ // Load all the props files and create the Node objects
+ loadProjectFromDir(baseDirectory.getPath(), baseDirectory, null);
+
+ // Create edges and find missing dependencies
+ resolveDependencies();
+
+ // Create the flows.
+ buildFlowsFromDependencies();
+
+ // Resolve embedded flows
+ resolveEmbeddedFlows();
+ }
+
+ private void loadProjectFromDir(String base, File dir, Props parent) {
+ File[] propertyFiles = dir.listFiles(new SuffixFilter(PROPERTY_SUFFIX));
+ Arrays.sort(propertyFiles);
+
+ for (File file : propertyFiles) {
+ String relative = getRelativeFilePath(base, file.getPath());
+ try {
+ parent = new Props(parent, file);
+ parent.setSource(relative);
+
+ FlowProps flowProps = new FlowProps(parent);
+ flowPropsList.add(flowProps);
+ } catch (IOException e) {
+ errors.add("Error loading properties " + file.getName() + ":"
+ + e.getMessage());
+ }
+
+ logger.info("Adding " + relative);
+ propsList.add(parent);
+ }
+
+    // Load all job files. If a duplicate job name is found, neither file is
+ File[] jobFiles = dir.listFiles(new SuffixFilter(JOB_SUFFIX));
+ for (File file : jobFiles) {
+ String jobName = getNameWithoutExtension(file);
+ try {
+ if (!duplicateJobs.contains(jobName)) {
+ if (jobPropsMap.containsKey(jobName)) {
+ errors.add("Duplicate job names found '" + jobName + "'.");
+ duplicateJobs.add(jobName);
+ jobPropsMap.remove(jobName);
+ nodeMap.remove(jobName);
+ } else {
+ Props prop = new Props(parent, file);
+ String relative = getRelativeFilePath(base, file.getPath());
+ prop.setSource(relative);
+
+ Node node = new Node(jobName);
+ String type = prop.getString("type", null);
+ if (type == null) {
+ errors.add("Job doesn't have type set '" + jobName + "'.");
+ }
+
+ node.setType(type);
+
+ node.setJobSource(relative);
+ if (parent != null) {
+ node.setPropsSource(parent.getSource());
+ }
+
+ // Force root node
+ if (prop.getBoolean(CommonJobProperties.ROOT_NODE, false)) {
+ rootNodes.add(jobName);
+ }
+
+ jobPropsMap.put(jobName, prop);
+ nodeMap.put(jobName, node);
+ }
+ }
+ } catch (IOException e) {
+ errors.add("Error loading job file " + file.getName() + ":"
+ + e.getMessage());
+ }
+ }
+
+ File[] subDirs = dir.listFiles(DIR_FILTER);
+ for (File file : subDirs) {
+ loadProjectFromDir(base, file, parent);
+ }
+ }
+
+ private void resolveEmbeddedFlows() {
+ for (String flowId : flowDependencies.keySet()) {
+ HashSet<String> visited = new HashSet<String>();
+ resolveEmbeddedFlow(flowId, visited);
+ }
+ }
+
+ private void resolveEmbeddedFlow(String flowId, Set<String> visited) {
+ Set<String> embeddedFlow = flowDependencies.get(flowId);
+ if (embeddedFlow == null) {
+ return;
+ }
+
+ visited.add(flowId);
+ for (String embeddedFlowId : embeddedFlow) {
+ if (visited.contains(embeddedFlowId)) {
+ errors.add("Embedded flow cycle found in " + flowId + "->"
+ + embeddedFlowId);
+ return;
+ } else if (!flowMap.containsKey(embeddedFlowId)) {
+ errors.add("Flow " + flowId + " depends on " + embeddedFlowId
+ + " but can't be found.");
+ return;
+ } else {
+ resolveEmbeddedFlow(embeddedFlowId, visited);
+ }
+ }
+
+ visited.remove(flowId);
+ }
+
+ private void resolveDependencies() {
+    // Add all the in edges and out edges. Catch bad dependencies and self
+    // referrals. Also collect the list of nodes that are parents.
+ for (Node node : nodeMap.values()) {
+ Props props = jobPropsMap.get(node.getId());
+
+ if (props == null) {
+ logger.error("Job props not found!! For some reason.");
+ continue;
+ }
+
+ List<String> dependencyList =
+ props.getStringList(CommonJobProperties.DEPENDENCIES,
+ (List<String>) null);
+
+ if (dependencyList != null) {
+ Map<String, Edge> dependencies = nodeDependencies.get(node.getId());
+ if (dependencies == null) {
+ dependencies = new HashMap<String, Edge>();
+
+ for (String dependencyName : dependencyList) {
+ dependencyName =
+ dependencyName == null ? null : dependencyName.trim();
+ if (dependencyName == null || dependencyName.isEmpty()) {
+ continue;
+ }
+
+ Edge edge = new Edge(dependencyName, node.getId());
+ Node dependencyNode = nodeMap.get(dependencyName);
+ if (dependencyNode == null) {
+ if (duplicateJobs.contains(dependencyName)) {
+ edge.setError("Ambiguous Dependency. Duplicates found.");
+ dependencies.put(dependencyName, edge);
+ errors.add(node.getId() + " has ambiguous dependency "
+ + dependencyName);
+ } else {
+ edge.setError("Dependency not found.");
+ dependencies.put(dependencyName, edge);
+ errors.add(node.getId() + " cannot find dependency "
+ + dependencyName);
+ }
+ } else if (dependencyNode == node) {
+ // We have a self cycle
+ edge.setError("Self cycle found.");
+ dependencies.put(dependencyName, edge);
+ errors.add(node.getId() + " has a self cycle");
+ } else {
+ dependencies.put(dependencyName, edge);
+ }
+ }
+
+ if (!dependencies.isEmpty()) {
+ nodeDependencies.put(node.getId(), dependencies);
+ }
+ }
+ }
+ }
+ }
+
+ private void buildFlowsFromDependencies() {
+ // Find all root nodes by finding ones without dependents.
+ HashSet<String> nonRootNodes = new HashSet<String>();
+ for (Map<String, Edge> edges : nodeDependencies.values()) {
+ for (String sourceId : edges.keySet()) {
+ nonRootNodes.add(sourceId);
+ }
+ }
+
+ // Now create flows. Bad flows are marked invalid
+ Set<String> visitedNodes = new HashSet<String>();
+ for (Node base : nodeMap.values()) {
+ // Root nodes can be discovered when parsing jobs
+ if (rootNodes.contains(base.getId())
+ || !nonRootNodes.contains(base.getId())) {
+ rootNodes.add(base.getId());
+ Flow flow = new Flow(base.getId());
+ Props jobProp = jobPropsMap.get(base.getId());
+
+ // Dedup with sets
+ @SuppressWarnings("unchecked")
+ List<String> successEmailList =
+ jobProp.getStringList(CommonJobProperties.SUCCESS_EMAILS,
+ Collections.EMPTY_LIST);
+ Set<String> successEmail = new HashSet<String>();
+ for (String email : successEmailList) {
+ successEmail.add(email.toLowerCase());
+ }
+
+ @SuppressWarnings("unchecked")
+ List<String> failureEmailList =
+ jobProp.getStringList(CommonJobProperties.FAILURE_EMAILS,
+ Collections.EMPTY_LIST);
+ Set<String> failureEmail = new HashSet<String>();
+ for (String email : failureEmailList) {
+ failureEmail.add(email.toLowerCase());
+ }
+
+ @SuppressWarnings("unchecked")
+ List<String> notifyEmailList =
+ jobProp.getStringList(CommonJobProperties.NOTIFY_EMAILS,
+ Collections.EMPTY_LIST);
+ for (String email : notifyEmailList) {
+ email = email.toLowerCase();
+ successEmail.add(email);
+ failureEmail.add(email);
+ }
+
+ flow.addFailureEmails(failureEmail);
+ flow.addSuccessEmails(successEmail);
+
+ flow.addAllFlowProperties(flowPropsList);
+ constructFlow(flow, base, visitedNodes);
+ flow.initialize();
+ flowMap.put(base.getId(), flow);
+ }
+ }
+ }
+
+ private void constructFlow(Flow flow, Node node, Set<String> visited) {
+ visited.add(node.getId());
+
+ flow.addNode(node);
+ if (SpecialJobTypes.EMBEDDED_FLOW_TYPE.equals(node.getType())) {
+ Props props = jobPropsMap.get(node.getId());
+ String embeddedFlow = props.get(SpecialJobTypes.FLOW_NAME);
+
+ Set<String> embeddedFlows = flowDependencies.get(flow.getId());
+ if (embeddedFlows == null) {
+ embeddedFlows = new HashSet<String>();
+ flowDependencies.put(flow.getId(), embeddedFlows);
+ }
+
+ node.setEmbeddedFlowId(embeddedFlow);
+ embeddedFlows.add(embeddedFlow);
+ }
+ Map<String, Edge> dependencies = nodeDependencies.get(node.getId());
+
+ if (dependencies != null) {
+ for (Edge edge : dependencies.values()) {
+ if (edge.hasError()) {
+ flow.addEdge(edge);
+ } else if (visited.contains(edge.getSourceId())) {
+ // We have a cycle. We set it as an error edge
+ edge = new Edge(edge.getSourceId(), node.getId());
+ edge.setError("Cyclical dependencies found.");
+ errors.add("Cyclical dependency found at " + edge.getId());
+ flow.addEdge(edge);
+ } else {
+ // This should not be null
+ flow.addEdge(edge);
+ Node sourceNode = nodeMap.get(edge.getSourceId());
+ constructFlow(flow, sourceNode, visited);
+ }
+ }
+ }
+
+ visited.remove(node.getId());
+ }
+
+ private String getNameWithoutExtension(File file) {
+ String filename = file.getName();
+ int index = filename.lastIndexOf('.');
+
+ return index < 0 ? filename : filename.substring(0, index);
+ }
+
+ private String getRelativeFilePath(String basePath, String filePath) {
+ return filePath.substring(basePath.length() + 1);
+ }
+
+ private static class DirFilter implements FileFilter {
+ @Override
+ public boolean accept(File pathname) {
+ return pathname.isDirectory();
+ }
+ }
+
+ private static class SuffixFilter implements FileFilter {
+ private String suffix;
+
+ public SuffixFilter(String suffix) {
+ this.suffix = suffix;
+ }
+
+ @Override
+ public boolean accept(File pathname) {
+ String name = pathname.getName();
+
+ return pathname.isFile() && !pathname.isHidden()
+ && name.length() > suffix.length() && name.endsWith(suffix);
+ }
+ }
}
src/main/java/azkaban/utils/Emailer.java 319(+165 -154)
diff --git a/src/main/java/azkaban/utils/Emailer.java b/src/main/java/azkaban/utils/Emailer.java
index a4e2ef8..9d08163 100644
--- a/src/main/java/azkaban/utils/Emailer.java
+++ b/src/main/java/azkaban/utils/Emailer.java
@@ -1,6 +1,3 @@
-
-package azkaban.utils;
-
/*
* Copyright 2012 LinkedIn Corp.
*
@@ -17,6 +14,7 @@ package azkaban.utils;
* the License.
*/
+package azkaban.utils;
import java.util.ArrayList;
import java.util.List;
@@ -38,155 +36,168 @@ import azkaban.utils.EmailMessage;
import azkaban.utils.Props;
public class Emailer extends AbstractMailer implements Alerter {
- private static Logger logger = Logger.getLogger(Emailer.class);
-
- private boolean testMode = false;
-
- private String clientHostname;
- private String clientPortNumber;
-
- private String mailHost;
- private String mailUser;
- private String mailPassword;
- private String mailSender;
- private String azkabanName;
-
- public Emailer(Props props) {
- super(props);
- this.azkabanName = props.getString("azkaban.name", "azkaban");
- this.mailHost = props.getString("mail.host", "localhost");
- this.mailUser = props.getString("mail.user", "");
- this.mailPassword = props.getString("mail.password", "");
- this.mailSender = props.getString("mail.sender", "");
-
- int mailTimeout = props.getInt("mail.timeout.millis", 10000);
- EmailMessage.setTimeout(mailTimeout);
- int connectionTimeout = props.getInt("mail.connection.timeout.millis", 10000);
- EmailMessage.setConnectionTimeout(connectionTimeout);
-
- this.clientHostname = props.getString("jetty.hostname", "localhost");
-
- if (props.getBoolean("jetty.use.ssl", true)) {
- this.clientPortNumber = props.getString("jetty.ssl.port");
- } else {
- this.clientPortNumber = props.getString("jetty.port");
- }
-
- testMode = props.getBoolean("test.mode", false);
- }
-
- @SuppressWarnings("unchecked")
- private void sendSlaAlertEmail(SlaOption slaOption, String slaMessage) {
- String subject = "Sla Violation Alert on " + getAzkabanName();
- String body = slaMessage;
- List<String> emailList = (List<String>) slaOption.getInfo().get(SlaOption.INFO_EMAIL_LIST);
- if (emailList != null && !emailList.isEmpty()) {
- EmailMessage message = super.createEmailMessage(
- subject,
- "text/html",
- emailList);
-
- message.setBody(body);
-
- if (!testMode) {
- try {
- message.sendEmail();
- } catch (MessagingException e) {
- logger.error("Email message send failed" , e);
- }
- }
- }
- }
-
- public void sendFirstErrorMessage(ExecutableFlow flow) {
- EmailMessage message = new EmailMessage(mailHost, mailUser, mailPassword);
- message.setFromAddress(mailSender);
-
- ExecutionOptions option = flow.getExecutionOptions();
-
- MailCreator mailCreator = DefaultMailCreator.getCreator(option.getMailCreator());
-
- logger.debug("ExecutorMailer using mail creator:" + mailCreator.getClass().getCanonicalName());
-
- boolean mailCreated = mailCreator.createFirstErrorMessage(flow, message, azkabanName, clientHostname, clientPortNumber);
-
- if (mailCreated && !testMode) {
- try {
- message.sendEmail();
- } catch (MessagingException e) {
- logger.error("Email message send failed", e);
- }
- }
- }
-
- public void sendErrorEmail(ExecutableFlow flow, String... extraReasons) {
- EmailMessage message = new EmailMessage(mailHost, mailUser, mailPassword);
- message.setFromAddress(mailSender);
-
- ExecutionOptions option = flow.getExecutionOptions();
-
- MailCreator mailCreator = DefaultMailCreator.getCreator(option.getMailCreator());
- logger.debug("ExecutorMailer using mail creator:" + mailCreator.getClass().getCanonicalName());
-
- boolean mailCreated = mailCreator.createErrorEmail(flow, message, azkabanName, clientHostname, clientPortNumber, extraReasons);
-
- if (mailCreated && !testMode) {
- try {
- message.sendEmail();
- } catch (MessagingException e) {
- logger.error("Email message send failed", e);
- }
- }
- }
-
- public void sendSuccessEmail(ExecutableFlow flow) {
- EmailMessage message = new EmailMessage(mailHost, mailUser, mailPassword);
- message.setFromAddress(mailSender);
-
- ExecutionOptions option = flow.getExecutionOptions();
-
- MailCreator mailCreator = DefaultMailCreator.getCreator(option.getMailCreator());
- logger.debug("ExecutorMailer using mail creator:" + mailCreator.getClass().getCanonicalName());
-
- boolean mailCreated = mailCreator.createSuccessEmail(flow, message, azkabanName, clientHostname, clientPortNumber);
-
- if (mailCreated && !testMode) {
- try {
- message.sendEmail();
- } catch (MessagingException e) {
- logger.error("Email message send failed", e);
- }
- }
- }
-
- public static List<String> findFailedJobs(ExecutableFlow flow) {
- ArrayList<String> failedJobs = new ArrayList<String>();
- for (ExecutableNode node : flow.getExecutableNodes()) {
- if (node.getStatus() == Status.FAILED) {
- failedJobs.add(node.getId());
- }
- }
- return failedJobs;
- }
-
- @Override
- public void alertOnSuccess(ExecutableFlow exflow) throws Exception {
- sendSuccessEmail(exflow);
- }
-
- @Override
- public void alertOnError(ExecutableFlow exflow, String ... extraReasons) throws Exception {
- sendErrorEmail(exflow, extraReasons);
- }
-
- @Override
- public void alertOnFirstError(ExecutableFlow exflow) throws Exception {
- sendFirstErrorMessage(exflow);
- }
-
- @Override
- public void alertOnSla(SlaOption slaOption, String slaMessage)
- throws Exception {
- sendSlaAlertEmail(slaOption, slaMessage);
- }
+ private static Logger logger = Logger.getLogger(Emailer.class);
+
+ private boolean testMode = false;
+
+ private String clientHostname;
+ private String clientPortNumber;
+
+ private String mailHost;
+ private String mailUser;
+ private String mailPassword;
+ private String mailSender;
+ private String azkabanName;
+
+ public Emailer(Props props) {
+ super(props);
+ this.azkabanName = props.getString("azkaban.name", "azkaban");
+ this.mailHost = props.getString("mail.host", "localhost");
+ this.mailUser = props.getString("mail.user", "");
+ this.mailPassword = props.getString("mail.password", "");
+ this.mailSender = props.getString("mail.sender", "");
+
+ int mailTimeout = props.getInt("mail.timeout.millis", 10000);
+ EmailMessage.setTimeout(mailTimeout);
+ int connectionTimeout =
+ props.getInt("mail.connection.timeout.millis", 10000);
+ EmailMessage.setConnectionTimeout(connectionTimeout);
+
+ this.clientHostname = props.getString("jetty.hostname", "localhost");
+
+ if (props.getBoolean("jetty.use.ssl", true)) {
+ this.clientPortNumber = props.getString("jetty.ssl.port");
+ } else {
+ this.clientPortNumber = props.getString("jetty.port");
+ }
+
+ testMode = props.getBoolean("test.mode", false);
+ }
+
+ @SuppressWarnings("unchecked")
+ private void sendSlaAlertEmail(SlaOption slaOption, String slaMessage) {
+ String subject = "Sla Violation Alert on " + getAzkabanName();
+ String body = slaMessage;
+ List<String> emailList =
+ (List<String>) slaOption.getInfo().get(SlaOption.INFO_EMAIL_LIST);
+ if (emailList != null && !emailList.isEmpty()) {
+ EmailMessage message =
+ super.createEmailMessage(subject, "text/html", emailList);
+
+ message.setBody(body);
+
+ if (!testMode) {
+ try {
+ message.sendEmail();
+ } catch (MessagingException e) {
+ logger.error("Email message send failed", e);
+ }
+ }
+ }
+ }
+
+ public void sendFirstErrorMessage(ExecutableFlow flow) {
+ EmailMessage message = new EmailMessage(mailHost, mailUser, mailPassword);
+ message.setFromAddress(mailSender);
+
+ ExecutionOptions option = flow.getExecutionOptions();
+
+ MailCreator mailCreator =
+ DefaultMailCreator.getCreator(option.getMailCreator());
+
+ logger.debug("ExecutorMailer using mail creator:"
+ + mailCreator.getClass().getCanonicalName());
+
+ boolean mailCreated =
+ mailCreator.createFirstErrorMessage(flow, message, azkabanName,
+ clientHostname, clientPortNumber);
+
+ if (mailCreated && !testMode) {
+ try {
+ message.sendEmail();
+ } catch (MessagingException e) {
+ logger.error("Email message send failed", e);
+ }
+ }
+ }
+
+ public void sendErrorEmail(ExecutableFlow flow, String... extraReasons) {
+ EmailMessage message = new EmailMessage(mailHost, mailUser, mailPassword);
+ message.setFromAddress(mailSender);
+
+ ExecutionOptions option = flow.getExecutionOptions();
+
+ MailCreator mailCreator =
+ DefaultMailCreator.getCreator(option.getMailCreator());
+ logger.debug("ExecutorMailer using mail creator:"
+ + mailCreator.getClass().getCanonicalName());
+
+ boolean mailCreated =
+ mailCreator.createErrorEmail(flow, message, azkabanName,
+ clientHostname, clientPortNumber, extraReasons);
+
+ if (mailCreated && !testMode) {
+ try {
+ message.sendEmail();
+ } catch (MessagingException e) {
+ logger.error("Email message send failed", e);
+ }
+ }
+ }
+
+ public void sendSuccessEmail(ExecutableFlow flow) {
+ EmailMessage message = new EmailMessage(mailHost, mailUser, mailPassword);
+ message.setFromAddress(mailSender);
+
+ ExecutionOptions option = flow.getExecutionOptions();
+
+ MailCreator mailCreator =
+ DefaultMailCreator.getCreator(option.getMailCreator());
+ logger.debug("ExecutorMailer using mail creator:"
+ + mailCreator.getClass().getCanonicalName());
+
+ boolean mailCreated =
+ mailCreator.createSuccessEmail(flow, message, azkabanName,
+ clientHostname, clientPortNumber);
+
+ if (mailCreated && !testMode) {
+ try {
+ message.sendEmail();
+ } catch (MessagingException e) {
+ logger.error("Email message send failed", e);
+ }
+ }
+ }
+
+ public static List<String> findFailedJobs(ExecutableFlow flow) {
+ ArrayList<String> failedJobs = new ArrayList<String>();
+ for (ExecutableNode node : flow.getExecutableNodes()) {
+ if (node.getStatus() == Status.FAILED) {
+ failedJobs.add(node.getId());
+ }
+ }
+ return failedJobs;
+ }
+
+ @Override
+ public void alertOnSuccess(ExecutableFlow exflow) throws Exception {
+ sendSuccessEmail(exflow);
+ }
+
+ @Override
+ public void alertOnError(ExecutableFlow exflow, String... extraReasons)
+ throws Exception {
+ sendErrorEmail(exflow, extraReasons);
+ }
+
+ @Override
+ public void alertOnFirstError(ExecutableFlow exflow) throws Exception {
+ sendFirstErrorMessage(exflow);
+ }
+
+ @Override
+ public void alertOnSla(SlaOption slaOption, String slaMessage)
+ throws Exception {
+ sendSlaAlertEmail(slaOption, slaMessage);
+ }
}
src/main/java/azkaban/utils/EmailMessage.java 361(+180 -181)
diff --git a/src/main/java/azkaban/utils/EmailMessage.java b/src/main/java/azkaban/utils/EmailMessage.java
index bbcfdd1..604590b 100644
--- a/src/main/java/azkaban/utils/EmailMessage.java
+++ b/src/main/java/azkaban/utils/EmailMessage.java
@@ -39,185 +39,184 @@ import javax.mail.internet.MimeMultipart;
import com.sun.mail.smtp.SMTPTransport;
public class EmailMessage {
- private static String protocol = "smtp";
- private List<String> _toAddress = new ArrayList<String>();
- private String _mailHost;
- private String _mailUser;
- private String _mailPassword;
- private String _subject;
- private String _fromAddress;
- private String _mimeType = "text/plain";
- private StringBuffer _body = new StringBuffer();
- private static int _mailTimeout = 10000;
- private static int _connectionTimeout = 10000;
-
- private ArrayList<BodyPart> _attachments = new ArrayList<BodyPart>();
-
- public EmailMessage() {
- this("localhost", "", "");
- }
-
- public EmailMessage(String host, String user, String password) {
- _mailUser = user;
- _mailHost = host;
- _mailPassword = password;
- }
-
- public static void setTimeout(int timeoutMillis) {
- _mailTimeout = timeoutMillis;
- }
-
- public static void setConnectionTimeout(int timeoutMillis) {
- _connectionTimeout = timeoutMillis;
- }
-
- public EmailMessage setMailHost(String host) {
- _mailHost = host;
- return this;
- }
-
- public EmailMessage setMailUser(String user) {
- _mailUser = user;
- return this;
- }
-
- public EmailMessage setMailPassword(String password) {
- _mailPassword = password;
- return this;
- }
-
- public EmailMessage addAllToAddress(Collection<? extends String> addresses) {
- _toAddress.addAll(addresses);
- return this;
- }
-
- public EmailMessage addToAddress(String address) {
- _toAddress.add(address);
- return this;
- }
-
- public EmailMessage setSubject(String subject) {
- _subject = subject;
- return this;
- }
-
- public EmailMessage setFromAddress(String fromAddress) {
- _fromAddress = fromAddress;
- return this;
- }
-
- public EmailMessage addAttachment(File file) throws MessagingException {
- return addAttachment(file.getName(), file);
- }
-
- public EmailMessage addAttachment(String attachmentName, File file)
- throws MessagingException {
- BodyPart attachmentPart = new MimeBodyPart();
- DataSource fileDataSource = new FileDataSource(file);
- attachmentPart.setDataHandler(new DataHandler(fileDataSource));
- attachmentPart.setFileName(attachmentName);
- _attachments.add(attachmentPart);
- return this;
- }
-
- public EmailMessage addAttachment(String attachmentName, InputStream stream)
- throws MessagingException {
- BodyPart attachmentPart = new MimeBodyPart(stream);
- attachmentPart.setFileName(attachmentName);
- _attachments.add(attachmentPart);
- return this;
- }
-
- private void checkSettings() {
- if (_mailHost == null) {
- throw new RuntimeException("Mail host not set.");
- }
-
-// if (_mailUser == null) {
-// throw new RuntimeException("Mail user not set.");
-// }
-//
-// if (_mailPassword == null) {
-// throw new RuntimeException("Mail password not set.");
-// }
-
- if (_fromAddress == null || _fromAddress.length() == 0) {
- throw new RuntimeException("From address not set.");
- }
-
- if (_subject == null) {
- throw new RuntimeException("Subject cannot be null");
- }
-
- if (_toAddress.size() == 0) {
- throw new RuntimeException("T");
- }
- }
-
- public void sendEmail() throws MessagingException {
- checkSettings();
- Properties props = new Properties();
-// props.setProperty("mail.transport.protocol", "smtp");
- props.put("mail."+protocol+".host", _mailHost);
- props.put("mail."+protocol+".auth", "true");
- props.put("mail.user", _mailUser);
- props.put("mail.password", _mailPassword);
- props.put("mail."+protocol+".timeout", _mailTimeout);
- props.put("mail."+protocol+".connectiontimeout", _connectionTimeout);
-
- Session session = Session.getInstance(props, null);
- Message message = new MimeMessage(session);
- InternetAddress from = new InternetAddress(_fromAddress, false);
- message.setFrom(from);
- for (String toAddr : _toAddress)
- message.addRecipient(Message.RecipientType.TO, new InternetAddress(
- toAddr, false));
- message.setSubject(_subject);
- message.setSentDate(new Date());
-
- if (_attachments.size() > 0) {
- MimeMultipart multipart = new MimeMultipart("related");
-
- BodyPart messageBodyPart = new MimeBodyPart();
- messageBodyPart.setContent(_body.toString(), _mimeType);
- multipart.addBodyPart(messageBodyPart);
-
- // Add attachments
- for (BodyPart part : _attachments) {
- multipart.addBodyPart(part);
- }
-
- message.setContent(multipart);
- } else {
- message.setContent(_body.toString(), _mimeType);
- }
-
-// Transport transport = session.getTransport();
-
- SMTPTransport t = (SMTPTransport) session.getTransport(protocol);
- t.connect(_mailHost, _mailUser, _mailPassword);
- t.sendMessage(message,
- message.getRecipients(Message.RecipientType.TO));
- t.close();
- }
-
- public void setBody(String body) {
- setBody(body, _mimeType);
- }
-
- public void setBody(String body, String mimeType) {
- _body = new StringBuffer(body);
- _mimeType = mimeType;
- }
-
- public EmailMessage setMimeType(String mimeType) {
- _mimeType = mimeType;
- return this;
- }
-
- public EmailMessage println(Object str) {
- _body.append(str);
-
- return this;
- }
+ private static String protocol = "smtp";
+ private List<String> _toAddress = new ArrayList<String>();
+ private String _mailHost;
+ private String _mailUser;
+ private String _mailPassword;
+ private String _subject;
+ private String _fromAddress;
+ private String _mimeType = "text/plain";
+ private StringBuffer _body = new StringBuffer();
+ private static int _mailTimeout = 10000;
+ private static int _connectionTimeout = 10000;
+
+ private ArrayList<BodyPart> _attachments = new ArrayList<BodyPart>();
+
+ public EmailMessage() {
+ this("localhost", "", "");
+ }
+
+ public EmailMessage(String host, String user, String password) {
+ _mailUser = user;
+ _mailHost = host;
+ _mailPassword = password;
+ }
+
+ public static void setTimeout(int timeoutMillis) {
+ _mailTimeout = timeoutMillis;
+ }
+
+ public static void setConnectionTimeout(int timeoutMillis) {
+ _connectionTimeout = timeoutMillis;
+ }
+
+ public EmailMessage setMailHost(String host) {
+ _mailHost = host;
+ return this;
+ }
+
+ public EmailMessage setMailUser(String user) {
+ _mailUser = user;
+ return this;
+ }
+
+ public EmailMessage setMailPassword(String password) {
+ _mailPassword = password;
+ return this;
+ }
+
+ public EmailMessage addAllToAddress(Collection<? extends String> addresses) {
+ _toAddress.addAll(addresses);
+ return this;
+ }
+
+ public EmailMessage addToAddress(String address) {
+ _toAddress.add(address);
+ return this;
+ }
+
+ public EmailMessage setSubject(String subject) {
+ _subject = subject;
+ return this;
+ }
+
+ public EmailMessage setFromAddress(String fromAddress) {
+ _fromAddress = fromAddress;
+ return this;
+ }
+
+ public EmailMessage addAttachment(File file) throws MessagingException {
+ return addAttachment(file.getName(), file);
+ }
+
+ public EmailMessage addAttachment(String attachmentName, File file)
+ throws MessagingException {
+ BodyPart attachmentPart = new MimeBodyPart();
+ DataSource fileDataSource = new FileDataSource(file);
+ attachmentPart.setDataHandler(new DataHandler(fileDataSource));
+ attachmentPart.setFileName(attachmentName);
+ _attachments.add(attachmentPart);
+ return this;
+ }
+
+ public EmailMessage addAttachment(String attachmentName, InputStream stream)
+ throws MessagingException {
+ BodyPart attachmentPart = new MimeBodyPart(stream);
+ attachmentPart.setFileName(attachmentName);
+ _attachments.add(attachmentPart);
+ return this;
+ }
+
+ private void checkSettings() {
+ if (_mailHost == null) {
+ throw new RuntimeException("Mail host not set.");
+ }
+
+ // if (_mailUser == null) {
+ // throw new RuntimeException("Mail user not set.");
+ // }
+ //
+ // if (_mailPassword == null) {
+ // throw new RuntimeException("Mail password not set.");
+ // }
+
+ if (_fromAddress == null || _fromAddress.length() == 0) {
+ throw new RuntimeException("From address not set.");
+ }
+
+ if (_subject == null) {
+ throw new RuntimeException("Subject cannot be null");
+ }
+
+ if (_toAddress.size() == 0) {
+ throw new RuntimeException("T");
+ }
+ }
+
+ public void sendEmail() throws MessagingException {
+ checkSettings();
+ Properties props = new Properties();
+ // props.setProperty("mail.transport.protocol", "smtp");
+ props.put("mail." + protocol + ".host", _mailHost);
+ props.put("mail." + protocol + ".auth", "true");
+ props.put("mail.user", _mailUser);
+ props.put("mail.password", _mailPassword);
+ props.put("mail." + protocol + ".timeout", _mailTimeout);
+ props.put("mail." + protocol + ".connectiontimeout", _connectionTimeout);
+
+ Session session = Session.getInstance(props, null);
+ Message message = new MimeMessage(session);
+ InternetAddress from = new InternetAddress(_fromAddress, false);
+ message.setFrom(from);
+ for (String toAddr : _toAddress)
+ message.addRecipient(Message.RecipientType.TO, new InternetAddress(
+ toAddr, false));
+ message.setSubject(_subject);
+ message.setSentDate(new Date());
+
+ if (_attachments.size() > 0) {
+ MimeMultipart multipart = new MimeMultipart("related");
+
+ BodyPart messageBodyPart = new MimeBodyPart();
+ messageBodyPart.setContent(_body.toString(), _mimeType);
+ multipart.addBodyPart(messageBodyPart);
+
+ // Add attachments
+ for (BodyPart part : _attachments) {
+ multipart.addBodyPart(part);
+ }
+
+ message.setContent(multipart);
+ } else {
+ message.setContent(_body.toString(), _mimeType);
+ }
+
+ // Transport transport = session.getTransport();
+
+ SMTPTransport t = (SMTPTransport) session.getTransport(protocol);
+ t.connect(_mailHost, _mailUser, _mailPassword);
+ t.sendMessage(message, message.getRecipients(Message.RecipientType.TO));
+ t.close();
+ }
+
+ public void setBody(String body) {
+ setBody(body, _mimeType);
+ }
+
+ public void setBody(String body, String mimeType) {
+ _body = new StringBuffer(body);
+ _mimeType = mimeType;
+ }
+
+ public EmailMessage setMimeType(String mimeType) {
+ _mimeType = mimeType;
+ return this;
+ }
+
+ public EmailMessage println(Object str) {
+ _body.append(str);
+
+ return this;
+ }
}
src/main/java/azkaban/utils/FileIOUtils.java 817(+413 -404)
diff --git a/src/main/java/azkaban/utils/FileIOUtils.java b/src/main/java/azkaban/utils/FileIOUtils.java
index c3aaee4..048d2e0 100644
--- a/src/main/java/azkaban/utils/FileIOUtils.java
+++ b/src/main/java/azkaban/utils/FileIOUtils.java
@@ -34,410 +34,419 @@ import java.util.StringTokenizer;
import org.apache.commons.io.IOUtils;
/**
- * Runs a few unix commands. Created this so that I can move to JNI in the future.
+ * Runs a few unix commands. Created this so that I can move to JNI in the
+ * future.
*/
public class FileIOUtils {
-
- public static class PrefixSuffixFileFilter implements FileFilter {
- private String prefix;
- private String suffix;
-
- public PrefixSuffixFileFilter(String prefix, String suffix) {
- this.prefix = prefix;
- this.suffix = suffix;
- }
-
- @Override
- public boolean accept(File pathname) {
- if (!pathname.isFile() || pathname.isHidden()) {
- return false;
- }
-
- String name = pathname.getName();
- int length = name.length();
- if (suffix.length() > length || prefix.length() > length ) {
- return false;
- }
-
- return name.startsWith(prefix) && name.endsWith(suffix);
- }
- }
-
- public static String getSourcePathFromClass(Class<?> containedClass) {
- File file = new File(containedClass.getProtectionDomain().getCodeSource().getLocation().getPath());
-
- if (!file.isDirectory() && file.getName().endsWith(".class")) {
- String name = containedClass.getName();
- StringTokenizer tokenizer = new StringTokenizer(name, ".");
- while(tokenizer.hasMoreTokens()) {
- tokenizer.nextElement();
- file = file.getParentFile();
- }
- return file.getPath();
- }
- else {
- return containedClass.getProtectionDomain().getCodeSource().getLocation().getPath();
- }
- }
-
- /**
- * Run a unix command that will symlink files, and recurse into directories.
- */
- public static void createDeepSymlink(File sourceDir, File destDir) throws IOException {
- if (!sourceDir.exists()) {
- throw new IOException("Source directory " + sourceDir.getPath() + " doesn't exist");
- }
- else if (!destDir.exists()) {
- throw new IOException("Destination directory " + destDir.getPath() + " doesn't exist");
- }
- else if (sourceDir.isFile() && destDir.isFile()) {
- throw new IOException("Source or Destination is not a directory.");
- }
-
- Set<String> paths = new HashSet<String>();
- createDirsFindFiles(sourceDir, sourceDir, destDir, paths);
-
- StringBuffer buffer = new StringBuffer();
- //buffer.append("sh '");
- for (String path: paths) {
- File sourceLink = new File(sourceDir, path);
- path = "." + path;
-
- buffer.append("ln -s ").append(sourceLink.getAbsolutePath()).append("/*").append(" ").append(path).append(";");
- }
-
- //buffer.append("'");
- String command = buffer.toString();
- //System.out.println(command);
- ProcessBuilder builder = new ProcessBuilder().command("sh", "-c", command);
- builder.directory(destDir);
-
- // XXX what about stopping threads ??
- Process process = builder.start();
- try {
- NullLogger errorLogger = new NullLogger(process.getErrorStream());
- NullLogger inputLogger = new NullLogger(process.getInputStream());
- errorLogger.start();
- inputLogger.start();
-
- try {
- if (process.waitFor() < 0) {
- // Assume that the error will be in standard out. Otherwise it'll be in standard in.
- String errorMessage = errorLogger.getLastMessages();
- if (errorMessage.isEmpty()) {
- errorMessage = inputLogger.getLastMessages();
- }
-
- throw new IOException(errorMessage);
- }
-
- // System.out.println(errorLogger.getLastMessages());
- } catch (InterruptedException e) {
- e.printStackTrace();
- }
- } finally {
- IOUtils.closeQuietly(process.getInputStream());
- IOUtils.closeQuietly(process.getOutputStream());
- IOUtils.closeQuietly(process.getErrorStream());
- }
- }
-
- private static void createDirsFindFiles(File baseDir, File sourceDir, File destDir, Set<String> paths) {
- File[] srcList = sourceDir.listFiles();
- String path = getRelativePath(baseDir, sourceDir);
- paths.add(path);
-
- for (File file: srcList) {
- if (file.isDirectory()) {
- File newDestDir = new File(destDir, file.getName());
- newDestDir.mkdirs();
- createDirsFindFiles(baseDir, file, newDestDir, paths);
- }
- }
- }
-
- private static String getRelativePath(File basePath, File sourceDir) {
- return sourceDir.getPath().substring(basePath.getPath().length());
- }
-
- private static class NullLogger extends Thread {
- private final BufferedReader inputReader;
- private CircularBuffer<String> buffer = new CircularBuffer<String>(5);
-
- public NullLogger(InputStream stream) {
- inputReader = new BufferedReader(new InputStreamReader(stream));
- }
-
- public void run() {
- try {
- while (!Thread.currentThread().isInterrupted()) {
- String line = inputReader.readLine();
- if (line == null) {
- return;
- }
- buffer.append(line);
- }
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
-
- public String getLastMessages() {
- StringBuffer messageBuffer = new StringBuffer();
- for (String message: buffer) {
- messageBuffer.append(message);
- messageBuffer.append("\n");
- }
-
- return messageBuffer.toString();
- }
- }
-
- public static Pair<Integer, Integer> readUtf8File(File file, int offset, int length, OutputStream stream) throws IOException {
- byte[] buffer = new byte[length];
-
- FileInputStream fileStream = new FileInputStream(file);
-
- long skipped = fileStream.skip(offset);
- if (skipped < offset) {
- fileStream.close();
- return new Pair<Integer,Integer>(0, 0);
- }
-
- BufferedInputStream inputStream = null;
- try {
- inputStream = new BufferedInputStream(fileStream);
- inputStream.read(buffer);
- }
- finally {
- IOUtils.closeQuietly(inputStream);
- }
-
- Pair<Integer, Integer> utf8Range = getUtf8Range(buffer, 0, length);
- stream.write(buffer, utf8Range.getFirst(), utf8Range.getSecond());
-
- return new Pair<Integer,Integer>(offset + utf8Range.getFirst(), utf8Range.getSecond());
- }
-
- public static LogData readUtf8File(File file, int fileOffset, int length) throws IOException {
- byte[] buffer = new byte[length];
- FileInputStream fileStream = new FileInputStream(file);
-
- long skipped = fileStream.skip(fileOffset);
- if (skipped < fileOffset) {
- fileStream.close();
- return new LogData(fileOffset, 0, "");
- }
-
- BufferedInputStream inputStream = null;
- int read = 0;
- try {
- inputStream = new BufferedInputStream(fileStream);
- read = inputStream.read(buffer);
- }
- finally {
- IOUtils.closeQuietly(inputStream);
- }
-
- if (read <= 0) {
- return new LogData(fileOffset, 0, "");
- }
- Pair<Integer, Integer> utf8Range = getUtf8Range(buffer, 0, read);
- String outputString = new String(buffer, utf8Range.getFirst(), utf8Range.getSecond());
-
- return new LogData(fileOffset + utf8Range.getFirst(), utf8Range.getSecond(), outputString);
- }
-
- public static JobMetaData readUtf8MetaDataFile(File file, int fileOffset, int length) throws IOException {
- byte[] buffer = new byte[length];
- FileInputStream fileStream = new FileInputStream(file);
-
- long skipped = fileStream.skip(fileOffset);
- if (skipped < fileOffset) {
- fileStream.close();
- return new JobMetaData(fileOffset, 0, "");
- }
-
- BufferedInputStream inputStream = null;
- int read = 0;
- try {
- inputStream = new BufferedInputStream(fileStream);
- read = inputStream.read(buffer);
- }
- finally {
- IOUtils.closeQuietly(inputStream);
- }
-
- if (read <= 0) {
- return new JobMetaData(fileOffset, 0, "");
- }
- Pair<Integer, Integer> utf8Range = getUtf8Range(buffer, 0, read);
- String outputString = new String(buffer, utf8Range.getFirst(), utf8Range.getSecond());
-
- return new JobMetaData(fileOffset + utf8Range.getFirst(), utf8Range.getSecond(), outputString);
- }
-
- /**
- * Returns first and length.
- */
- public static Pair<Integer, Integer> getUtf8Range(byte[] buffer, int offset, int length) {
- int start = getUtf8ByteStart(buffer, offset);
- int end = getUtf8ByteEnd(buffer, offset + length - 1);
-
- return new Pair<Integer, Integer>(start, end - start + 1);
- }
-
- private static int getUtf8ByteStart(byte[] buffer, int offset) {
- // If it's a proper utf-8, we should find it within the next 6 bytes.
- for (int i = offset; i < offset + 6 && i < buffer.length; i++) {
- byte b = buffer[i];
- // check the mask 0x80 is 0, which is a proper ascii
- if ((0x80 & b) == 0) {
- return i;
- }
- else if ((0xC0 & b) == 0xC0) {
- return i;
- }
- }
-
- // Don't know what it is, will just set it as 0
- return offset;
- }
-
- private static int getUtf8ByteEnd(byte[] buffer, int offset) {
- // If it's a proper utf-8, we should find it within the previous 12 bytes.
- for (int i = offset; i > offset - 11 && i >= 0; i--) {
- byte b = buffer[i];
- // check the mask 0x80 is 0, which is a proper ascii. Just return
- if ((0x80 & b) == 0) {
- return i;
- }
-
- if ((b & 0xE0) == 0xC0) { // two byte utf8 char. bits 110x xxxx
- if (offset - i >= 1) {
- // There is 1 following byte we're good.
- return i + 1;
- }
- }
- else if ((b & 0xF0) == 0xE0) { // three byte utf8 char. bits 1110 xxxx
- if (offset - i >= 2) {
- // There is 1 following byte we're good.
- return i + 2;
- }
- }
- else if ((b & 0xF8) == 0xF0) { // four byte utf8 char. bits 1111 0xxx
- if (offset - i >= 3) {
- // There is 1 following byte we're good.
- return i + 3;
- }
- }
- else if ((b & 0xFC) >= 0xF8) { // five byte utf8 char. bits 1111 10xx
- if (offset - i == 4) {
- // There is 1 following byte we're good.
- return i + 4;
- }
- }
- else if ((b & 0xFE) == 0xFC) { // six byte utf8 char. bits 1111 110x
- if (offset - i >= 5) {
- // There is 1 following byte we're good.
- return i + 5;
- }
- }
- }
-
- // Don't know what it is, will just set it as 0
- return offset;
- }
-
- public static class LogData {
- private int offset;
- private int length;
- private String data;
-
- public LogData(int offset, int length, String data) {
- this.offset = offset;
- this.length = length;
- this.data = data;
- }
-
- public int getOffset() {
- return offset;
- }
-
- public int getLength() {
- return length;
- }
-
- public String getData() {
- return data;
- }
-
- public Map<String,Object> toObject() {
- HashMap<String,Object> map = new HashMap<String,Object>();
- map.put("offset", offset);
- map.put("length", length);
- map.put("data", data);
-
- return map;
- }
-
- public static LogData createLogDataFromObject(Map<String,Object> map) {
- int offset = (Integer)map.get("offset");
- int length = (Integer)map.get("length");
- String data = (String)map.get("data");
-
- return new LogData(offset,length, data);
- }
-
- @Override
- public String toString() {
- return "[offset=" + offset + ",length="+length + ",data=" + data + "]";
- }
- }
-
- public static class JobMetaData {
- private int offset;
- private int length;
- private String data;
-
- public JobMetaData(int offset, int length, String data) {
- this.offset = offset;
- this.length = length;
- this.data = data;
- }
-
- public int getOffset() {
- return offset;
- }
-
- public int getLength() {
- return length;
- }
-
- public String getData() {
- return data;
- }
-
- public Map<String,Object> toObject() {
- HashMap<String,Object> map = new HashMap<String,Object>();
- map.put("offset", offset);
- map.put("length", length);
- map.put("data", data);
-
- return map;
- }
-
- public static JobMetaData createJobMetaDataFromObject(Map<String,Object> map) {
- int offset = (Integer)map.get("offset");
- int length = (Integer)map.get("length");
- String data = (String)map.get("data");
-
- return new JobMetaData(offset,length, data);
- }
-
- @Override
- public String toString() {
- return "[offset=" + offset + ",length="+length + ",data=" + data + "]";
- }
- }
+
+ public static class PrefixSuffixFileFilter implements FileFilter {
+ private String prefix;
+ private String suffix;
+
+ public PrefixSuffixFileFilter(String prefix, String suffix) {
+ this.prefix = prefix;
+ this.suffix = suffix;
+ }
+
+ @Override
+ public boolean accept(File pathname) {
+ if (!pathname.isFile() || pathname.isHidden()) {
+ return false;
+ }
+
+ String name = pathname.getName();
+ int length = name.length();
+ if (suffix.length() > length || prefix.length() > length) {
+ return false;
+ }
+
+ return name.startsWith(prefix) && name.endsWith(suffix);
+ }
+ }
+
+ public static String getSourcePathFromClass(Class<?> containedClass) {
+ File file =
+ new File(containedClass.getProtectionDomain().getCodeSource()
+ .getLocation().getPath());
+
+ if (!file.isDirectory() && file.getName().endsWith(".class")) {
+ String name = containedClass.getName();
+ StringTokenizer tokenizer = new StringTokenizer(name, ".");
+ while (tokenizer.hasMoreTokens()) {
+ tokenizer.nextElement();
+ file = file.getParentFile();
+ }
+ return file.getPath();
+ } else {
+ return containedClass.getProtectionDomain().getCodeSource().getLocation()
+ .getPath();
+ }
+ }
+
+ /**
+ * Run a unix command that will symlink files, and recurse into directories.
+ */
+ public static void createDeepSymlink(File sourceDir, File destDir)
+ throws IOException {
+ if (!sourceDir.exists()) {
+ throw new IOException("Source directory " + sourceDir.getPath()
+ + " doesn't exist");
+ } else if (!destDir.exists()) {
+ throw new IOException("Destination directory " + destDir.getPath()
+ + " doesn't exist");
+ } else if (sourceDir.isFile() && destDir.isFile()) {
+ throw new IOException("Source or Destination is not a directory.");
+ }
+
+ Set<String> paths = new HashSet<String>();
+ createDirsFindFiles(sourceDir, sourceDir, destDir, paths);
+
+ StringBuffer buffer = new StringBuffer();
+ // buffer.append("sh '");
+ for (String path : paths) {
+ File sourceLink = new File(sourceDir, path);
+ path = "." + path;
+
+ buffer.append("ln -s ").append(sourceLink.getAbsolutePath()).append("/*")
+ .append(" ").append(path).append(";");
+ }
+
+ // buffer.append("'");
+ String command = buffer.toString();
+ // System.out.println(command);
+ ProcessBuilder builder = new ProcessBuilder().command("sh", "-c", command);
+ builder.directory(destDir);
+
+ // XXX what about stopping threads ??
+ Process process = builder.start();
+ try {
+ NullLogger errorLogger = new NullLogger(process.getErrorStream());
+ NullLogger inputLogger = new NullLogger(process.getInputStream());
+ errorLogger.start();
+ inputLogger.start();
+
+ try {
+ if (process.waitFor() < 0) {
+ // Assume that the error will be in standard out. Otherwise it'll be
+ // in standard in.
+ String errorMessage = errorLogger.getLastMessages();
+ if (errorMessage.isEmpty()) {
+ errorMessage = inputLogger.getLastMessages();
+ }
+
+ throw new IOException(errorMessage);
+ }
+
+ // System.out.println(errorLogger.getLastMessages());
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ } finally {
+ IOUtils.closeQuietly(process.getInputStream());
+ IOUtils.closeQuietly(process.getOutputStream());
+ IOUtils.closeQuietly(process.getErrorStream());
+ }
+ }
+
+ private static void createDirsFindFiles(File baseDir, File sourceDir,
+ File destDir, Set<String> paths) {
+ File[] srcList = sourceDir.listFiles();
+ String path = getRelativePath(baseDir, sourceDir);
+ paths.add(path);
+
+ for (File file : srcList) {
+ if (file.isDirectory()) {
+ File newDestDir = new File(destDir, file.getName());
+ newDestDir.mkdirs();
+ createDirsFindFiles(baseDir, file, newDestDir, paths);
+ }
+ }
+ }
+
+ private static String getRelativePath(File basePath, File sourceDir) {
+ return sourceDir.getPath().substring(basePath.getPath().length());
+ }
+
+ private static class NullLogger extends Thread {
+ private final BufferedReader inputReader;
+ private CircularBuffer<String> buffer = new CircularBuffer<String>(5);
+
+ public NullLogger(InputStream stream) {
+ inputReader = new BufferedReader(new InputStreamReader(stream));
+ }
+
+ public void run() {
+ try {
+ while (!Thread.currentThread().isInterrupted()) {
+ String line = inputReader.readLine();
+ if (line == null) {
+ return;
+ }
+ buffer.append(line);
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ public String getLastMessages() {
+ StringBuffer messageBuffer = new StringBuffer();
+ for (String message : buffer) {
+ messageBuffer.append(message);
+ messageBuffer.append("\n");
+ }
+
+ return messageBuffer.toString();
+ }
+ }
+
+ public static Pair<Integer, Integer> readUtf8File(File file, int offset,
+ int length, OutputStream stream) throws IOException {
+ byte[] buffer = new byte[length];
+
+ FileInputStream fileStream = new FileInputStream(file);
+
+ long skipped = fileStream.skip(offset);
+ if (skipped < offset) {
+ fileStream.close();
+ return new Pair<Integer, Integer>(0, 0);
+ }
+
+ BufferedInputStream inputStream = null;
+ try {
+ inputStream = new BufferedInputStream(fileStream);
+ inputStream.read(buffer);
+ } finally {
+ IOUtils.closeQuietly(inputStream);
+ }
+
+ Pair<Integer, Integer> utf8Range = getUtf8Range(buffer, 0, length);
+ stream.write(buffer, utf8Range.getFirst(), utf8Range.getSecond());
+
+ return new Pair<Integer, Integer>(offset + utf8Range.getFirst(),
+ utf8Range.getSecond());
+ }
+
+ public static LogData readUtf8File(File file, int fileOffset, int length)
+ throws IOException {
+ byte[] buffer = new byte[length];
+ FileInputStream fileStream = new FileInputStream(file);
+
+ long skipped = fileStream.skip(fileOffset);
+ if (skipped < fileOffset) {
+ fileStream.close();
+ return new LogData(fileOffset, 0, "");
+ }
+
+ BufferedInputStream inputStream = null;
+ int read = 0;
+ try {
+ inputStream = new BufferedInputStream(fileStream);
+ read = inputStream.read(buffer);
+ } finally {
+ IOUtils.closeQuietly(inputStream);
+ }
+
+ if (read <= 0) {
+ return new LogData(fileOffset, 0, "");
+ }
+ Pair<Integer, Integer> utf8Range = getUtf8Range(buffer, 0, read);
+ String outputString =
+ new String(buffer, utf8Range.getFirst(), utf8Range.getSecond());
+
+ return new LogData(fileOffset + utf8Range.getFirst(),
+ utf8Range.getSecond(), outputString);
+ }
+
+ public static JobMetaData readUtf8MetaDataFile(File file, int fileOffset,
+ int length) throws IOException {
+ byte[] buffer = new byte[length];
+ FileInputStream fileStream = new FileInputStream(file);
+
+ long skipped = fileStream.skip(fileOffset);
+ if (skipped < fileOffset) {
+ fileStream.close();
+ return new JobMetaData(fileOffset, 0, "");
+ }
+
+ BufferedInputStream inputStream = null;
+ int read = 0;
+ try {
+ inputStream = new BufferedInputStream(fileStream);
+ read = inputStream.read(buffer);
+ } finally {
+ IOUtils.closeQuietly(inputStream);
+ }
+
+ if (read <= 0) {
+ return new JobMetaData(fileOffset, 0, "");
+ }
+ Pair<Integer, Integer> utf8Range = getUtf8Range(buffer, 0, read);
+ String outputString =
+ new String(buffer, utf8Range.getFirst(), utf8Range.getSecond());
+
+ return new JobMetaData(fileOffset + utf8Range.getFirst(),
+ utf8Range.getSecond(), outputString);
+ }
+
+ /**
+ * Returns first and length.
+ */
+ public static Pair<Integer, Integer> getUtf8Range(byte[] buffer, int offset,
+ int length) {
+ int start = getUtf8ByteStart(buffer, offset);
+ int end = getUtf8ByteEnd(buffer, offset + length - 1);
+
+ return new Pair<Integer, Integer>(start, end - start + 1);
+ }
+
+ private static int getUtf8ByteStart(byte[] buffer, int offset) {
+ // If it's a proper utf-8, we should find it within the next 6 bytes.
+ for (int i = offset; i < offset + 6 && i < buffer.length; i++) {
+ byte b = buffer[i];
+ // check the mask 0x80 is 0, which is a proper ascii
+ if ((0x80 & b) == 0) {
+ return i;
+ } else if ((0xC0 & b) == 0xC0) {
+ return i;
+ }
+ }
+
+ // Don't know what it is, will just set it as 0
+ return offset;
+ }
+
+ private static int getUtf8ByteEnd(byte[] buffer, int offset) {
+ // If it's a proper utf-8, we should find it within the previous 12 bytes.
+ for (int i = offset; i > offset - 11 && i >= 0; i--) {
+ byte b = buffer[i];
+ // check the mask 0x80 is 0, which is a proper ascii. Just return
+ if ((0x80 & b) == 0) {
+ return i;
+ }
+
+ if ((b & 0xE0) == 0xC0) { // two byte utf8 char. bits 110x xxxx
+ if (offset - i >= 1) {
+ // There is 1 following byte we're good.
+ return i + 1;
+ }
+ } else if ((b & 0xF0) == 0xE0) { // three byte utf8 char. bits 1110 xxxx
+ if (offset - i >= 2) {
+ // There is 1 following byte we're good.
+ return i + 2;
+ }
+ } else if ((b & 0xF8) == 0xF0) { // four byte utf8 char. bits 1111 0xxx
+ if (offset - i >= 3) {
+ // There is 1 following byte we're good.
+ return i + 3;
+ }
+ } else if ((b & 0xFC) >= 0xF8) { // five byte utf8 char. bits 1111 10xx
+ if (offset - i == 4) {
+ // There is 1 following byte we're good.
+ return i + 4;
+ }
+ } else if ((b & 0xFE) == 0xFC) { // six byte utf8 char. bits 1111 110x
+ if (offset - i >= 5) {
+ // There is 1 following byte we're good.
+ return i + 5;
+ }
+ }
+ }
+
+ // Don't know what it is, will just set it as 0
+ return offset;
+ }
+
+ public static class LogData {
+ private int offset;
+ private int length;
+ private String data;
+
+ public LogData(int offset, int length, String data) {
+ this.offset = offset;
+ this.length = length;
+ this.data = data;
+ }
+
+ public int getOffset() {
+ return offset;
+ }
+
+ public int getLength() {
+ return length;
+ }
+
+ public String getData() {
+ return data;
+ }
+
+ public Map<String, Object> toObject() {
+ HashMap<String, Object> map = new HashMap<String, Object>();
+ map.put("offset", offset);
+ map.put("length", length);
+ map.put("data", data);
+
+ return map;
+ }
+
+ public static LogData createLogDataFromObject(Map<String, Object> map) {
+ int offset = (Integer) map.get("offset");
+ int length = (Integer) map.get("length");
+ String data = (String) map.get("data");
+
+ return new LogData(offset, length, data);
+ }
+
+ @Override
+ public String toString() {
+ return "[offset=" + offset + ",length=" + length + ",data=" + data + "]";
+ }
+ }
+
+ public static class JobMetaData {
+ private int offset;
+ private int length;
+ private String data;
+
+ public JobMetaData(int offset, int length, String data) {
+ this.offset = offset;
+ this.length = length;
+ this.data = data;
+ }
+
+ public int getOffset() {
+ return offset;
+ }
+
+ public int getLength() {
+ return length;
+ }
+
+ public String getData() {
+ return data;
+ }
+
+ public Map<String, Object> toObject() {
+ HashMap<String, Object> map = new HashMap<String, Object>();
+ map.put("offset", offset);
+ map.put("length", length);
+ map.put("data", data);
+
+ return map;
+ }
+
+ public static JobMetaData createJobMetaDataFromObject(
+ Map<String, Object> map) {
+ int offset = (Integer) map.get("offset");
+ int length = (Integer) map.get("length");
+ String data = (String) map.get("data");
+
+ return new JobMetaData(offset, length, data);
+ }
+
+ @Override
+ public String toString() {
+ return "[offset=" + offset + ",length=" + length + ",data=" + data + "]";
+ }
+ }
}
src/main/java/azkaban/utils/GZIPUtils.java 69(+36 -33)
/**
 * Utility methods for gzip-compressing and decompressing byte arrays and
 * strings entirely in memory.
 */
public class GZIPUtils {

  /**
   * Gzips the string after encoding it with the given character encoding.
   *
   * @throws IOException if compression fails or the encoding is unsupported
   */
  public static byte[] gzipString(String str, String encType)
      throws IOException {
    byte[] stringData = str.getBytes(encType);

    return gzipBytes(stringData);
  }

  /** Gzips the entire byte array. */
  public static byte[] gzipBytes(byte[] bytes) throws IOException {
    return gzipBytes(bytes, 0, bytes.length);
  }

  /**
   * Gzips the given slice of the byte array.
   *
   * Fix: the original assigned the stream to null before immediately
   * constructing it (dead code), and leaked the GZIPOutputStream if
   * write() threw; close now happens in a finally block. Closing is
   * required anyway, because it flushes the gzip trailer.
   */
  public static byte[] gzipBytes(byte[] bytes, int offset, int length)
      throws IOException {
    ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream();
    GZIPOutputStream gzipStream = new GZIPOutputStream(byteOutputStream);
    try {
      gzipStream.write(bytes, offset, length);
    } finally {
      gzipStream.close();
    }
    return byteOutputStream.toByteArray();
  }

  /**
   * Decompresses a gzip byte array back into the raw bytes.
   *
   * Fix: copies through a plain read loop (equivalent to the previous
   * IOUtils.copy call) and closes the input stream in a finally block so it
   * is not leaked when the gzip data is corrupt.
   */
  public static byte[] unGzipBytes(byte[] bytes) throws IOException {
    ByteArrayInputStream byteInputStream = new ByteArrayInputStream(bytes);
    GZIPInputStream gzipInputStream = new GZIPInputStream(byteInputStream);

    ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream();
    try {
      byte[] chunk = new byte[4096];
      int read;
      while ((read = gzipInputStream.read(chunk)) != -1) {
        byteOutputStream.write(chunk, 0, read);
      }
    } finally {
      gzipInputStream.close();
    }

    return byteOutputStream.toByteArray();
  }

  /**
   * Decompresses gzip bytes and decodes them into a String with the given
   * character encoding. Inverse of {@link #gzipString(String, String)}.
   */
  public static String unGzipString(byte[] bytes, String encType)
      throws IOException {
    byte[] response = unGzipBytes(bytes);
    return new String(response, encType);
  }
}
src/main/java/azkaban/utils/JSONUtils.java 453(+226 -227)
diff --git a/src/main/java/azkaban/utils/JSONUtils.java b/src/main/java/azkaban/utils/JSONUtils.java
index 324d8ef..cafb9d9 100644
--- a/src/main/java/azkaban/utils/JSONUtils.java
+++ b/src/main/java/azkaban/utils/JSONUtils.java
@@ -36,240 +36,239 @@ import org.codehaus.jackson.map.ObjectWriter;
public class JSONUtils {
- /**
- * The constructor. Cannot construct this class.
- */
- private JSONUtils() {
- }
-
- public static String toJSON(Object obj) {
- return toJSON(obj, false);
- }
+ /**
+ * The constructor. Cannot construct this class.
+ */
+ private JSONUtils() {
+ }
- public static String toJSON(Object obj, boolean prettyPrint) {
- ObjectMapper mapper = new ObjectMapper();
+ public static String toJSON(Object obj) {
+ return toJSON(obj, false);
+ }
- try {
- if (prettyPrint) {
- ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
- return writer.writeValueAsString(obj);
- }
- return mapper.writeValueAsString(obj);
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
+ public static String toJSON(Object obj, boolean prettyPrint) {
+ ObjectMapper mapper = new ObjectMapper();
- public static void toJSON(Object obj, OutputStream stream) {
- toJSON(obj, stream, false);
- }
-
- public static void toJSON(Object obj, OutputStream stream, boolean prettyPrint) {
- ObjectMapper mapper = new ObjectMapper();
- try {
- if (prettyPrint) {
- ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
- writer.writeValue(stream, obj);
- return;
- }
- mapper.writeValue(stream, obj);
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public static void toJSON(Object obj, File file) throws IOException {
- toJSON(obj, file, false);
- }
-
- public static void toJSON(Object obj, File file, boolean prettyPrint) throws IOException {
- BufferedOutputStream stream = new BufferedOutputStream(new FileOutputStream(file));
- try {
- toJSON(obj, stream, prettyPrint);
- } finally {
- stream.close();
- }
- }
-
- public static Object parseJSONFromStringQuiet(String json) {
- try {
- return parseJSONFromString(json);
- } catch (IOException e) {
- e.printStackTrace();
- return null;
- }
- }
-
- public static Object parseJSONFromString(String json) throws IOException {
- ObjectMapper mapper = new ObjectMapper();
- JsonFactory factory = new JsonFactory();
- JsonParser parser = factory.createJsonParser(json);
- JsonNode node = mapper.readTree(parser);
+ try {
+ if (prettyPrint) {
+ ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
+ return writer.writeValueAsString(obj);
+ }
+ return mapper.writeValueAsString(obj);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
- return toObjectFromJSONNode(node);
- }
+ public static void toJSON(Object obj, OutputStream stream) {
+ toJSON(obj, stream, false);
+ }
- public static Object parseJSONFromFile(File file) throws IOException {
- ObjectMapper mapper = new ObjectMapper();
- JsonFactory factory = new JsonFactory();
- JsonParser parser = factory.createJsonParser(file);
- JsonNode node = mapper.readTree(parser);
+ public static void toJSON(Object obj, OutputStream stream, boolean prettyPrint) {
+ ObjectMapper mapper = new ObjectMapper();
+ try {
+ if (prettyPrint) {
+ ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
+ writer.writeValue(stream, obj);
+ return;
+ }
+ mapper.writeValue(stream, obj);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
- return toObjectFromJSONNode(node);
- }
-
- public static Object parseJSONFromReader(Reader reader) throws IOException {
- ObjectMapper mapper = new ObjectMapper();
- JsonFactory factory = new JsonFactory();
- JsonParser parser = factory.createJsonParser(reader);
- JsonNode node = mapper.readTree(parser);
+ public static void toJSON(Object obj, File file) throws IOException {
+ toJSON(obj, file, false);
+ }
- return toObjectFromJSONNode(node);
- }
+ public static void toJSON(Object obj, File file, boolean prettyPrint)
+ throws IOException {
+ BufferedOutputStream stream =
+ new BufferedOutputStream(new FileOutputStream(file));
+ try {
+ toJSON(obj, stream, prettyPrint);
+ } finally {
+ stream.close();
+ }
+ }
- private static Object toObjectFromJSONNode(JsonNode node) {
- if (node.isObject()) {
- HashMap<String, Object> obj = new HashMap<String, Object>();
- Iterator<String> iter = node.getFieldNames();
- while (iter.hasNext()) {
- String fieldName = iter.next();
- JsonNode subNode = node.get(fieldName);
- Object subObj = toObjectFromJSONNode(subNode);
- obj.put(fieldName, subObj);
- }
+ public static Object parseJSONFromStringQuiet(String json) {
+ try {
+ return parseJSONFromString(json);
+ } catch (IOException e) {
+ e.printStackTrace();
+ return null;
+ }
+ }
- return obj;
- } else if (node.isArray()) {
- ArrayList<Object> array = new ArrayList<Object>();
- Iterator<JsonNode> iter = node.getElements();
- while (iter.hasNext()) {
- JsonNode element = iter.next();
- Object subObject = toObjectFromJSONNode(element);
- array.add(subObject);
- }
- return array;
- } else if (node.isTextual()) {
- return node.asText();
- } else if (node.isNumber()) {
- if (node.isInt()) {
- return node.asInt();
- } else if (node.isLong()) {
- return node.asLong();
- } else if (node.isDouble()) {
- return node.asDouble();
- } else {
- System.err.println("ERROR What is this!? "
- + node.getNumberType());
- return null;
- }
- } else if (node.isBoolean()) {
- return node.asBoolean();
- } else {
- return null;
- }
- }
-
- public static long getLongFromObject(Object obj) {
- if (obj instanceof Integer) {
- return Long.valueOf((Integer)obj);
- }
-
- return (Long)obj;
- }
-
- /*
- * Writes json to a stream without using any external dependencies.
- *
- * This is useful for plugins or extensions that want to write properties to a writer
- * without having to import the jackson, or json libraries. The properties are expected
- * to be a map of String keys and String values.
- *
- * The other json writing methods are more robust and will handle more cases.
- */
- public static void writePropsNoJarDependency(Map<String, String> properties, Writer writer) throws IOException {
- writer.write("{\n");
- int size = properties.size();
-
- for (Map.Entry<String, String> entry: properties.entrySet()) {
- // tab the space
- writer.write('\t');
- // Write key
- writer.write(quoteAndClean(entry.getKey()));
- writer.write(':');
- writer.write(quoteAndClean(entry.getValue()));
-
- size -= 1;
- // Add comma only if it's not the last one
- if (size > 0) {
- writer.write(',');
- }
- writer.write('\n');
- }
- writer.write("}");
- }
-
- private static String quoteAndClean(String str) {
- if (str == null || str.isEmpty()) {
- return "\"\"";
- }
-
- StringBuffer buffer = new StringBuffer(str.length());
- buffer.append('"');
- for (int i = 0; i < str.length(); ++i) {
- char ch = str.charAt(i);
-
- switch(ch) {
- case '\b':
- buffer.append("\\b");
- break;
- case '\t':
- buffer.append("\\t");
- break;
- case '\n':
- buffer.append("\\n");
- break;
- case '\f':
- buffer.append("\\f");
- break;
- case '\r':
- buffer.append("\\r");
- break;
- case '"':
- case '\\':
- case '/':
- buffer.append('\\');
- buffer.append(ch);
- break;
- default:
- if (isCharSpecialUnicode(ch)) {
- buffer.append("\\u");
- String hexCode = Integer.toHexString(ch);
- int lengthHexCode = hexCode.length();
- if (lengthHexCode < 4){
- buffer.append("0000".substring(0, 4 - lengthHexCode));
- }
- buffer.append(hexCode);
- }
- else {
- buffer.append(ch);
- }
- }
- }
- buffer.append('"');
- return buffer.toString();
- }
-
- private static boolean isCharSpecialUnicode(char ch) {
- if (ch < ' ') {
- return true;
- }
- else if ( ch >= '\u0080' && ch < '\u00a0') {
- return true;
- }
- else if ( ch >= '\u2000' && ch < '\u2100') {
- return true;
- }
-
- return false;
- }
+ public static Object parseJSONFromString(String json) throws IOException {
+ ObjectMapper mapper = new ObjectMapper();
+ JsonFactory factory = new JsonFactory();
+ JsonParser parser = factory.createJsonParser(json);
+ JsonNode node = mapper.readTree(parser);
+
+ return toObjectFromJSONNode(node);
+ }
+
+ public static Object parseJSONFromFile(File file) throws IOException {
+ ObjectMapper mapper = new ObjectMapper();
+ JsonFactory factory = new JsonFactory();
+ JsonParser parser = factory.createJsonParser(file);
+ JsonNode node = mapper.readTree(parser);
+
+ return toObjectFromJSONNode(node);
+ }
+
+ public static Object parseJSONFromReader(Reader reader) throws IOException {
+ ObjectMapper mapper = new ObjectMapper();
+ JsonFactory factory = new JsonFactory();
+ JsonParser parser = factory.createJsonParser(reader);
+ JsonNode node = mapper.readTree(parser);
+
+ return toObjectFromJSONNode(node);
+ }
+
+ private static Object toObjectFromJSONNode(JsonNode node) {
+ if (node.isObject()) {
+ HashMap<String, Object> obj = new HashMap<String, Object>();
+ Iterator<String> iter = node.getFieldNames();
+ while (iter.hasNext()) {
+ String fieldName = iter.next();
+ JsonNode subNode = node.get(fieldName);
+ Object subObj = toObjectFromJSONNode(subNode);
+ obj.put(fieldName, subObj);
+ }
+
+ return obj;
+ } else if (node.isArray()) {
+ ArrayList<Object> array = new ArrayList<Object>();
+ Iterator<JsonNode> iter = node.getElements();
+ while (iter.hasNext()) {
+ JsonNode element = iter.next();
+ Object subObject = toObjectFromJSONNode(element);
+ array.add(subObject);
+ }
+ return array;
+ } else if (node.isTextual()) {
+ return node.asText();
+ } else if (node.isNumber()) {
+ if (node.isInt()) {
+ return node.asInt();
+ } else if (node.isLong()) {
+ return node.asLong();
+ } else if (node.isDouble()) {
+ return node.asDouble();
+ } else {
+ System.err.println("ERROR What is this!? " + node.getNumberType());
+ return null;
+ }
+ } else if (node.isBoolean()) {
+ return node.asBoolean();
+ } else {
+ return null;
+ }
+ }
+
+ public static long getLongFromObject(Object obj) {
+ if (obj instanceof Integer) {
+ return Long.valueOf((Integer) obj);
+ }
+
+ return (Long) obj;
+ }
+
+ /*
+ * Writes json to a stream without using any external dependencies.
+ *
+ * This is useful for plugins or extensions that want to write properties to a
+ * writer without having to import the jackson, or json libraries. The
+ * properties are expected to be a map of String keys and String values.
+ *
+ * The other json writing methods are more robust and will handle more cases.
+ */
+ public static void writePropsNoJarDependency(Map<String, String> properties,
+ Writer writer) throws IOException {
+ writer.write("{\n");
+ int size = properties.size();
+
+ for (Map.Entry<String, String> entry : properties.entrySet()) {
+ // tab the space
+ writer.write('\t');
+ // Write key
+ writer.write(quoteAndClean(entry.getKey()));
+ writer.write(':');
+ writer.write(quoteAndClean(entry.getValue()));
+
+ size -= 1;
+ // Add comma only if it's not the last one
+ if (size > 0) {
+ writer.write(',');
+ }
+ writer.write('\n');
+ }
+ writer.write("}");
+ }
+
+ private static String quoteAndClean(String str) {
+ if (str == null || str.isEmpty()) {
+ return "\"\"";
+ }
+
+ StringBuffer buffer = new StringBuffer(str.length());
+ buffer.append('"');
+ for (int i = 0; i < str.length(); ++i) {
+ char ch = str.charAt(i);
+
+ switch (ch) {
+ case '\b':
+ buffer.append("\\b");
+ break;
+ case '\t':
+ buffer.append("\\t");
+ break;
+ case '\n':
+ buffer.append("\\n");
+ break;
+ case '\f':
+ buffer.append("\\f");
+ break;
+ case '\r':
+ buffer.append("\\r");
+ break;
+ case '"':
+ case '\\':
+ case '/':
+ buffer.append('\\');
+ buffer.append(ch);
+ break;
+ default:
+ if (isCharSpecialUnicode(ch)) {
+ buffer.append("\\u");
+ String hexCode = Integer.toHexString(ch);
+ int lengthHexCode = hexCode.length();
+ if (lengthHexCode < 4) {
+ buffer.append("0000".substring(0, 4 - lengthHexCode));
+ }
+ buffer.append(hexCode);
+ } else {
+ buffer.append(ch);
+ }
+ }
+ }
+ buffer.append('"');
+ return buffer.toString();
+ }
+
+ private static boolean isCharSpecialUnicode(char ch) {
+ if (ch < ' ') {
+ return true;
+ } else if (ch >= '\u0080' && ch < '\u00a0') {
+ return true;
+ } else if (ch >= '\u2000' && ch < '\u2100') {
+ return true;
+ }
+
+ return false;
+ }
}
src/main/java/azkaban/utils/LogGobbler.java 107(+54 -53)
diff --git a/src/main/java/azkaban/utils/LogGobbler.java b/src/main/java/azkaban/utils/LogGobbler.java
index e053b64..6176744 100644
--- a/src/main/java/azkaban/utils/LogGobbler.java
+++ b/src/main/java/azkaban/utils/LogGobbler.java
@@ -26,63 +26,64 @@ import org.apache.log4j.Logger;
import com.google.common.base.Joiner;
public class LogGobbler extends Thread {
- private final BufferedReader inputReader;
- private final Logger logger;
- private final Level loggingLevel;
- private final CircularBuffer<String> buffer;
+ private final BufferedReader inputReader;
+ private final Logger logger;
+ private final Level loggingLevel;
+ private final CircularBuffer<String> buffer;
- public LogGobbler(final Reader inputReader, final Logger logger, final Level level, final int bufferLines) {
- this.inputReader = new BufferedReader(inputReader);
- this.logger = logger;
- this.loggingLevel = level;
- buffer = new CircularBuffer<String>(bufferLines);
- }
+ public LogGobbler(final Reader inputReader, final Logger logger,
+ final Level level, final int bufferLines) {
+ this.inputReader = new BufferedReader(inputReader);
+ this.logger = logger;
+ this.loggingLevel = level;
+ buffer = new CircularBuffer<String>(bufferLines);
+ }
- @Override
- public void run() {
- try {
- while (!Thread.currentThread().isInterrupted()) {
- String line = inputReader.readLine();
- if (line == null) {
- return;
- }
+ @Override
+ public void run() {
+ try {
+ while (!Thread.currentThread().isInterrupted()) {
+ String line = inputReader.readLine();
+ if (line == null) {
+ return;
+ }
- buffer.append(line);
- log(line);
- }
- } catch (IOException e) {
- error("Error reading from logging stream:", e);
- }
- }
+ buffer.append(line);
+ log(line);
+ }
+ } catch (IOException e) {
+ error("Error reading from logging stream:", e);
+ }
+ }
- private void log(String message) {
- if (logger != null) {
- logger.log(loggingLevel, message);
- }
- }
-
- private void error(String message, Exception e) {
- if (logger != null) {
- logger.error(message, e);
- }
- }
-
- private void info(String message, Exception e) {
- if (logger != null) {
- logger.info(message, e);
- }
- }
-
- public void awaitCompletion(final long waitMs) {
- try {
- join(waitMs);
- } catch (InterruptedException e) {
- info("I/O thread interrupted.", e);
- }
- }
+ private void log(String message) {
+ if (logger != null) {
+ logger.log(loggingLevel, message);
+ }
+ }
- public String getRecentLog() {
- return Joiner.on(System.getProperty("line.separator")).join(buffer);
- }
+ private void error(String message, Exception e) {
+ if (logger != null) {
+ logger.error(message, e);
+ }
+ }
+
+ private void info(String message, Exception e) {
+ if (logger != null) {
+ logger.info(message, e);
+ }
+ }
+
+ public void awaitCompletion(final long waitMs) {
+ try {
+ join(waitMs);
+ } catch (InterruptedException e) {
+ info("I/O thread interrupted.", e);
+ }
+ }
+
+ public String getRecentLog() {
+ return Joiner.on(System.getProperty("line.separator")).join(buffer);
+ }
}
src/main/java/azkaban/utils/Md5Hasher.java 66(+33 -33)
/**
 * Helper class that will find the md5 hash for files.
 */
public class Md5Hasher {

  private static final int BYTE_BUFFER_SIZE = 1024;

  /**
   * Returns a fresh MD5 MessageDigest instance.
   *
   * @return the digest, or null in the (spec-impossible) case that the JDK
   *         does not provide MD5
   */
  public static MessageDigest getMd5Digest() {
    MessageDigest digest = null;
    try {
      digest = MessageDigest.getInstance("MD5");
    } catch (NoSuchAlgorithmException e) {
      // Should never get here: MD5 is mandated by the Java platform spec.
    }

    return digest;
  }

  /**
   * Computes the MD5 hash of the file's contents.
   *
   * Fix: the original never closed the stream when read() threw, leaking the
   * file descriptor; the stream chain is now closed in a finally block.
   *
   * @param file the file to hash
   * @return the 16-byte MD5 digest
   * @throws IOException if the file cannot be opened or read
   */
  public static byte[] md5Hash(File file) throws IOException {
    MessageDigest digest = getMd5Digest();

    // Reading through DigestInputStream feeds every byte into the digest;
    // the data itself is discarded.
    DigestInputStream blobStream =
        new DigestInputStream(
            new BufferedInputStream(new FileInputStream(file)), digest);
    try {
      byte[] buffer = new byte[BYTE_BUFFER_SIZE];
      while (blobStream.read(buffer) > 0) {
        // intentionally empty: looping drives the digest update
      }
    } finally {
      // closes the buffered and file streams underneath as well
      blobStream.close();
    }

    return digest.digest();
  }
}
src/main/java/azkaban/utils/Pair.java 94(+47 -47)
/**
 * Simple immutable pair of two values. Null elements are permitted.
 *
 * @param <F> type of the first element
 * @param <S> type of the second element
 */
public class Pair<F, S> {
  private final F first;
  private final S second;

  public Pair(F first, S second) {
    this.first = first;
    this.second = second;
  }

  /** @return the first element (may be null) */
  public F getFirst() {
    return first;
  }

  /** @return the second element (may be null) */
  public S getSecond() {
    return second;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((first == null) ? 0 : first.hashCode());
    result = prime * result + ((second == null) ? 0 : second.hashCode());
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    @SuppressWarnings("rawtypes")
    Pair other = (Pair) obj;
    if (first == null) {
      if (other.first != null) {
        return false;
      }
    } else if (!first.equals(other.first)) {
      return false;
    }
    if (second == null) {
      if (other.second != null) {
        return false;
      }
    } else if (!second.equals(other.second)) {
      return false;
    }
    return true;
  }

  @Override
  public String toString() {
    // Fix: the original called first.toString()/second.toString() directly and
    // threw NPE for null elements, even though equals/hashCode deliberately
    // tolerate nulls. String concatenation renders null as "null" safely.
    return "{" + first + "," + second + "}";
  }
}
src/main/java/azkaban/utils/Props.java 1865(+924 -941)
diff --git a/src/main/java/azkaban/utils/Props.java b/src/main/java/azkaban/utils/Props.java
index b3dacf8..beee964 100644
--- a/src/main/java/azkaban/utils/Props.java
+++ b/src/main/java/azkaban/utils/Props.java
@@ -42,950 +42,933 @@ import org.apache.log4j.Logger;
* functions and Exception throwing. This class is not threadsafe.
*/
public class Props {
- private final Map<String, String> _current;
- private Props _parent;
- private String source = null;
-
- /**
- * Constructor for empty props with empty parent.
- */
- public Props() {
- this(null);
- }
-
- /**
- * Constructor for empty Props with parent override.
- *
- * @param parent
- */
- public Props(Props parent) {
- this._current = new HashMap<String, String>();
- this._parent = parent;
- }
-
- /**
- * Load props from a file.
- *
- * @param parent
- * @param file
- * @throws IOException
- */
- public Props(Props parent, String filepath) throws IOException {
- this(parent, new File(filepath));
- }
-
- /**
- * Load props from a file.
- *
- * @param parent
- * @param file
- * @throws IOException
- */
- public Props(Props parent, File file) throws IOException {
- this(parent);
- setSource(file.getPath());
-
- InputStream input = new BufferedInputStream(new FileInputStream(file));
- try {
- loadFrom(input);
- } catch (IOException e) {
- throw e;
- } finally {
- input.close();
- }
- }
-
- /**
- * Create props from property input streams
- *
- * @param parent
- * @param inputStreams
- * @throws IOException
- */
- public Props(Props parent, InputStream inputStream) throws IOException {
- this(parent);
- loadFrom(inputStream);
- }
-
- /**
- *
- * @param inputStream
- * @throws IOException
- */
- private void loadFrom(InputStream inputStream) throws IOException {
- Properties properties = new Properties();
- properties.load(inputStream);
- this.put(properties);
- }
-
- /**
- * Create properties from maps of properties
- *
- * @param parent
- * @param props
- */
- public Props(Props parent, Map<String, String>... props) {
- this(parent);
- for (int i = props.length - 1; i >= 0; i--) {
- this.putAll(props[i]);
- }
- }
-
- /**
- * Create properties from Properties objects
- *
- * @param parent
- * @param properties
- */
- public Props(Props parent, Properties... properties) {
- this(parent);
- for (int i = properties.length - 1; i >= 0; i--) {
- this.put(properties[i]);
- }
- }
-
- /**
- * Create a Props object with the contents set to that of props.
- *
- * @param parent
- * @param props
- */
- public Props(Props parent, Props props) {
- this(parent);
- if (props != null) {
- putAll(props);
- }
- }
-
- public void setEarliestAncestor(Props parent) {
- Props props = getEarliestAncestor();
- props.setParent(parent);
- }
-
- public Props getEarliestAncestor() {
- if (_parent == null) {
- return this;
- }
-
- return _parent.getEarliestAncestor();
- }
-
- /**
- * Create a Props with a null parent from a list of key value pairing. i.e.
- * [key1, value1, key2, value2 ...]
- *
- * @param args
- * @return
- */
- public static Props of(String... args) {
- return of((Props) null, args);
- }
-
- /**
- * Create a Props from a list of key value pairing. i.e. [key1, value1,
- * key2, value2 ...]
- *
- * @param args
- * @return
- */
- @SuppressWarnings("unchecked")
- public static Props of(Props parent, String... args) {
- if (args.length % 2 != 0) {
- throw new IllegalArgumentException(
- "Must have an equal number of keys and values.");
- }
-
- Map<String, String> vals = new HashMap<String, String>(args.length / 2);
-
- for (int i = 0; i < args.length; i += 2) {
- vals.put(args[i], args[i + 1]);
- }
- return new Props(parent, vals);
- }
-
- /**
- * Clear the current Props, but leaves the parent untouched.
- */
- public void clearLocal() {
- _current.clear();
- }
-
- /**
- * Check key in current Props then search in parent
- *
- * @param k
- * @return
- */
- public boolean containsKey(Object k) {
- return _current.containsKey(k)
- || (_parent != null && _parent.containsKey(k));
- }
-
- /**
- * Check value in current Props then search in parent
- *
- * @param value
- * @return
- */
- public boolean containsValue(Object value) {
- return _current.containsValue(value)
- || (_parent != null && _parent.containsValue(value));
- }
-
- /**
- * Return value if available in current Props otherwise return from parent
- *
- * @param key
- * @return
- */
- public String get(Object key) {
- if (_current.containsKey(key)) {
- return _current.get(key);
- } else if (_parent != null) {
- return _parent.get(key);
- } else {
- return null;
- }
- }
-
- /**
- * Get the key set from the current Props
- *
- * @return
- */
- public Set<String> localKeySet() {
- return _current.keySet();
- }
-
- /**
- * Get parent Props
- *
- * @return
- */
- public Props getParent() {
- return _parent;
- }
-
- /**
- * Put the given string value for the string key. This method performs any
- * variable substitution in the value replacing any occurance of ${name}
- * with the value of get("name").
- *
- * @param key
- * The key to put the value to
- * @param value
- * The value to do substitution on and store
- *
- * @throws IllegalArgumentException
- * If the variable given for substitution is not a valid key in
- * this Props.
- */
- public String put(String key, String value) {
- return _current.put(key, value);
- }
-
- /**
- * Put the given Properties into the Props. This method performs any
- * variable substitution in the value replacing any occurrence of ${name}
- * with the value of get("name"). get() is called first on the Props and
- * next on the Properties object.
- *
- * @param properties
- * The properties to put
- *
- * @throws IllegalArgumentException
- * If the variable given for substitution is not a valid key in
- * this Props.
- */
- public void put(Properties properties) {
- for (String propName : properties.stringPropertyNames()) {
- _current.put(propName, properties.getProperty(propName));
- }
- }
-
- /**
- * Put integer
- *
- * @param key
- * @param value
- * @return
- */
- public String put(String key, Integer value) {
- return _current.put(key, value.toString());
- }
-
- /**
- * Put Long. Stores as String.
- *
- * @param key
- * @param value
- * @return
- */
- public String put(String key, Long value) {
- return _current.put(key, value.toString());
- }
-
- /**
- * Put Double. Stores as String.
- *
- * @param key
- * @param value
- * @return
- */
- public String put(String key, Double value) {
- return _current.put(key, value.toString());
- }
-
- /**
- * Put everything in the map into the props.
- *
- * @param m
- */
- public void putAll(Map<? extends String, ? extends String> m) {
- if (m == null) {
- return;
- }
-
- for (Map.Entry<? extends String, ? extends String> entry : m.entrySet()) {
- this.put(entry.getKey(), entry.getValue());
- }
- }
-
- /**
- * Put all properties in the props into the current props. Will handle null
- * p.
- *
- * @param p
- */
- public void putAll(Props p) {
- if (p == null) {
- return;
- }
-
- for (String key : p.getKeySet()) {
- this.put(key, p.get(key));
- }
- }
-
- /**
- * Puts only the local props from p into the current properties
- *
- * @param p
- */
- public void putLocal(Props p) {
- for (String key : p.localKeySet()) {
- this.put(key, p.get(key));
- }
- }
-
- /**
- * Remove only the local value of key s, and not the parents.
- *
- * @param s
- * @return
- */
- public String removeLocal(Object s) {
- return _current.remove(s);
- }
-
- /**
- * The number of unique keys defined by this Props and all parent Props
- */
- public int size() {
- return getKeySet().size();
- }
-
- /**
- * The number of unique keys defined by this Props (keys defined only in
- * parent Props are not counted)
- */
- public int localSize() {
- return _current.size();
- }
-
- /**
- * Attempts to return the Class that corresponds to the Props value. If the
- * class doesn't exit, an IllegalArgumentException will be thrown.
- *
- * @param key
- * @return
- */
- public Class<?> getClass(String key) {
- try {
- if (containsKey(key)) {
- return Class.forName(get(key));
- } else {
- throw new UndefinedPropertyException(
- "Missing required property '" + key + "'");
- }
- } catch (ClassNotFoundException e) {
- throw new IllegalArgumentException(e);
- }
- }
-
- public Class<?> getClass(String key, boolean initialize, ClassLoader cl) {
- try {
- if (containsKey(key)) {
- return Class.forName(get(key), initialize, cl);
- } else {
- throw new UndefinedPropertyException(
- "Missing required property '" + key + "'");
- }
- } catch (ClassNotFoundException e) {
- throw new IllegalArgumentException(e);
- }
- }
-
- /**
- * Gets the class from the Props. If it doesn't exist, it will return the
- * defaultClass
- *
- * @param key
- * @param c
- * @return
- */
- public Class<?> getClass(String key, Class<?> defaultClass) {
- if (containsKey(key)) {
- return getClass(key);
- } else {
- return defaultClass;
- }
- }
-
- /**
- * Gets the string from the Props. If it doesn't exist, it will return the
- * defaultValue
- *
- * @param key
- * @param defaultValue
- * @return
- */
- public String getString(String key, String defaultValue) {
- if (containsKey(key)) {
- return get(key);
- } else {
- return defaultValue;
- }
- }
-
- /**
- * Gets the string from the Props. If it doesn't exist, throw and
- * UndefinedPropertiesException
- *
- * @param key
- * @param defaultValue
- * @return
- */
- public String getString(String key) {
- if (containsKey(key)) {
- return get(key);
- } else {
- throw new UndefinedPropertyException("Missing required property '"
- + key + "'");
- }
- }
-
- /**
- * Returns a list of strings with the comma as the separator of the value
- *
- * @param key
- * @return
- */
- public List<String> getStringList(String key) {
- return getStringList(key, "\\s*,\\s*");
- }
-
- /**
- * Returns a list of strings with the sep as the separator of the value
- *
- * @param key
- * @param sep
- * @return
- */
- public List<String> getStringList(String key, String sep) {
- String val = get(key);
- if (val == null || val.trim().length() == 0) {
- return Collections.emptyList();
- }
-
- if (containsKey(key)) {
- return Arrays.asList(val.split(sep));
- } else {
- throw new UndefinedPropertyException("Missing required property '"
- + key + "'");
- }
- }
-
- /**
- * Returns a list of strings with the comma as the separator of the value.
- * If the value is null, it'll return the defaultValue.
- *
- * @param key
- * @return
- */
- public List<String> getStringList(String key, List<String> defaultValue) {
- if (containsKey(key)) {
- return getStringList(key);
- } else {
- return defaultValue;
- }
- }
-
- /**
- * Returns a list of strings with the sep as the separator of the value. If
- * the value is null, it'll return the defaultValue.
- *
- * @param key
- * @return
- */
- public List<String> getStringList(String key, List<String> defaultValue,
- String sep) {
- if (containsKey(key)) {
- return getStringList(key, sep);
- } else {
- return defaultValue;
- }
- }
-
- /**
- * Returns true if the value equals "true". If the value is null, then the
- * default value is returned.
- *
- * @param key
- * @param defaultValue
- * @return
- */
- public boolean getBoolean(String key, boolean defaultValue) {
- if (containsKey(key)) {
- return "true".equalsIgnoreCase(get(key).trim());
- } else {
- return defaultValue;
- }
- }
-
- /**
- * Returns true if the value equals "true". If the value is null, then an
- * UndefinedPropertyException is thrown.
- *
- * @param key
- * @return
- */
- public boolean getBoolean(String key) {
- if (containsKey(key))
- return "true".equalsIgnoreCase(get(key));
- else
- throw new UndefinedPropertyException("Missing required property '"
- + key + "'");
- }
-
- /**
- * Returns the long representation of the value. If the value is null, then
- * the default value is returned. If the value isn't a long, then a parse
- * exception will be thrown.
- *
- * @param key
- * @param defaultValue
- * @return
- */
- public long getLong(String name, long defaultValue) {
- if (containsKey(name)) {
- return Long.parseLong(get(name));
- } else {
- return defaultValue;
- }
- }
-
- /**
- * Returns the long representation of the value. If the value is null, then
- * a UndefinedPropertyException will be thrown. If the value isn't a long,
- * then a parse exception will be thrown.
- *
- * @param key
- * @return
- */
- public long getLong(String name) {
- if (containsKey(name)) {
- return Long.parseLong(get(name));
- } else {
- throw new UndefinedPropertyException("Missing required property '"
- + name + "'");
- }
- }
-
- /**
- * Returns the int representation of the value. If the value is null, then
- * the default value is returned. If the value isn't a int, then a parse
- * exception will be thrown.
- *
- * @param key
- * @param defaultValue
- * @return
- */
- public int getInt(String name, int defaultValue) {
- if (containsKey(name)) {
- return Integer.parseInt(get(name).trim());
- } else {
- return defaultValue;
- }
- }
-
- /**
- * Returns the int representation of the value. If the value is null, then a
- * UndefinedPropertyException will be thrown. If the value isn't a int, then
- * a parse exception will be thrown.
- *
- * @param key
- * @return
- */
- public int getInt(String name) {
- if (containsKey(name)) {
- return Integer.parseInt(get(name).trim());
- } else {
- throw new UndefinedPropertyException("Missing required property '"
- + name + "'");
- }
- }
-
- /**
- * Returns the double representation of the value. If the value is null,
- * then the default value is returned. If the value isn't a double, then a
- * parse exception will be thrown.
- *
- * @param key
- * @param defaultValue
- * @return
- */
- public double getDouble(String name, double defaultValue) {
- if (containsKey(name)) {
- return Double.parseDouble(get(name).trim());
- } else {
- return defaultValue;
- }
- }
-
- /**
- * Returns the double representation of the value. If the value is null,
- * then a UndefinedPropertyException will be thrown. If the value isn't a
- * double, then a parse exception will be thrown.
- *
- * @param key
- * @return
- */
- public double getDouble(String name) {
- if (containsKey(name)) {
- return Double.parseDouble(get(name).trim());
- } else {
- throw new UndefinedPropertyException("Missing required property '"
- + name + "'");
- }
- }
-
- /**
- * Returns the uri representation of the value. If the value is null, then
- * the default value is returned. If the value isn't a uri, then a
- * IllegalArgumentException will be thrown.
- *
- * @param key
- * @param defaultValue
- * @return
- */
- public URI getUri(String name) {
- if (containsKey(name)) {
- try {
- return new URI(get(name));
- } catch (URISyntaxException e) {
- throw new IllegalArgumentException(e.getMessage());
- }
- } else {
- throw new UndefinedPropertyException("Missing required property '"
- + name + "'");
- }
- }
-
- /**
- * Returns the double representation of the value. If the value is null,
- * then the default value is returned. If the value isn't a uri, then a
- * IllegalArgumentException will be thrown.
- *
- * @param key
- * @param defaultValue
- * @return
- */
- public URI getUri(String name, URI defaultValue) {
- if (containsKey(name)) {
- return getUri(name);
- } else {
- return defaultValue;
- }
- }
-
- public URI getUri(String name, String defaultValue) {
- try {
- return getUri(name, new URI(defaultValue));
- } catch (URISyntaxException e) {
- throw new IllegalArgumentException(e.getMessage());
- }
- }
-
- /**
- * Store only those properties defined at this local level
- *
- * @param file
- * The file to write to
- * @throws IOException
- * If the file can't be found or there is an io error
- */
- public void storeLocal(File file) throws IOException {
- BufferedOutputStream out = new BufferedOutputStream(
- new FileOutputStream(file));
- try {
- storeLocal(out);
- } finally {
- out.close();
- }
- }
-
- /**
- * Returns a copy of only the local values of this props
- *
- * @return
- */
- @SuppressWarnings("unchecked")
- public Props local() {
- return new Props(null, _current);
- }
-
- /**
- * Store only those properties defined at this local level
- *
- * @param out
- * The output stream to write to
- * @throws IOException
- * If the file can't be found or there is an io error
- */
- public void storeLocal(OutputStream out) throws IOException {
- Properties p = new Properties();
- for (String key : _current.keySet()) {
- p.setProperty(key, get(key));
- }
- p.store(out, null);
- }
-
- /**
- * Returns a java.util.Properties file populated with the stuff in here.
- *
- * @return
- */
- public Properties toProperties() {
- Properties p = new Properties();
- for (String key : _current.keySet()) {
- p.setProperty(key, get(key));
- }
-
- return p;
- }
-
- /**
- * Store all properties, those local and also those in parent props
- *
- * @param file
- * The file to store to
- * @throws IOException
- * If there is an error writing
- */
- public void storeFlattened(File file) throws IOException {
- BufferedOutputStream out = new BufferedOutputStream(
- new FileOutputStream(file));
- try {
- storeFlattened(out);
- } finally {
- out.close();
- }
- }
-
- /**
- * Store all properties, those local and also those in parent props
- *
- * @param out
- * The stream to write to
- * @throws IOException
- * If there is an error writing
- */
- public void storeFlattened(OutputStream out) throws IOException {
- Properties p = new Properties();
- for (Props curr = this; curr != null; curr = curr.getParent()) {
- for (String key : curr.localKeySet()) {
- if (!p.containsKey(key)) {
- p.setProperty(key, get(key));
- }
- }
- }
-
- p.store(out, null);
- }
-
- /**
- * Get a map of all properties by string prefix
- *
- * @param prefix
- * The string prefix
- */
- public Map<String, String> getMapByPrefix(String prefix) {
- Map<String, String> values = new HashMap<String, String>();
-
- if (_parent != null) {
- for (Map.Entry<String, String> entry : _parent.getMapByPrefix(
- prefix).entrySet()) {
- values.put(entry.getKey(), entry.getValue());
- }
- }
-
- for (String key : this.localKeySet()) {
- if (key.startsWith(prefix)) {
- values.put(key.substring(prefix.length()), get(key));
- }
- }
- return values;
- }
-
- /**
- * Returns a set of all keys, including the parents
- *
- * @return
- */
- public Set<String> getKeySet() {
- HashSet<String> keySet = new HashSet<String>();
-
- keySet.addAll(localKeySet());
-
- if (_parent != null) {
- keySet.addAll(_parent.getKeySet());
- }
-
- return keySet;
- }
-
- /**
- * Logs the property in the given logger
- *
- * @param logger
- * @param comment
- */
- public void logProperties(Logger logger, String comment) {
- logger.info(comment);
-
- for (String key : getKeySet()) {
- logger.info(" key=" + key + " value=" + get(key));
- }
- }
-
- /**
- * Clones the Props p object and all of its parents.
- *
- * @param p
- * @return
- */
- public static Props clone(Props p) {
- return copyNext(p);
- }
-
- /**
- *
- * @param source
- * @return
- */
- private static Props copyNext(Props source) {
- Props priorNodeCopy = null;
- if (source.getParent() != null) {
- priorNodeCopy = copyNext(source.getParent());
- }
- Props dest = new Props(priorNodeCopy);
- for (String key : source.localKeySet()) {
- dest.put(key, source.get(key));
- }
-
- return dest;
- }
-
- /**
+ private final Map<String, String> _current;
+ private Props _parent;
+ private String source = null;
+
+ /**
+ * Constructor for empty props with empty parent.
+ */
+ public Props() {
+ this(null);
+ }
+
+ /**
+ * Constructor for empty Props with parent override.
+ *
+ * @param parent
+ */
+ public Props(Props parent) {
+ this._current = new HashMap<String, String>();
+ this._parent = parent;
+ }
+
+ /**
+ * Load props from a file.
+ *
+ * @param parent
+ * @param file
+ * @throws IOException
+ */
+ public Props(Props parent, String filepath) throws IOException {
+ this(parent, new File(filepath));
+ }
+
+ /**
+ * Load props from a file.
+ *
+ * @param parent
+ * @param file
+ * @throws IOException
+ */
+ public Props(Props parent, File file) throws IOException {
+ this(parent);
+ setSource(file.getPath());
+
+ InputStream input = new BufferedInputStream(new FileInputStream(file));
+ try {
+ loadFrom(input);
+ } catch (IOException e) {
+ throw e;
+ } finally {
+ input.close();
+ }
+ }
+
+ /**
+ * Create props from property input streams
+ *
+ * @param parent
+ * @param inputStreams
+ * @throws IOException
+ */
+ public Props(Props parent, InputStream inputStream) throws IOException {
+ this(parent);
+ loadFrom(inputStream);
+ }
+
+ /**
+ *
+ * @param inputStream
+ * @throws IOException
+ */
+ private void loadFrom(InputStream inputStream) throws IOException {
+ Properties properties = new Properties();
+ properties.load(inputStream);
+ this.put(properties);
+ }
+
+ /**
+ * Create properties from maps of properties
+ *
+ * @param parent
+ * @param props
+ */
+ public Props(Props parent, Map<String, String>... props) {
+ this(parent);
+ for (int i = props.length - 1; i >= 0; i--) {
+ this.putAll(props[i]);
+ }
+ }
+
+ /**
+ * Create properties from Properties objects
+ *
+ * @param parent
+ * @param properties
+ */
+ public Props(Props parent, Properties... properties) {
+ this(parent);
+ for (int i = properties.length - 1; i >= 0; i--) {
+ this.put(properties[i]);
+ }
+ }
+
+ /**
+ * Create a Props object with the contents set to that of props.
+ *
+ * @param parent
+ * @param props
+ */
+ public Props(Props parent, Props props) {
+ this(parent);
+ if (props != null) {
+ putAll(props);
+ }
+ }
+
+ public void setEarliestAncestor(Props parent) {
+ Props props = getEarliestAncestor();
+ props.setParent(parent);
+ }
+
+ public Props getEarliestAncestor() {
+ if (_parent == null) {
+ return this;
+ }
+
+ return _parent.getEarliestAncestor();
+ }
+
+ /**
+ * Create a Props with a null parent from a list of key value pairing. i.e.
+ * [key1, value1, key2, value2 ...]
+ *
+ * @param args
+ * @return
+ */
+ public static Props of(String... args) {
+ return of((Props) null, args);
+ }
+
+ /**
+ * Create a Props from a list of key value pairing. i.e. [key1, value1, key2,
+ * value2 ...]
+ *
+ * @param args
+ * @return
+ */
+ @SuppressWarnings("unchecked")
+ public static Props of(Props parent, String... args) {
+ if (args.length % 2 != 0) {
+ throw new IllegalArgumentException(
+ "Must have an equal number of keys and values.");
+ }
+
+ Map<String, String> vals = new HashMap<String, String>(args.length / 2);
+
+ for (int i = 0; i < args.length; i += 2) {
+ vals.put(args[i], args[i + 1]);
+ }
+ return new Props(parent, vals);
+ }
+
+ /**
+ * Clear the current Props, but leaves the parent untouched.
+ */
+ public void clearLocal() {
+ _current.clear();
+ }
+
+ /**
+ * Check key in current Props then search in parent
+ *
+ * @param k
+ * @return
+ */
+ public boolean containsKey(Object k) {
+ return _current.containsKey(k)
+ || (_parent != null && _parent.containsKey(k));
+ }
+
+ /**
+ * Check value in current Props then search in parent
+ *
+ * @param value
+ * @return
+ */
+ public boolean containsValue(Object value) {
+ return _current.containsValue(value)
+ || (_parent != null && _parent.containsValue(value));
+ }
+
+ /**
+ * Return value if available in current Props otherwise return from parent
+ *
+ * @param key
+ * @return
+ */
+ public String get(Object key) {
+ if (_current.containsKey(key)) {
+ return _current.get(key);
+ } else if (_parent != null) {
+ return _parent.get(key);
+ } else {
+ return null;
+ }
+ }
+
+ /**
+ * Get the key set from the current Props
+ *
+ * @return
+ */
+ public Set<String> localKeySet() {
+ return _current.keySet();
+ }
+
+ /**
+ * Get parent Props
+ *
+ * @return
+ */
+ public Props getParent() {
+ return _parent;
+ }
+
+ /**
+ * Put the given string value for the string key. This method performs any
+ * variable substitution in the value replacing any occurance of ${name} with
+ * the value of get("name").
+ *
+ * @param key The key to put the value to
+ * @param value The value to do substitution on and store
+ *
+ * @throws IllegalArgumentException If the variable given for substitution is
+ * not a valid key in this Props.
+ */
+ public String put(String key, String value) {
+ return _current.put(key, value);
+ }
+
+ /**
+ * Put the given Properties into the Props. This method performs any variable
+ * substitution in the value replacing any occurrence of ${name} with the
+ * value of get("name"). get() is called first on the Props and next on the
+ * Properties object.
+ *
+ * @param properties The properties to put
+ *
+ * @throws IllegalArgumentException If the variable given for substitution is
+ * not a valid key in this Props.
+ */
+ public void put(Properties properties) {
+ for (String propName : properties.stringPropertyNames()) {
+ _current.put(propName, properties.getProperty(propName));
+ }
+ }
+
+ /**
+ * Put integer
+ *
+ * @param key
+ * @param value
+ * @return
+ */
+ public String put(String key, Integer value) {
+ return _current.put(key, value.toString());
+ }
+
+ /**
+ * Put Long. Stores as String.
+ *
+ * @param key
+ * @param value
+ * @return
+ */
+ public String put(String key, Long value) {
+ return _current.put(key, value.toString());
+ }
+
+ /**
+ * Put Double. Stores as String.
+ *
+ * @param key
+ * @param value
+ * @return
+ */
+ public String put(String key, Double value) {
+ return _current.put(key, value.toString());
+ }
+
+ /**
+ * Put everything in the map into the props.
+ *
+ * @param m
+ */
+ public void putAll(Map<? extends String, ? extends String> m) {
+ if (m == null) {
+ return;
+ }
+
+ for (Map.Entry<? extends String, ? extends String> entry : m.entrySet()) {
+ this.put(entry.getKey(), entry.getValue());
+ }
+ }
+
+ /**
+ * Put all properties in the props into the current props. Will handle null p.
+ *
+ * @param p
+ */
+ public void putAll(Props p) {
+ if (p == null) {
+ return;
+ }
+
+ for (String key : p.getKeySet()) {
+ this.put(key, p.get(key));
+ }
+ }
+
+ /**
+ * Puts only the local props from p into the current properties
+ *
+ * @param p
+ */
+ public void putLocal(Props p) {
+ for (String key : p.localKeySet()) {
+ this.put(key, p.get(key));
+ }
+ }
+
+ /**
+ * Remove only the local value of key s, and not the parents.
+ *
+ * @param s
+ * @return
+ */
+ public String removeLocal(Object s) {
+ return _current.remove(s);
+ }
+
+ /**
+ * The number of unique keys defined by this Props and all parent Props
+ */
+ public int size() {
+ return getKeySet().size();
+ }
+
+ /**
+ * The number of unique keys defined by this Props (keys defined only in
+ * parent Props are not counted)
+ */
+ public int localSize() {
+ return _current.size();
+ }
+
+ /**
+ * Attempts to return the Class that corresponds to the Props value. If the
+ * class doesn't exit, an IllegalArgumentException will be thrown.
+ *
+ * @param key
+ * @return
+ */
+ public Class<?> getClass(String key) {
+ try {
+ if (containsKey(key)) {
+ return Class.forName(get(key));
+ } else {
+ throw new UndefinedPropertyException("Missing required property '"
+ + key + "'");
+ }
+ } catch (ClassNotFoundException e) {
+ throw new IllegalArgumentException(e);
+ }
+ }
+
+ public Class<?> getClass(String key, boolean initialize, ClassLoader cl) {
+ try {
+ if (containsKey(key)) {
+ return Class.forName(get(key), initialize, cl);
+ } else {
+ throw new UndefinedPropertyException("Missing required property '"
+ + key + "'");
+ }
+ } catch (ClassNotFoundException e) {
+ throw new IllegalArgumentException(e);
+ }
+ }
+
+ /**
+ * Gets the class from the Props. If it doesn't exist, it will return the
+ * defaultClass
+ *
+ * @param key
+ * @param c
+ * @return
+ */
+ public Class<?> getClass(String key, Class<?> defaultClass) {
+ if (containsKey(key)) {
+ return getClass(key);
+ } else {
+ return defaultClass;
+ }
+ }
+
+ /**
+ * Gets the string from the Props. If it doesn't exist, it will return the
+ * defaultValue
+ *
+ * @param key
+ * @param defaultValue
+ * @return
+ */
+ public String getString(String key, String defaultValue) {
+ if (containsKey(key)) {
+ return get(key);
+ } else {
+ return defaultValue;
+ }
+ }
+
+ /**
+ * Gets the string from the Props. If it doesn't exist, throw and
+ * UndefinedPropertiesException
+ *
+ * @param key
+ * @param defaultValue
+ * @return
+ */
+ public String getString(String key) {
+ if (containsKey(key)) {
+ return get(key);
+ } else {
+ throw new UndefinedPropertyException("Missing required property '" + key
+ + "'");
+ }
+ }
+
+ /**
+ * Returns a list of strings with the comma as the separator of the value
+ *
+ * @param key
+ * @return
+ */
+ public List<String> getStringList(String key) {
+ return getStringList(key, "\\s*,\\s*");
+ }
+
+ /**
+ * Returns a list of strings with the sep as the separator of the value
+ *
+ * @param key
+ * @param sep
+ * @return
+ */
+ public List<String> getStringList(String key, String sep) {
+ String val = get(key);
+ if (val == null || val.trim().length() == 0) {
+ return Collections.emptyList();
+ }
+
+ if (containsKey(key)) {
+ return Arrays.asList(val.split(sep));
+ } else {
+ throw new UndefinedPropertyException("Missing required property '" + key
+ + "'");
+ }
+ }
+
+ /**
+ * Returns a list of strings with the comma as the separator of the value. If
+ * the value is null, it'll return the defaultValue.
+ *
+ * @param key
+ * @return
+ */
+ public List<String> getStringList(String key, List<String> defaultValue) {
+ if (containsKey(key)) {
+ return getStringList(key);
+ } else {
+ return defaultValue;
+ }
+ }
+
+ /**
+ * Returns a list of strings with the sep as the separator of the value. If
+ * the value is null, it'll return the defaultValue.
+ *
+ * @param key
+ * @return
+ */
+ public List<String> getStringList(String key, List<String> defaultValue,
+ String sep) {
+ if (containsKey(key)) {
+ return getStringList(key, sep);
+ } else {
+ return defaultValue;
+ }
+ }
+
+ /**
+ * Returns true if the value equals "true". If the value is null, then the
+ * default value is returned.
+ *
+ * @param key
+ * @param defaultValue
+ * @return
+ */
+ public boolean getBoolean(String key, boolean defaultValue) {
+ if (containsKey(key)) {
+ return "true".equalsIgnoreCase(get(key).trim());
+ } else {
+ return defaultValue;
+ }
+ }
+
+ /**
+ * Returns true if the value equals "true". If the value is null, then an
+ * UndefinedPropertyException is thrown.
+ *
+ * @param key
+ * @return
+ */
+ public boolean getBoolean(String key) {
+ if (containsKey(key))
+ return "true".equalsIgnoreCase(get(key));
+ else
+ throw new UndefinedPropertyException("Missing required property '" + key
+ + "'");
+ }
+
+ /**
+ * Returns the long representation of the value. If the value is null, then
+ * the default value is returned. If the value isn't a long, then a parse
+ * exception will be thrown.
+ *
+ * @param key
+ * @param defaultValue
+ * @return
+ */
+ public long getLong(String name, long defaultValue) {
+ if (containsKey(name)) {
+ return Long.parseLong(get(name));
+ } else {
+ return defaultValue;
+ }
+ }
+
+ /**
+ * Returns the long representation of the value. If the value is null, then a
+ * UndefinedPropertyException will be thrown. If the value isn't a long, then
+ * a parse exception will be thrown.
+ *
+ * @param key
+ * @return
+ */
+ public long getLong(String name) {
+ if (containsKey(name)) {
+ return Long.parseLong(get(name));
+ } else {
+ throw new UndefinedPropertyException("Missing required property '" + name
+ + "'");
+ }
+ }
+
+ /**
+ * Returns the int representation of the value. If the value is null, then the
+ * default value is returned. If the value isn't a int, then a parse exception
+ * will be thrown.
+ *
+ * @param key
+ * @param defaultValue
+ * @return
+ */
+ public int getInt(String name, int defaultValue) {
+ if (containsKey(name)) {
+ return Integer.parseInt(get(name).trim());
+ } else {
+ return defaultValue;
+ }
+ }
+
+ /**
+ * Returns the int representation of the value. If the value is null, then a
+ * UndefinedPropertyException will be thrown. If the value isn't a int, then a
+ * parse exception will be thrown.
+ *
+ * @param key
+ * @return
+ */
+ public int getInt(String name) {
+ if (containsKey(name)) {
+ return Integer.parseInt(get(name).trim());
+ } else {
+ throw new UndefinedPropertyException("Missing required property '" + name
+ + "'");
+ }
+ }
+
+ /**
+ * Returns the double representation of the value. If the value is null, then
+ * the default value is returned. If the value isn't a double, then a parse
+ * exception will be thrown.
+ *
+ * @param key
+ * @param defaultValue
+ * @return
+ */
+ public double getDouble(String name, double defaultValue) {
+ if (containsKey(name)) {
+ return Double.parseDouble(get(name).trim());
+ } else {
+ return defaultValue;
+ }
+ }
+
+ /**
+ * Returns the double representation of the value. If the value is null, then
+ * a UndefinedPropertyException will be thrown. If the value isn't a double,
+ * then a parse exception will be thrown.
+ *
+ * @param key
+ * @return
+ */
+ public double getDouble(String name) {
+ if (containsKey(name)) {
+ return Double.parseDouble(get(name).trim());
+ } else {
+ throw new UndefinedPropertyException("Missing required property '" + name
+ + "'");
+ }
+ }
+
+ /**
+ * Returns the uri representation of the value. If the value is null, then the
+ * default value is returned. If the value isn't a uri, then a
+ * IllegalArgumentException will be thrown.
+ *
+ * @param key
+ * @param defaultValue
+ * @return
+ */
+ public URI getUri(String name) {
+ if (containsKey(name)) {
+ try {
+ return new URI(get(name));
+ } catch (URISyntaxException e) {
+ throw new IllegalArgumentException(e.getMessage());
+ }
+ } else {
+ throw new UndefinedPropertyException("Missing required property '" + name
+ + "'");
+ }
+ }
+
+ /**
+ * Returns the double representation of the value. If the value is null, then
+ * the default value is returned. If the value isn't a uri, then a
+ * IllegalArgumentException will be thrown.
+ *
+ * @param key
+ * @param defaultValue
+ * @return
+ */
+ public URI getUri(String name, URI defaultValue) {
+ if (containsKey(name)) {
+ return getUri(name);
+ } else {
+ return defaultValue;
+ }
+ }
+
+ public URI getUri(String name, String defaultValue) {
+ try {
+ return getUri(name, new URI(defaultValue));
+ } catch (URISyntaxException e) {
+ throw new IllegalArgumentException(e.getMessage());
+ }
+ }
+
+ /**
+ * Store only those properties defined at this local level
+ *
+ * @param file The file to write to
+ * @throws IOException If the file can't be found or there is an io error
+ */
+ public void storeLocal(File file) throws IOException {
+ BufferedOutputStream out =
+ new BufferedOutputStream(new FileOutputStream(file));
+ try {
+ storeLocal(out);
+ } finally {
+ out.close();
+ }
+ }
+
+ /**
+ * Returns a copy of only the local values of this props
+ *
+ * @return
+ */
+ @SuppressWarnings("unchecked")
+ public Props local() {
+ return new Props(null, _current);
+ }
+
+ /**
+ * Store only those properties defined at this local level
+ *
+ * @param out The output stream to write to
+ * @throws IOException If the file can't be found or there is an io error
+ */
+ public void storeLocal(OutputStream out) throws IOException {
+ Properties p = new Properties();
+ for (String key : _current.keySet()) {
+ p.setProperty(key, get(key));
+ }
+ p.store(out, null);
+ }
+
+ /**
+ * Returns a java.util.Properties file populated with the stuff in here.
+ *
+ * @return
+ */
+ public Properties toProperties() {
+ Properties p = new Properties();
+ for (String key : _current.keySet()) {
+ p.setProperty(key, get(key));
+ }
+
+ return p;
+ }
+
+ /**
+ * Store all properties, those local and also those in parent props
+ *
+ * @param file The file to store to
+ * @throws IOException If there is an error writing
+ */
+ public void storeFlattened(File file) throws IOException {
+ BufferedOutputStream out =
+ new BufferedOutputStream(new FileOutputStream(file));
+ try {
+ storeFlattened(out);
+ } finally {
+ out.close();
+ }
+ }
+
+ /**
+ * Store all properties, those local and also those in parent props
+ *
+ * @param out The stream to write to
+ * @throws IOException If there is an error writing
+ */
+ public void storeFlattened(OutputStream out) throws IOException {
+ Properties p = new Properties();
+ for (Props curr = this; curr != null; curr = curr.getParent()) {
+ for (String key : curr.localKeySet()) {
+ if (!p.containsKey(key)) {
+ p.setProperty(key, get(key));
+ }
+ }
+ }
+
+ p.store(out, null);
+ }
+
+ /**
+ * Get a map of all properties by string prefix
+ *
+ * @param prefix The string prefix
+ */
+ public Map<String, String> getMapByPrefix(String prefix) {
+ Map<String, String> values = new HashMap<String, String>();
+
+ if (_parent != null) {
+ for (Map.Entry<String, String> entry : _parent.getMapByPrefix(prefix)
+ .entrySet()) {
+ values.put(entry.getKey(), entry.getValue());
+ }
+ }
+
+ for (String key : this.localKeySet()) {
+ if (key.startsWith(prefix)) {
+ values.put(key.substring(prefix.length()), get(key));
+ }
+ }
+ return values;
+ }
+
+ /**
+ * Returns a set of all keys, including the parents
+ *
+ * @return
+ */
+ public Set<String> getKeySet() {
+ HashSet<String> keySet = new HashSet<String>();
+
+ keySet.addAll(localKeySet());
+
+ if (_parent != null) {
+ keySet.addAll(_parent.getKeySet());
+ }
+
+ return keySet;
+ }
+
+ /**
+ * Logs the property in the given logger
+ *
+ * @param logger
+ * @param comment
+ */
+ public void logProperties(Logger logger, String comment) {
+ logger.info(comment);
+
+ for (String key : getKeySet()) {
+ logger.info(" key=" + key + " value=" + get(key));
+ }
+ }
+
+ /**
+ * Clones the Props p object and all of its parents.
+ *
+ * @param p
+ * @return
+ */
+ public static Props clone(Props p) {
+ return copyNext(p);
+ }
+
+ /**
+ *
+ * @param source
+ * @return
+ */
+ private static Props copyNext(Props source) {
+ Props priorNodeCopy = null;
+ if (source.getParent() != null) {
+ priorNodeCopy = copyNext(source.getParent());
+ }
+ Props dest = new Props(priorNodeCopy);
+ for (String key : source.localKeySet()) {
+ dest.put(key, source.get(key));
+ }
+
+ return dest;
+ }
+
+ /**
*/
- @Override
- public boolean equals(Object o) {
- if (o == this) {
- return true;
- } else if (o == null) {
- return false;
- } else if (o.getClass() != Props.class) {
- return false;
- }
-
- Props p = (Props) o;
- return _current.equals(p._current)
- && Utils.equals(this._parent, p._parent);
- }
-
- /**
- * Returns true if the properties are equivalent, regardless of the
- * hierarchy.
- *
- * @param p
- * @return
- */
- public boolean equalsProps(Props p) {
- if (p == null) {
- return false;
- }
-
- final Set<String> myKeySet = getKeySet();
- for (String s : myKeySet) {
- if (!get(s).equals(p.get(s))) {
- return false;
- }
- }
-
- return myKeySet.size() == p.getKeySet().size();
- }
-
- /**
+ @Override
+ public boolean equals(Object o) {
+ if (o == this) {
+ return true;
+ } else if (o == null) {
+ return false;
+ } else if (o.getClass() != Props.class) {
+ return false;
+ }
+
+ Props p = (Props) o;
+ return _current.equals(p._current) && Utils.equals(this._parent, p._parent);
+ }
+
+ /**
+ * Returns true if the properties are equivalent, regardless of the hierarchy.
+ *
+ * @param p
+ * @return
+ */
+ public boolean equalsProps(Props p) {
+ if (p == null) {
+ return false;
+ }
+
+ final Set<String> myKeySet = getKeySet();
+ for (String s : myKeySet) {
+ if (!get(s).equals(p.get(s))) {
+ return false;
+ }
+ }
+
+ return myKeySet.size() == p.getKeySet().size();
+ }
+
+ /**
*
*/
- @Override
- public int hashCode() {
- int code = this._current.hashCode();
- if (_parent != null)
- code += _parent.hashCode();
- return code;
- }
-
- /**
+ @Override
+ public int hashCode() {
+ int code = this._current.hashCode();
+ if (_parent != null)
+ code += _parent.hashCode();
+ return code;
+ }
+
+ /**
*
*/
- @Override
- public String toString() {
- StringBuilder builder = new StringBuilder("{");
- for (Map.Entry<String, String> entry : this._current.entrySet()) {
- builder.append(entry.getKey());
- builder.append(": ");
- builder.append(entry.getValue());
- builder.append(", ");
- }
- if (_parent != null) {
- builder.append(" parent = ");
- builder.append(_parent.toString());
- }
- builder.append("}");
- return builder.toString();
- }
-
- public String getSource() {
- return source;
- }
-
- public void setSource(String source) {
- this.source = source;
- }
-
- public void setParent(Props prop) {
- this._parent = prop;
- }
+ @Override
+ public String toString() {
+ StringBuilder builder = new StringBuilder("{");
+ for (Map.Entry<String, String> entry : this._current.entrySet()) {
+ builder.append(entry.getKey());
+ builder.append(": ");
+ builder.append(entry.getValue());
+ builder.append(", ");
+ }
+ if (_parent != null) {
+ builder.append(" parent = ");
+ builder.append(_parent.toString());
+ }
+ builder.append("}");
+ return builder.toString();
+ }
+
+ public String getSource() {
+ return source;
+ }
+
+ public void setSource(String source) {
+ this.source = source;
+ }
+
+ public void setParent(Props prop) {
+ this._parent = prop;
+ }
}
src/main/java/azkaban/utils/PropsUtils.java 634(+318 -316)
diff --git a/src/main/java/azkaban/utils/PropsUtils.java b/src/main/java/azkaban/utils/PropsUtils.java
index 6b05f62..22b47cc 100644
--- a/src/main/java/azkaban/utils/PropsUtils.java
+++ b/src/main/java/azkaban/utils/PropsUtils.java
@@ -40,320 +40,322 @@ import org.joda.time.DateTime;
public class PropsUtils {
- /**
- * Load job schedules from the given directories ] * @param dir The
- * directory to look in
- *
- * @param suffixes
- * File suffixes to load
- * @return The loaded set of schedules
- */
- public static Props loadPropsInDir(File dir, String... suffixes) {
- return loadPropsInDir(null, dir, suffixes);
- }
-
- /**
- * Load job schedules from the given directories
- *
- * @param parent
- * The parent properties for these properties
- * @param dir
- * The directory to look in
- * @param suffixes
- * File suffixes to load
- * @return The loaded set of schedules
- */
- public static Props loadPropsInDir(Props parent, File dir, String... suffixes) {
- try {
- Props props = new Props(parent);
- File[] files = dir.listFiles();
- Arrays.sort(files);
- if (files != null) {
- for (File f : files) {
- if (f.isFile() && endsWith(f, suffixes)) {
- props.putAll(new Props(null, f.getAbsolutePath()));
- }
- }
- }
- return props;
- } catch (IOException e) {
- throw new RuntimeException("Error loading properties.", e);
- }
- }
-
- public static Props loadProps(Props parent, File ... propFiles) {
- try {
- Props props = new Props(parent);
- for (File f: propFiles) {
- if (f.isFile()) {
- props = new Props(props, f);
- }
- }
-
- return props;
- }
- catch (IOException e) {
- throw new RuntimeException("Error loading properties.", e);
- }
- }
-
- /**
- * Load job schedules from the given directories
- *
- * @param dirs
- * The directories to check for properties
- * @param suffixes
- * The suffixes to load
- * @return The properties
- */
- public static Props loadPropsInDirs(List<File> dirs, String... suffixes) {
- Props props = new Props();
- for (File dir : dirs) {
- props.putLocal(loadPropsInDir(dir, suffixes));
- }
- return props;
- }
-
- /**
- * Load properties from the given path
- *
- * @param jobPath
- * The path to load from
- * @param props
- * The parent properties for loaded properties
- * @param suffixes
- * The suffixes of files to load
- */
- public static void loadPropsBySuffix(File jobPath, Props props,
- String... suffixes) {
- try {
- if (jobPath.isDirectory()) {
- File[] files = jobPath.listFiles();
- if (files != null) {
- for (File file : files)
- loadPropsBySuffix(file, props, suffixes);
- }
- } else if (endsWith(jobPath, suffixes)) {
- props.putAll(new Props(null, jobPath.getAbsolutePath()));
- }
- } catch (IOException e) {
- throw new RuntimeException("Error loading schedule properties.", e);
- }
- }
-
- public static boolean endsWith(File file, String... suffixes) {
- for (String suffix : suffixes)
- if (file.getName().endsWith(suffix))
- return true;
- return false;
- }
-
- private static final Pattern VARIABLE_REPLACEMENT_PATTERN = Pattern.compile("\\$\\{([a-zA-Z_.0-9]+)\\}");
-
- public static Props resolveProps(Props props) {
- if (props == null) return null;
-
- Props resolvedProps = new Props();
-
- LinkedHashSet<String> visitedVariables = new LinkedHashSet<String>();
- for (String key : props.getKeySet()) {
- String value = props.get(key);
-
- visitedVariables.add(key);
- String replacedValue = resolveVariableReplacement(value, props, visitedVariables);
- visitedVariables.clear();
-
- resolvedProps.put(key, replacedValue);
- }
-
- for (String key : resolvedProps.getKeySet()) {
- String value = resolvedProps.get(key);
- String expressedValue = resolveVariableExpression(value);
- resolvedProps.put(key, expressedValue);
- }
-
- return resolvedProps;
- };
-
- private static String resolveVariableReplacement(String value, Props props, LinkedHashSet<String> visitedVariables) {
- StringBuffer buffer = new StringBuffer();
- int startIndex = 0;
-
- Matcher matcher = VARIABLE_REPLACEMENT_PATTERN.matcher(value);
- while (matcher.find(startIndex)) {
- if (startIndex < matcher.start()) {
- // Copy everything up front to the buffer
- buffer.append(value.substring(startIndex, matcher.start()));
- }
-
- String subVariable = matcher.group(1);
- // Detected a cycle
- if (visitedVariables.contains(subVariable)) {
- throw new IllegalArgumentException(
- String.format("Circular variable substitution found: [%s] -> [%s]",
- StringUtils.join(visitedVariables, "->"), subVariable));
- }
- else {
- // Add substitute variable and recurse.
- String replacement = props.get(subVariable);
- visitedVariables.add(subVariable);
-
- if (replacement == null) {
- throw new UndefinedPropertyException(
- String.format("Could not find variable substitution for variable(s) [%s]",
- StringUtils.join(visitedVariables, "->")));
- }
-
- buffer.append(resolveVariableReplacement(replacement, props, visitedVariables));
- visitedVariables.remove(subVariable);
- }
-
- startIndex = matcher.end();
- }
-
- if (startIndex < value.length()) {
- buffer.append(value.substring(startIndex));
- }
-
- return buffer.toString();
- }
-
- private static String resolveVariableExpression(String value) {
- JexlEngine jexl = new JexlEngine();
- return resolveVariableExpression(value, value.length(), jexl);
- }
-
- /**
- * Function that looks for expressions to parse.
- * It parses backwards to capture embedded expressions
- *
- * @param value
- * @param last
- * @param jexl
- * @return
- */
- private static String resolveVariableExpression(String value, int last, JexlEngine jexl) {
- int lastIndex = value.lastIndexOf("$(", last);
- if (lastIndex == -1) {
- return value;
- }
-
- // Want to check that everything is well formed, and that
- // we properly capture $( ...(...)...).
- int bracketCount = 0;
- int nextClosed = lastIndex + 2;
- for (; nextClosed < value.length(); ++nextClosed) {
- if (value.charAt(nextClosed) == '(') {
- bracketCount++;
- }
- else if (value.charAt(nextClosed) == ')') {
- bracketCount--;
- if (bracketCount == -1) {
- break;
- }
- }
- }
-
- if (nextClosed == value.length()) {
- throw new IllegalArgumentException("Expression " + value + " not well formed.");
- }
-
- String innerExpression = value.substring(lastIndex + 2, nextClosed);
- Object result = null;
- try {
- Expression e = jexl.createExpression(innerExpression);
- result = e.evaluate(new MapContext());
- }
- catch (JexlException e) {
- throw new IllegalArgumentException("Expression " + value + " not well formed. " + e.getMessage(), e);
- }
-
- if (result == null) {
- // for backward compatibility it is best to return value
- return value;
- }
-
- String newValue = value.substring(0, lastIndex) + result.toString() + value.substring(nextClosed + 1);
- return resolveVariableExpression(newValue, lastIndex, jexl);
- }
-
- public static Props addCommonFlowProperties(Props parentProps, final ExecutableFlowBase flow) {
- Props props = new Props(parentProps);
-
- props.put(CommonJobProperties.FLOW_ID, flow.getFlowId());
- props.put(CommonJobProperties.EXEC_ID, flow.getExecutionId());
- props.put(CommonJobProperties.PROJECT_ID, flow.getProjectId());
- props.put(CommonJobProperties.PROJECT_VERSION, flow.getVersion());
- props.put(CommonJobProperties.FLOW_UUID, UUID.randomUUID().toString());
-
- DateTime loadTime = new DateTime();
-
- props.put(CommonJobProperties.FLOW_START_TIMESTAMP, loadTime.toString());
- props.put(CommonJobProperties.FLOW_START_YEAR, loadTime.toString("yyyy"));
- props.put(CommonJobProperties.FLOW_START_MONTH, loadTime.toString("MM"));
- props.put(CommonJobProperties.FLOW_START_DAY, loadTime.toString("dd"));
- props.put(CommonJobProperties.FLOW_START_HOUR, loadTime.toString("HH"));
- props.put(CommonJobProperties.FLOW_START_MINUTE, loadTime.toString("mm"));
- props.put(CommonJobProperties.FLOW_START_SECOND, loadTime.toString("ss"));
- props.put(CommonJobProperties.FLOW_START_MILLISSECOND, loadTime.toString("SSS"));
- props.put(CommonJobProperties.FLOW_START_TIMEZONE, loadTime.toString("ZZZZ"));
- return props;
- }
-
- public static String toJSONString(Props props, boolean localOnly) {
- Map<String, String> map = toStringMap(props, localOnly);
- return JSONUtils.toJSON(map);
- }
-
- public static Map<String, String> toStringMap(Props props, boolean localOnly) {
- HashMap<String, String> map = new HashMap<String, String>();
- Set<String> keyset = localOnly ? props.localKeySet() : props.getKeySet();
-
- for (String key: keyset) {
- String value = props.get(key);
- map.put(key, value);
- }
-
- return map;
- }
-
- @SuppressWarnings("unchecked")
- public static Props fromJSONString(String json) {
- try {
- Map<String, String> obj = (Map<String, String>)JSONUtils.parseJSONFromString(json);
- Props props = new Props(null, obj);
- return props;
- } catch (IOException e) {
- return null;
- }
- }
-
- @SuppressWarnings("unchecked")
- public static Props fromHierarchicalMap(Map<String, Object> propsMap) {
- if (propsMap == null) {
- return null;
- }
-
- String source = (String)propsMap.get("source");
- Map<String, String> propsParams = (Map<String,String>)propsMap.get("props");
-
- Map<String,Object> parent = (Map<String,Object>)propsMap.get("parent");
- Props parentProps = fromHierarchicalMap(parent);
-
- Props props = new Props(parentProps, propsParams);
- props.setSource(source);
- return props;
- }
-
- public static Map<String,Object> toHierarchicalMap(Props props) {
- Map<String,Object> propsMap = new HashMap<String,Object>();
- propsMap.put("source", props.getSource());
- propsMap.put("props", toStringMap(props, true));
-
- if (props.getParent() != null) {
- propsMap.put("parent", toHierarchicalMap(props.getParent()));
- }
-
- return propsMap;
- }
+ /**
+   * Load job schedules from the given directory.
+   * @param dir The directory to look in
+ *
+ * @param suffixes File suffixes to load
+ * @return The loaded set of schedules
+ */
+ public static Props loadPropsInDir(File dir, String... suffixes) {
+ return loadPropsInDir(null, dir, suffixes);
+ }
+
+ /**
+ * Load job schedules from the given directories
+ *
+ * @param parent The parent properties for these properties
+ * @param dir The directory to look in
+ * @param suffixes File suffixes to load
+ * @return The loaded set of schedules
+ */
+ public static Props loadPropsInDir(Props parent, File dir, String... suffixes) {
+ try {
+ Props props = new Props(parent);
+ File[] files = dir.listFiles();
+ Arrays.sort(files);
+ if (files != null) {
+ for (File f : files) {
+ if (f.isFile() && endsWith(f, suffixes)) {
+ props.putAll(new Props(null, f.getAbsolutePath()));
+ }
+ }
+ }
+ return props;
+ } catch (IOException e) {
+ throw new RuntimeException("Error loading properties.", e);
+ }
+ }
+
+ public static Props loadProps(Props parent, File... propFiles) {
+ try {
+ Props props = new Props(parent);
+ for (File f : propFiles) {
+ if (f.isFile()) {
+ props = new Props(props, f);
+ }
+ }
+
+ return props;
+ } catch (IOException e) {
+ throw new RuntimeException("Error loading properties.", e);
+ }
+ }
+
+ /**
+ * Load job schedules from the given directories
+ *
+ * @param dirs The directories to check for properties
+ * @param suffixes The suffixes to load
+ * @return The properties
+ */
+ public static Props loadPropsInDirs(List<File> dirs, String... suffixes) {
+ Props props = new Props();
+ for (File dir : dirs) {
+ props.putLocal(loadPropsInDir(dir, suffixes));
+ }
+ return props;
+ }
+
+ /**
+ * Load properties from the given path
+ *
+ * @param jobPath The path to load from
+ * @param props The parent properties for loaded properties
+ * @param suffixes The suffixes of files to load
+ */
+ public static void loadPropsBySuffix(File jobPath, Props props,
+ String... suffixes) {
+ try {
+ if (jobPath.isDirectory()) {
+ File[] files = jobPath.listFiles();
+ if (files != null) {
+ for (File file : files)
+ loadPropsBySuffix(file, props, suffixes);
+ }
+ } else if (endsWith(jobPath, suffixes)) {
+ props.putAll(new Props(null, jobPath.getAbsolutePath()));
+ }
+ } catch (IOException e) {
+ throw new RuntimeException("Error loading schedule properties.", e);
+ }
+ }
+
+ public static boolean endsWith(File file, String... suffixes) {
+ for (String suffix : suffixes)
+ if (file.getName().endsWith(suffix))
+ return true;
+ return false;
+ }
+
+ private static final Pattern VARIABLE_REPLACEMENT_PATTERN = Pattern
+ .compile("\\$\\{([a-zA-Z_.0-9]+)\\}");
+
+ public static Props resolveProps(Props props) {
+ if (props == null)
+ return null;
+
+ Props resolvedProps = new Props();
+
+ LinkedHashSet<String> visitedVariables = new LinkedHashSet<String>();
+ for (String key : props.getKeySet()) {
+ String value = props.get(key);
+
+ visitedVariables.add(key);
+ String replacedValue =
+ resolveVariableReplacement(value, props, visitedVariables);
+ visitedVariables.clear();
+
+ resolvedProps.put(key, replacedValue);
+ }
+
+ for (String key : resolvedProps.getKeySet()) {
+ String value = resolvedProps.get(key);
+ String expressedValue = resolveVariableExpression(value);
+ resolvedProps.put(key, expressedValue);
+ }
+
+ return resolvedProps;
+ };
+
+ private static String resolveVariableReplacement(String value, Props props,
+ LinkedHashSet<String> visitedVariables) {
+ StringBuffer buffer = new StringBuffer();
+ int startIndex = 0;
+
+ Matcher matcher = VARIABLE_REPLACEMENT_PATTERN.matcher(value);
+ while (matcher.find(startIndex)) {
+ if (startIndex < matcher.start()) {
+ // Copy everything up front to the buffer
+ buffer.append(value.substring(startIndex, matcher.start()));
+ }
+
+ String subVariable = matcher.group(1);
+ // Detected a cycle
+ if (visitedVariables.contains(subVariable)) {
+ throw new IllegalArgumentException(String.format(
+ "Circular variable substitution found: [%s] -> [%s]",
+ StringUtils.join(visitedVariables, "->"), subVariable));
+ } else {
+ // Add substitute variable and recurse.
+ String replacement = props.get(subVariable);
+ visitedVariables.add(subVariable);
+
+ if (replacement == null) {
+ throw new UndefinedPropertyException(String.format(
+ "Could not find variable substitution for variable(s) [%s]",
+ StringUtils.join(visitedVariables, "->")));
+ }
+
+ buffer.append(resolveVariableReplacement(replacement, props,
+ visitedVariables));
+ visitedVariables.remove(subVariable);
+ }
+
+ startIndex = matcher.end();
+ }
+
+ if (startIndex < value.length()) {
+ buffer.append(value.substring(startIndex));
+ }
+
+ return buffer.toString();
+ }
+
+ private static String resolveVariableExpression(String value) {
+ JexlEngine jexl = new JexlEngine();
+ return resolveVariableExpression(value, value.length(), jexl);
+ }
+
+ /**
+ * Function that looks for expressions to parse. It parses backwards to
+ * capture embedded expressions
+ *
+ * @param value
+ * @param last
+ * @param jexl
+ * @return
+ */
+ private static String resolveVariableExpression(String value, int last,
+ JexlEngine jexl) {
+ int lastIndex = value.lastIndexOf("$(", last);
+ if (lastIndex == -1) {
+ return value;
+ }
+
+ // Want to check that everything is well formed, and that
+ // we properly capture $( ...(...)...).
+ int bracketCount = 0;
+ int nextClosed = lastIndex + 2;
+ for (; nextClosed < value.length(); ++nextClosed) {
+ if (value.charAt(nextClosed) == '(') {
+ bracketCount++;
+ } else if (value.charAt(nextClosed) == ')') {
+ bracketCount--;
+ if (bracketCount == -1) {
+ break;
+ }
+ }
+ }
+
+ if (nextClosed == value.length()) {
+ throw new IllegalArgumentException("Expression " + value
+ + " not well formed.");
+ }
+
+ String innerExpression = value.substring(lastIndex + 2, nextClosed);
+ Object result = null;
+ try {
+ Expression e = jexl.createExpression(innerExpression);
+ result = e.evaluate(new MapContext());
+ } catch (JexlException e) {
+ throw new IllegalArgumentException("Expression " + value
+ + " not well formed. " + e.getMessage(), e);
+ }
+
+ if (result == null) {
+ // for backward compatibility it is best to return value
+ return value;
+ }
+
+ String newValue =
+ value.substring(0, lastIndex) + result.toString()
+ + value.substring(nextClosed + 1);
+ return resolveVariableExpression(newValue, lastIndex, jexl);
+ }
+
+ public static Props addCommonFlowProperties(Props parentProps,
+ final ExecutableFlowBase flow) {
+ Props props = new Props(parentProps);
+
+ props.put(CommonJobProperties.FLOW_ID, flow.getFlowId());
+ props.put(CommonJobProperties.EXEC_ID, flow.getExecutionId());
+ props.put(CommonJobProperties.PROJECT_ID, flow.getProjectId());
+ props.put(CommonJobProperties.PROJECT_VERSION, flow.getVersion());
+ props.put(CommonJobProperties.FLOW_UUID, UUID.randomUUID().toString());
+
+ DateTime loadTime = new DateTime();
+
+ props.put(CommonJobProperties.FLOW_START_TIMESTAMP, loadTime.toString());
+ props.put(CommonJobProperties.FLOW_START_YEAR, loadTime.toString("yyyy"));
+ props.put(CommonJobProperties.FLOW_START_MONTH, loadTime.toString("MM"));
+ props.put(CommonJobProperties.FLOW_START_DAY, loadTime.toString("dd"));
+ props.put(CommonJobProperties.FLOW_START_HOUR, loadTime.toString("HH"));
+ props.put(CommonJobProperties.FLOW_START_MINUTE, loadTime.toString("mm"));
+ props.put(CommonJobProperties.FLOW_START_SECOND, loadTime.toString("ss"));
+ props.put(CommonJobProperties.FLOW_START_MILLISSECOND,
+ loadTime.toString("SSS"));
+ props.put(CommonJobProperties.FLOW_START_TIMEZONE,
+ loadTime.toString("ZZZZ"));
+ return props;
+ }
+
+ public static String toJSONString(Props props, boolean localOnly) {
+ Map<String, String> map = toStringMap(props, localOnly);
+ return JSONUtils.toJSON(map);
+ }
+
+ public static Map<String, String> toStringMap(Props props, boolean localOnly) {
+ HashMap<String, String> map = new HashMap<String, String>();
+ Set<String> keyset = localOnly ? props.localKeySet() : props.getKeySet();
+
+ for (String key : keyset) {
+ String value = props.get(key);
+ map.put(key, value);
+ }
+
+ return map;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static Props fromJSONString(String json) {
+ try {
+ Map<String, String> obj =
+ (Map<String, String>) JSONUtils.parseJSONFromString(json);
+ Props props = new Props(null, obj);
+ return props;
+ } catch (IOException e) {
+ return null;
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ public static Props fromHierarchicalMap(Map<String, Object> propsMap) {
+ if (propsMap == null) {
+ return null;
+ }
+
+ String source = (String) propsMap.get("source");
+ Map<String, String> propsParams =
+ (Map<String, String>) propsMap.get("props");
+
+ Map<String, Object> parent = (Map<String, Object>) propsMap.get("parent");
+ Props parentProps = fromHierarchicalMap(parent);
+
+ Props props = new Props(parentProps, propsParams);
+ props.setSource(source);
+ return props;
+ }
+
+ public static Map<String, Object> toHierarchicalMap(Props props) {
+ Map<String, Object> propsMap = new HashMap<String, Object>();
+ propsMap.put("source", props.getSource());
+ propsMap.put("props", toStringMap(props, true));
+
+ if (props.getParent() != null) {
+ propsMap.put("parent", toHierarchicalMap(props.getParent()));
+ }
+
+ return propsMap;
+ }
}
diff --git a/src/main/java/azkaban/utils/SplitterOutputStream.java b/src/main/java/azkaban/utils/SplitterOutputStream.java
index 896d900..caf8a18 100644
--- a/src/main/java/azkaban/utils/SplitterOutputStream.java
+++ b/src/main/java/azkaban/utils/SplitterOutputStream.java
@@ -22,64 +22,64 @@ import java.util.ArrayList;
import java.util.List;
public class SplitterOutputStream extends OutputStream {
- List<OutputStream> outputs;
+ List<OutputStream> outputs;
- public SplitterOutputStream(OutputStream... outputs) {
- this.outputs = new ArrayList<OutputStream>(outputs.length);
- for (OutputStream output : outputs) {
- this.outputs.add(output);
- }
- }
+ public SplitterOutputStream(OutputStream... outputs) {
+ this.outputs = new ArrayList<OutputStream>(outputs.length);
+ for (OutputStream output : outputs) {
+ this.outputs.add(output);
+ }
+ }
- @Override
- public void write(int b) throws IOException {
- for (OutputStream output : outputs) {
- output.write(b);
- }
- }
+ @Override
+ public void write(int b) throws IOException {
+ for (OutputStream output : outputs) {
+ output.write(b);
+ }
+ }
- @Override
- public void write(byte[] b) throws IOException {
- for (OutputStream output : outputs) {
- output.write(b);
- }
- }
+ @Override
+ public void write(byte[] b) throws IOException {
+ for (OutputStream output : outputs) {
+ output.write(b);
+ }
+ }
- @Override
- public void write(byte[] b, int off, int len) throws IOException {
- for (OutputStream output : outputs) {
- output.write(b, off, len);
- }
- }
+ @Override
+ public void write(byte[] b, int off, int len) throws IOException {
+ for (OutputStream output : outputs) {
+ output.write(b, off, len);
+ }
+ }
- @Override
- public void flush() throws IOException {
- IOException exception = null;
- for (OutputStream output : outputs) {
- try {
- output.flush();
- } catch (IOException e) {
- exception = e;
- }
- }
- if (exception != null) {
- throw exception;
- }
- }
+ @Override
+ public void flush() throws IOException {
+ IOException exception = null;
+ for (OutputStream output : outputs) {
+ try {
+ output.flush();
+ } catch (IOException e) {
+ exception = e;
+ }
+ }
+ if (exception != null) {
+ throw exception;
+ }
+ }
- @Override
- public void close() throws IOException {
- IOException exception = null;
- for (OutputStream output : outputs) {
- try {
- output.close();
- } catch (IOException e) {
- exception = e;
- }
- }
- if (exception != null) {
- throw exception;
- }
- }
+ @Override
+ public void close() throws IOException {
+ IOException exception = null;
+ for (OutputStream output : outputs) {
+ try {
+ output.close();
+ } catch (IOException e) {
+ exception = e;
+ }
+ }
+ if (exception != null) {
+ throw exception;
+ }
+ }
}
src/main/java/azkaban/utils/StringUtils.java 87(+43 -44)
diff --git a/src/main/java/azkaban/utils/StringUtils.java b/src/main/java/azkaban/utils/StringUtils.java
index 77c5941..de3502a 100644
--- a/src/main/java/azkaban/utils/StringUtils.java
+++ b/src/main/java/azkaban/utils/StringUtils.java
@@ -20,52 +20,51 @@ import java.util.Collection;
import java.util.List;
public class StringUtils {
- public static final char SINGLE_QUOTE = '\'';
- public static final char DOUBLE_QUOTE = '\"';
+ public static final char SINGLE_QUOTE = '\'';
+ public static final char DOUBLE_QUOTE = '\"';
- public static String shellQuote(String s, char quoteCh) {
- StringBuffer buf = new StringBuffer(s.length() + 2);
+ public static String shellQuote(String s, char quoteCh) {
+ StringBuffer buf = new StringBuffer(s.length() + 2);
- buf.append(quoteCh);
- for (int i = 0; i < s.length(); i++) {
- final char ch = s.charAt(i);
- if (ch == quoteCh) {
- buf.append('\\');
- }
- buf.append(ch);
- }
- buf.append(quoteCh);
+ buf.append(quoteCh);
+ for (int i = 0; i < s.length(); i++) {
+ final char ch = s.charAt(i);
+ if (ch == quoteCh) {
+ buf.append('\\');
+ }
+ buf.append(ch);
+ }
+ buf.append(quoteCh);
- return buf.toString();
- }
-
- @Deprecated
- public static String join(List<String> list, String delimiter) {
- StringBuffer buffer = new StringBuffer();
- for (String str: list) {
- buffer.append(str);
- buffer.append(delimiter);
- }
-
- return buffer.toString();
- }
-
- /**
- * Use this when you don't want to include Apache Common's string for
- * plugins.
- *
- * @param list
- * @param delimiter
- * @return
- */
- public static String join(Collection<String> list, String delimiter) {
- StringBuffer buffer = new StringBuffer();
- for (String str: list) {
- buffer.append(str);
- buffer.append(delimiter);
- }
-
- return buffer.toString();
- }
+ return buf.toString();
+ }
+
+ @Deprecated
+ public static String join(List<String> list, String delimiter) {
+ StringBuffer buffer = new StringBuffer();
+ for (String str : list) {
+ buffer.append(str);
+ buffer.append(delimiter);
+ }
+
+ return buffer.toString();
+ }
+
+ /**
+   * Use this when you don't want to pull in Apache Commons StringUtils for plugins.
+ *
+ * @param list
+ * @param delimiter
+ * @return
+ */
+ public static String join(Collection<String> list, String delimiter) {
+ StringBuffer buffer = new StringBuffer();
+ for (String str : list) {
+ buffer.append(str);
+ buffer.append(delimiter);
+ }
+
+ return buffer.toString();
+ }
}
src/main/java/azkaban/utils/SwapQueue.java 107(+55 -52)
diff --git a/src/main/java/azkaban/utils/SwapQueue.java b/src/main/java/azkaban/utils/SwapQueue.java
index 9999306..bb547da 100644
--- a/src/main/java/azkaban/utils/SwapQueue.java
+++ b/src/main/java/azkaban/utils/SwapQueue.java
@@ -21,59 +21,62 @@ import java.util.Collection;
import java.util.Iterator;
/**
- * Queue that swaps its lists. Allows for non-blocking writes when reading.
- * Swap should be called before every read.
+ * Queue that swaps its lists. Allows for non-blocking writes when reading. Swap
+ * should be called before every read.
*/
public class SwapQueue<T> implements Iterable<T> {
- ArrayList<T> primaryQueue;
- ArrayList<T> secondaryQueue;
-
- public SwapQueue() {
- primaryQueue = new ArrayList<T>();
- secondaryQueue = new ArrayList<T>();
- }
-
- /**
- * Swaps primaryQueue with secondary queue. The previous primary queue will be released.
- */
- public synchronized void swap() {
- primaryQueue = secondaryQueue;
- secondaryQueue = new ArrayList<T>();
- }
-
- /**
- * Returns a count of the secondary queue.
- * @return
- */
- public synchronized int getSwapQueueSize() {
- return secondaryQueue.size();
- }
-
- public synchronized int getPrimarySize() {
- return primaryQueue.size();
- }
-
- public synchronized void addAll(Collection<T> col) {
- secondaryQueue.addAll(col);
- }
-
- /**
- * Returns both the secondary and primary size
- * @return
- */
- public synchronized int getSize() {
- return secondaryQueue.size() + primaryQueue.size();
- }
-
- public synchronized void add(T element) {
- secondaryQueue.add(element);
- }
+ ArrayList<T> primaryQueue;
+ ArrayList<T> secondaryQueue;
- /**
- * Returns iterator over the primary queue.
- */
- @Override
- public synchronized Iterator<T> iterator() {
- return primaryQueue.iterator();
- }
+ public SwapQueue() {
+ primaryQueue = new ArrayList<T>();
+ secondaryQueue = new ArrayList<T>();
+ }
+
+ /**
+ * Swaps primaryQueue with secondary queue. The previous primary queue will be
+ * released.
+ */
+ public synchronized void swap() {
+ primaryQueue = secondaryQueue;
+ secondaryQueue = new ArrayList<T>();
+ }
+
+ /**
+ * Returns a count of the secondary queue.
+ *
+ * @return
+ */
+ public synchronized int getSwapQueueSize() {
+ return secondaryQueue.size();
+ }
+
+ public synchronized int getPrimarySize() {
+ return primaryQueue.size();
+ }
+
+ public synchronized void addAll(Collection<T> col) {
+ secondaryQueue.addAll(col);
+ }
+
+ /**
+ * Returns both the secondary and primary size
+ *
+ * @return
+ */
+ public synchronized int getSize() {
+ return secondaryQueue.size() + primaryQueue.size();
+ }
+
+ public synchronized void add(T element) {
+ secondaryQueue.add(element);
+ }
+
+ /**
+ * Returns iterator over the primary queue.
+ */
+ @Override
+ public synchronized Iterator<T> iterator() {
+ return primaryQueue.iterator();
+ }
}
src/main/java/azkaban/utils/Triple.java 116(+58 -58)
diff --git a/src/main/java/azkaban/utils/Triple.java b/src/main/java/azkaban/utils/Triple.java
index a880d45..88dc443 100644
--- a/src/main/java/azkaban/utils/Triple.java
+++ b/src/main/java/azkaban/utils/Triple.java
@@ -20,64 +20,64 @@ package azkaban.utils;
* Like pair, but with 3 values.
*/
public class Triple<F, S, T> {
- private final F first;
- private final S second;
- private final T third;
-
- public Triple(F first, S second, T third) {
- this.first = first;
- this.second = second;
- this.third = third;
- }
-
- public F getFirst() {
- return first;
- }
-
- public S getSecond() {
- return second;
- }
-
- public T getThird() {
- return third;
- }
+ private final F first;
+ private final S second;
+ private final T third;
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((first == null) ? 0 : first.hashCode());
- result = prime * result + ((second == null) ? 0 : second.hashCode());
- result = prime * result + ((third == null) ? 0 : third.hashCode());
- return result;
- }
+ public Triple(F first, S second, T third) {
+ this.first = first;
+ this.second = second;
+ this.third = third;
+ }
+
+ public F getFirst() {
+ return first;
+ }
+
+ public S getSecond() {
+ return second;
+ }
+
+ public T getThird() {
+ return third;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((first == null) ? 0 : first.hashCode());
+ result = prime * result + ((second == null) ? 0 : second.hashCode());
+ result = prime * result + ((third == null) ? 0 : third.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ @SuppressWarnings("rawtypes")
+ Triple other = (Triple) obj;
+ if (first == null) {
+ if (other.first != null)
+ return false;
+ } else if (!first.equals(other.first))
+ return false;
+ if (second == null) {
+ if (other.second != null)
+ return false;
+ } else if (!second.equals(other.second))
+ return false;
+ if (third == null) {
+ if (other.third != null)
+ return false;
+ } else if (!third.equals(other.third))
+ return false;
+ return true;
+ }
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- @SuppressWarnings("rawtypes")
- Triple other = (Triple) obj;
- if (first == null) {
- if (other.first != null)
- return false;
- } else if (!first.equals(other.first))
- return false;
- if (second == null) {
- if (other.second != null)
- return false;
- } else if (!second.equals(other.second))
- return false;
- if (third == null) {
- if (other.third != null)
- return false;
- } else if (!third.equals(other.third))
- return false;
- return true;
- }
-
}
src/main/java/azkaban/utils/TypedMapWrapper.java 262(+129 -133)
diff --git a/src/main/java/azkaban/utils/TypedMapWrapper.java b/src/main/java/azkaban/utils/TypedMapWrapper.java
index cce512b..282dfdf 100644
--- a/src/main/java/azkaban/utils/TypedMapWrapper.java
+++ b/src/main/java/azkaban/utils/TypedMapWrapper.java
@@ -5,137 +5,133 @@ import java.util.List;
import java.util.Map;
public class TypedMapWrapper<K, V> {
- private Map<K,V> map;
- public TypedMapWrapper(Map<K, V> map) {
- this.map = map;
- }
-
- public String getString(K key) {
- return getString(key, null);
- }
-
- public String getString(K key, String defaultVal) {
- Object obj = map.get(key);
- if (obj == null) {
- return defaultVal;
- }
- if (obj instanceof String) {
- return (String)obj;
- }
-
- return obj.toString();
- }
-
- public Boolean getBool(K key, Boolean defaultVal) {
- Object obj = map.get(key);
- if (obj == null) {
- return defaultVal;
- }
-
- return (Boolean)obj;
- }
-
- public Integer getInt(K key) {
- return getInt(key, -1);
- }
-
- public Integer getInt(K key, Integer defaultVal) {
- Object obj = map.get(key);
- if (obj == null) {
- return defaultVal;
- }
- if (obj instanceof Integer) {
- return (Integer)obj;
- }
- else if (obj instanceof String) {
- return Integer.valueOf((String)obj);
- }
- else {
- return defaultVal;
- }
- }
-
- public Long getLong(K key) {
- return getLong(key, -1l);
- }
-
- public Long getLong(K key, Long defaultVal) {
- Object obj = map.get(key);
- if (obj == null) {
- return defaultVal;
- }
- if (obj instanceof Long) {
- return (Long)obj;
- }
- else if (obj instanceof Integer) {
- return Long.valueOf((Integer)obj);
- }
- else if (obj instanceof String) {
- return Long.valueOf((String)obj);
- }
- else {
- return defaultVal;
- }
- }
-
- @SuppressWarnings("unchecked")
- public Collection<String> getStringCollection(K key) {
- Object obj = map.get(key);
- return (Collection<String>)obj;
- }
-
- @SuppressWarnings("unchecked")
- public Collection<String> getStringCollection(K key, Collection<String> defaultVal) {
- Object obj = map.get(key);
- if (obj == null) {
- return defaultVal;
- }
-
- return (Collection<String>)obj;
- }
-
-
- @SuppressWarnings("unchecked")
- public <C> Collection<C> getCollection(K key) {
- Object obj = map.get(key);
- if (obj instanceof Collection) {
- return (Collection<C>)obj;
- }
- return null;
- }
-
- @SuppressWarnings("unchecked")
- public <L> List<L> getList(K key) {
- Object obj = map.get(key);
- if (obj instanceof List) {
- return (List<L>)obj;
- }
- return null;
- }
-
- @SuppressWarnings("unchecked")
- public <L> List<L> getList(K key, List<L> defaultVal) {
- Object obj = map.get(key);
- if (obj instanceof List) {
- return (List<L>)obj;
- }
- return defaultVal;
- }
-
- public Object getObject(K key) {
- return map.get(key);
- }
-
- public Map<K, V> getMap() {
- return map;
- }
-
- @SuppressWarnings("unchecked")
- public <S, T> Map<S,T> getMap(K key) {
- return (Map<S,T>)map.get(key);
- }
-
- public boolean containsKey(K key) {
- return map.containsKey(key);
- }
+ private Map<K, V> map;
+
+ public TypedMapWrapper(Map<K, V> map) {
+ this.map = map;
+ }
+
+ public String getString(K key) {
+ return getString(key, null);
+ }
+
+ public String getString(K key, String defaultVal) {
+ Object obj = map.get(key);
+ if (obj == null) {
+ return defaultVal;
+ }
+ if (obj instanceof String) {
+ return (String) obj;
+ }
+
+ return obj.toString();
+ }
+
+ public Boolean getBool(K key, Boolean defaultVal) {
+ Object obj = map.get(key);
+ if (obj == null) {
+ return defaultVal;
+ }
+
+ return (Boolean) obj;
+ }
+
+ public Integer getInt(K key) {
+ return getInt(key, -1);
+ }
+
+ public Integer getInt(K key, Integer defaultVal) {
+ Object obj = map.get(key);
+ if (obj == null) {
+ return defaultVal;
+ }
+ if (obj instanceof Integer) {
+ return (Integer) obj;
+ } else if (obj instanceof String) {
+ return Integer.valueOf((String) obj);
+ } else {
+ return defaultVal;
+ }
+ }
+
+ public Long getLong(K key) {
+ return getLong(key, -1l);
+ }
+
+ public Long getLong(K key, Long defaultVal) {
+ Object obj = map.get(key);
+ if (obj == null) {
+ return defaultVal;
+ }
+ if (obj instanceof Long) {
+ return (Long) obj;
+ } else if (obj instanceof Integer) {
+ return Long.valueOf((Integer) obj);
+ } else if (obj instanceof String) {
+ return Long.valueOf((String) obj);
+ } else {
+ return defaultVal;
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ public Collection<String> getStringCollection(K key) {
+ Object obj = map.get(key);
+ return (Collection<String>) obj;
+ }
+
+ @SuppressWarnings("unchecked")
+ public Collection<String> getStringCollection(K key,
+ Collection<String> defaultVal) {
+ Object obj = map.get(key);
+ if (obj == null) {
+ return defaultVal;
+ }
+
+ return (Collection<String>) obj;
+ }
+
+ @SuppressWarnings("unchecked")
+ public <C> Collection<C> getCollection(K key) {
+ Object obj = map.get(key);
+ if (obj instanceof Collection) {
+ return (Collection<C>) obj;
+ }
+ return null;
+ }
+
+ @SuppressWarnings("unchecked")
+ public <L> List<L> getList(K key) {
+ Object obj = map.get(key);
+ if (obj instanceof List) {
+ return (List<L>) obj;
+ }
+ return null;
+ }
+
+ @SuppressWarnings("unchecked")
+ public <L> List<L> getList(K key, List<L> defaultVal) {
+ Object obj = map.get(key);
+ if (obj instanceof List) {
+ return (List<L>) obj;
+ }
+ return defaultVal;
+ }
+
+ public Object getObject(K key) {
+ return map.get(key);
+ }
+
+ public Map<K, V> getMap() {
+ return map;
+ }
+
+ @SuppressWarnings("unchecked")
+ public <S, T> Map<S, T> getMap(K key) {
+ return (Map<S, T>) map.get(key);
+ }
+
+ public boolean containsKey(K key) {
+ return map.containsKey(key);
+ }
}
diff --git a/src/main/java/azkaban/utils/UndefinedPropertyException.java b/src/main/java/azkaban/utils/UndefinedPropertyException.java
index acc142a..4298621 100644
--- a/src/main/java/azkaban/utils/UndefinedPropertyException.java
+++ b/src/main/java/azkaban/utils/UndefinedPropertyException.java
@@ -21,10 +21,10 @@ package azkaban.utils;
*/
public class UndefinedPropertyException extends RuntimeException {
- private static final long serialVersionUID = 1;
+ private static final long serialVersionUID = 1;
- public UndefinedPropertyException(String message) {
- super(message);
- }
+ public UndefinedPropertyException(String message) {
+ super(message);
+ }
}
src/main/java/azkaban/utils/Utils.java 699(+351 -348)
diff --git a/src/main/java/azkaban/utils/Utils.java b/src/main/java/azkaban/utils/Utils.java
index bb14d03..57e6e05 100644
--- a/src/main/java/azkaban/utils/Utils.java
+++ b/src/main/java/azkaban/utils/Utils.java
@@ -49,360 +49,363 @@ import org.joda.time.Years;
* A util helper class full of static methods that are commonly used.
*/
public class Utils {
- public static final Random RANDOM = new Random();
-
- /**
- * Private constructor.
- */
- private Utils() {
- }
-
- /**
- * Equivalent to Object.equals except that it handles nulls. If a and b are
- * both null, true is returned.
- *
- * @param a
- * @param b
- * @return
- */
- public static boolean equals(Object a, Object b) {
- if (a == null || b == null) {
- return a == b;
- }
-
- return a.equals(b);
- }
-
- /**
- * Return the object if it is non-null, otherwise throw an exception
- *
- * @param <T>
- * The type of the object
- * @param t
- * The object
- * @return The object if it is not null
- * @throws IllegalArgumentException
- * if the object is null
- */
- public static <T> T nonNull(T t) {
- if (t == null) {
- throw new IllegalArgumentException("Null value not allowed.");
- } else {
- return t;
- }
- }
-
- public static File findFilefromDir(File dir, String fn){
- if(dir.isDirectory()) {
- for(File f : dir.listFiles()) {
- if(f.getName().equals(fn)) {
- return f;
- }
- }
- }
- return null;
- }
-
- /**
- * Print the message and then exit with the given exit code
- *
- * @param message
- * The message to print
- * @param exitCode
- * The exit code
- */
- public static void croak(String message, int exitCode) {
- System.err.println(message);
- System.exit(exitCode);
- }
-
- public static File createTempDir() {
- return createTempDir(new File(System.getProperty("java.io.tmpdir")));
- }
-
- public static File createTempDir(File parent) {
- File temp = new File(parent,
- Integer.toString(Math.abs(RANDOM.nextInt()) % 100000000));
- temp.delete();
- temp.mkdir();
- temp.deleteOnExit();
- return temp;
- }
-
- public static void zip(File input, File output) throws IOException {
- FileOutputStream out = new FileOutputStream(output);
- ZipOutputStream zOut = new ZipOutputStream(out);
- try {
- zipFile("", input, zOut);
- } finally {
- zOut.close();
- }
- }
-
- public static void zipFolderContent(File folder, File output) throws IOException {
- FileOutputStream out = new FileOutputStream(output);
- ZipOutputStream zOut = new ZipOutputStream(out);
- try {
- File[] files = folder.listFiles();
- if (files != null) {
- for (File f : files) {
- zipFile("", f, zOut);
- }
- }
- } finally {
- zOut.close();
- }
- }
-
- private static void zipFile(String path, File input, ZipOutputStream zOut) throws IOException {
- if (input.isDirectory()) {
- File[] files = input.listFiles();
- if (files != null) {
- for (File f : files) {
- String childPath = path + input.getName()
- + (f.isDirectory() ? "/" : "");
- zipFile(childPath, f, zOut);
- }
- }
- } else {
- String childPath = path + (path.length() > 0 ? "/" : "")
- + input.getName();
- ZipEntry entry = new ZipEntry(childPath);
- zOut.putNextEntry(entry);
- InputStream fileInputStream = new BufferedInputStream(
- new FileInputStream(input));
- try {
- IOUtils.copy(fileInputStream, zOut);
- } finally {
- fileInputStream.close();
- }
- }
- }
-
- public static void unzip(ZipFile source, File dest) throws IOException {
- Enumeration<?> entries = source.entries();
- while (entries.hasMoreElements()) {
- ZipEntry entry = (ZipEntry) entries.nextElement();
- File newFile = new File(dest, entry.getName());
- if (entry.isDirectory()) {
- newFile.mkdirs();
- } else {
- newFile.getParentFile().mkdirs();
- InputStream src = source.getInputStream(entry);
- try {
- OutputStream output = new BufferedOutputStream(
- new FileOutputStream(newFile));
- try {
- IOUtils.copy(src, output);
- } finally {
- output.close();
- }
- } finally {
- src.close();
- }
- }
- }
- }
-
- public static String flattenToString(Collection<?> collection, String delimiter) {
- StringBuffer buffer = new StringBuffer();
- for (Object obj : collection) {
- buffer.append(obj.toString());
- buffer.append(',');
- }
-
- if (buffer.length() > 0) {
- buffer.setLength(buffer.length() - 1);
- }
- return buffer.toString();
- }
-
- public static Double convertToDouble(Object obj) {
- if (obj instanceof String) {
- return Double.parseDouble((String) obj);
- }
-
- return (Double) obj;
- }
-
- /**
- * Get the root cause of the Exception
- *
- * @param e The Exception
- * @return The root cause of the Exception
- */
- private static RuntimeException getCause(InvocationTargetException e) {
- Throwable cause = e.getCause();
- if(cause instanceof RuntimeException)
- throw (RuntimeException) cause;
- else
- throw new IllegalStateException(e.getCause());
+ public static final Random RANDOM = new Random();
+
+ /**
+ * Private constructor.
+ */
+ private Utils() {
+ }
+
+ /**
+ * Equivalent to Object.equals except that it handles nulls. If a and b are
+ * both null, true is returned.
+ *
+ * @param a
+ * @param b
+ * @return
+ */
+ public static boolean equals(Object a, Object b) {
+ if (a == null || b == null) {
+ return a == b;
+ }
+
+ return a.equals(b);
+ }
+
+ /**
+ * Return the object if it is non-null, otherwise throw an exception
+ *
+ * @param <T> The type of the object
+ * @param t The object
+ * @return The object if it is not null
+ * @throws IllegalArgumentException if the object is null
+ */
+ public static <T> T nonNull(T t) {
+ if (t == null) {
+ throw new IllegalArgumentException("Null value not allowed.");
+ } else {
+ return t;
+ }
+ }
+
+ public static File findFilefromDir(File dir, String fn) {
+ if (dir.isDirectory()) {
+ for (File f : dir.listFiles()) {
+ if (f.getName().equals(fn)) {
+ return f;
+ }
+ }
}
-
- /**
- * Get the Class of all the objects
- *
- * @param args The objects to get the Classes from
- * @return The classes as an array
- */
- public static Class<?>[] getTypes(Object... args) {
- Class<?>[] argTypes = new Class<?>[args.length];
- for(int i = 0; i < argTypes.length; i++)
- argTypes[i] = args[i].getClass();
- return argTypes;
+ return null;
+ }
+
+ /**
+ * Print the message and then exit with the given exit code
+ *
+ * @param message The message to print
+ * @param exitCode The exit code
+ */
+ public static void croak(String message, int exitCode) {
+ System.err.println(message);
+ System.exit(exitCode);
+ }
+
+ public static File createTempDir() {
+ return createTempDir(new File(System.getProperty("java.io.tmpdir")));
+ }
+
+ public static File createTempDir(File parent) {
+ File temp =
+ new File(parent,
+ Integer.toString(Math.abs(RANDOM.nextInt()) % 100000000));
+ temp.delete();
+ temp.mkdir();
+ temp.deleteOnExit();
+ return temp;
+ }
+
+ public static void zip(File input, File output) throws IOException {
+ FileOutputStream out = new FileOutputStream(output);
+ ZipOutputStream zOut = new ZipOutputStream(out);
+ try {
+ zipFile("", input, zOut);
+ } finally {
+ zOut.close();
+ }
+ }
+
+ public static void zipFolderContent(File folder, File output)
+ throws IOException {
+ FileOutputStream out = new FileOutputStream(output);
+ ZipOutputStream zOut = new ZipOutputStream(out);
+ try {
+ File[] files = folder.listFiles();
+ if (files != null) {
+ for (File f : files) {
+ zipFile("", f, zOut);
+ }
+ }
+ } finally {
+ zOut.close();
}
-
- public static Object callConstructor(Class<?> c, Object... args) {
- return callConstructor(c, getTypes(args), args);
+ }
+
+ private static void zipFile(String path, File input, ZipOutputStream zOut)
+ throws IOException {
+ if (input.isDirectory()) {
+ File[] files = input.listFiles();
+ if (files != null) {
+ for (File f : files) {
+ String childPath =
+ path + input.getName() + (f.isDirectory() ? "/" : "");
+ zipFile(childPath, f, zOut);
+ }
+ }
+ } else {
+ String childPath =
+ path + (path.length() > 0 ? "/" : "") + input.getName();
+ ZipEntry entry = new ZipEntry(childPath);
+ zOut.putNextEntry(entry);
+ InputStream fileInputStream =
+ new BufferedInputStream(new FileInputStream(input));
+ try {
+ IOUtils.copy(fileInputStream, zOut);
+ } finally {
+ fileInputStream.close();
+ }
}
+ }
- /**
- * Call the class constructor with the given arguments
- *
- * @param c The class
- * @param args The arguments
- * @return The constructed object
- */
- public static Object callConstructor(Class<?> c, Class<?>[] argTypes, Object[] args) {
+ public static void unzip(ZipFile source, File dest) throws IOException {
+ Enumeration<?> entries = source.entries();
+ while (entries.hasMoreElements()) {
+ ZipEntry entry = (ZipEntry) entries.nextElement();
+ File newFile = new File(dest, entry.getName());
+ if (entry.isDirectory()) {
+ newFile.mkdirs();
+ } else {
+ newFile.getParentFile().mkdirs();
+ InputStream src = source.getInputStream(entry);
try {
- Constructor<?> cons = c.getConstructor(argTypes);
- return cons.newInstance(args);
- } catch(InvocationTargetException e) {
- throw getCause(e);
- } catch(IllegalAccessException e) {
- throw new IllegalStateException(e);
- } catch(NoSuchMethodException e) {
- throw new IllegalStateException(e);
- } catch(InstantiationException e) {
- throw new IllegalStateException(e);
+ OutputStream output =
+ new BufferedOutputStream(new FileOutputStream(newFile));
+ try {
+ IOUtils.copy(src, output);
+ } finally {
+ output.close();
+ }
+ } finally {
+ src.close();
}
+ }
+ }
+ }
+
+ public static String flattenToString(Collection<?> collection,
+ String delimiter) {
+ StringBuffer buffer = new StringBuffer();
+ for (Object obj : collection) {
+ buffer.append(obj.toString());
+ buffer.append(',');
+ }
+
+ if (buffer.length() > 0) {
+ buffer.setLength(buffer.length() - 1);
+ }
+ return buffer.toString();
+ }
+
+ public static Double convertToDouble(Object obj) {
+ if (obj instanceof String) {
+ return Double.parseDouble((String) obj);
+ }
+
+ return (Double) obj;
+ }
+
+ /**
+ * Get the root cause of the Exception
+ *
+ * @param e The Exception
+ * @return The root cause of the Exception
+ */
+ private static RuntimeException getCause(InvocationTargetException e) {
+ Throwable cause = e.getCause();
+ if (cause instanceof RuntimeException)
+ throw (RuntimeException) cause;
+ else
+ throw new IllegalStateException(e.getCause());
+ }
+
+ /**
+ * Get the Class of all the objects
+ *
+ * @param args The objects to get the Classes from
+ * @return The classes as an array
+ */
+ public static Class<?>[] getTypes(Object... args) {
+ Class<?>[] argTypes = new Class<?>[args.length];
+ for (int i = 0; i < argTypes.length; i++)
+ argTypes[i] = args[i].getClass();
+ return argTypes;
+ }
+
+ public static Object callConstructor(Class<?> c, Object... args) {
+ return callConstructor(c, getTypes(args), args);
+ }
+
+ /**
+ * Call the class constructor with the given arguments
+ *
+ * @param c The class
+ * @param args The arguments
+ * @return The constructed object
+ */
+ public static Object callConstructor(Class<?> c, Class<?>[] argTypes,
+ Object[] args) {
+ try {
+ Constructor<?> cons = c.getConstructor(argTypes);
+ return cons.newInstance(args);
+ } catch (InvocationTargetException e) {
+ throw getCause(e);
+ } catch (IllegalAccessException e) {
+ throw new IllegalStateException(e);
+ } catch (NoSuchMethodException e) {
+ throw new IllegalStateException(e);
+ } catch (InstantiationException e) {
+ throw new IllegalStateException(e);
+ }
+ }
+
+ public static String formatDuration(long startTime, long endTime) {
+ if (startTime == -1) {
+ return "-";
+ }
+
+ long durationMS;
+ if (endTime == -1) {
+ durationMS = System.currentTimeMillis() - startTime;
+ } else {
+ durationMS = endTime - startTime;
+ }
+
+ long seconds = durationMS / 1000;
+ if (seconds < 60) {
+ return seconds + " sec";
+ }
+
+ long minutes = seconds / 60;
+ seconds %= 60;
+ if (minutes < 60) {
+ return minutes + "m " + seconds + "s";
+ }
+
+ long hours = minutes / 60;
+ minutes %= 60;
+ if (hours < 24) {
+ return hours + "h " + minutes + "m " + seconds + "s";
+ }
+
+ long days = hours / 24;
+ hours %= 24;
+ return days + "d " + hours + "h " + minutes + "m";
+ }
+
+ public static Object invokeStaticMethod(ClassLoader loader, String className,
+ String methodName, Object... args) throws ClassNotFoundException,
+ SecurityException, NoSuchMethodException, IllegalArgumentException,
+ IllegalAccessException, InvocationTargetException {
+ Class<?> clazz = loader.loadClass(className);
+
+ Class<?>[] argTypes = new Class[args.length];
+ for (int i = 0; i < args.length; ++i) {
+ // argTypes[i] = args[i].getClass();
+ argTypes[i] = args[i].getClass();
+ }
+
+ Method method = clazz.getDeclaredMethod(methodName, argTypes);
+ return method.invoke(null, args);
+ }
+
+ public static void copyStream(InputStream input, OutputStream output)
+ throws IOException {
+ byte[] buffer = new byte[1024];
+ int bytesRead;
+ while ((bytesRead = input.read(buffer)) != -1) {
+ output.write(buffer, 0, bytesRead);
+ }
+ }
+
+ public static ReadablePeriod parsePeriodString(String periodStr) {
+ ReadablePeriod period;
+ char periodUnit = periodStr.charAt(periodStr.length() - 1);
+ if (periodStr.equals("null") || periodUnit == 'n') {
+ return null;
+ }
+
+ int periodInt =
+ Integer.parseInt(periodStr.substring(0, periodStr.length() - 1));
+ switch (periodUnit) {
+ case 'y':
+ period = Years.years(periodInt);
+ break;
+ case 'M':
+ period = Months.months(periodInt);
+ break;
+ case 'w':
+ period = Weeks.weeks(periodInt);
+ break;
+ case 'd':
+ period = Days.days(periodInt);
+ break;
+ case 'h':
+ period = Hours.hours(periodInt);
+ break;
+ case 'm':
+ period = Minutes.minutes(periodInt);
+ break;
+ case 's':
+ period = Seconds.seconds(periodInt);
+ break;
+ default:
+ throw new IllegalArgumentException("Invalid schedule period unit '"
+ + periodUnit);
+ }
+
+ return period;
+ }
+
+ public static String createPeriodString(ReadablePeriod period) {
+ String periodStr = "null";
+
+ if (period == null) {
+ return "null";
+ }
+
+ if (period.get(DurationFieldType.years()) > 0) {
+ int years = period.get(DurationFieldType.years());
+ periodStr = years + "y";
+ } else if (period.get(DurationFieldType.months()) > 0) {
+ int months = period.get(DurationFieldType.months());
+ periodStr = months + "M";
+ } else if (period.get(DurationFieldType.weeks()) > 0) {
+ int weeks = period.get(DurationFieldType.weeks());
+ periodStr = weeks + "w";
+ } else if (period.get(DurationFieldType.days()) > 0) {
+ int days = period.get(DurationFieldType.days());
+ periodStr = days + "d";
+ } else if (period.get(DurationFieldType.hours()) > 0) {
+ int hours = period.get(DurationFieldType.hours());
+ periodStr = hours + "h";
+ } else if (period.get(DurationFieldType.minutes()) > 0) {
+ int minutes = period.get(DurationFieldType.minutes());
+ periodStr = minutes + "m";
+ } else if (period.get(DurationFieldType.seconds()) > 0) {
+ int seconds = period.get(DurationFieldType.seconds());
+ periodStr = seconds + "s";
}
- public static String formatDuration(long startTime, long endTime) {
- if (startTime == -1) {
- return "-";
- }
-
- long durationMS;
- if (endTime == -1) {
- durationMS = System.currentTimeMillis() - startTime;
- }
- else {
- durationMS = endTime - startTime;
- }
-
- long seconds = durationMS/1000;
- if (seconds < 60) {
- return seconds + " sec";
- }
-
- long minutes = seconds / 60;
- seconds %= 60;
- if (minutes < 60) {
- return minutes + "m " + seconds + "s";
- }
-
- long hours = minutes / 60;
- minutes %= 60;
- if (hours < 24) {
- return hours + "h " + minutes + "m " + seconds + "s";
- }
-
- long days = hours / 24;
- hours %= 24;
- return days + "d " + hours + "h " + minutes + "m";
- }
-
- public static Object invokeStaticMethod(ClassLoader loader, String className, String methodName, Object ... args) throws ClassNotFoundException, SecurityException, NoSuchMethodException, IllegalArgumentException, IllegalAccessException, InvocationTargetException {
- Class<?> clazz = loader.loadClass(className);
-
- Class<?>[] argTypes = new Class[args.length];
- for (int i=0; i < args.length; ++i) {
- //argTypes[i] = args[i].getClass();
- argTypes[i] = args[i].getClass();
- }
-
- Method method = clazz.getDeclaredMethod(methodName, argTypes);
- return method.invoke(null, args);
- }
-
- public static void copyStream(InputStream input, OutputStream output) throws IOException {
- byte[] buffer = new byte[1024];
- int bytesRead;
- while ((bytesRead = input.read(buffer)) != -1) {
- output.write(buffer, 0, bytesRead);
- }
- }
-
- public static ReadablePeriod parsePeriodString(String periodStr) {
- ReadablePeriod period;
- char periodUnit = periodStr.charAt(periodStr.length() - 1);
- if (periodStr.equals("null") || periodUnit == 'n') {
- return null;
- }
-
- int periodInt = Integer.parseInt(periodStr.substring(0,
- periodStr.length() - 1));
- switch (periodUnit) {
- case 'y':
- period = Years.years(periodInt);
- break;
- case 'M':
- period = Months.months(periodInt);
- break;
- case 'w':
- period = Weeks.weeks(periodInt);
- break;
- case 'd':
- period = Days.days(periodInt);
- break;
- case 'h':
- period = Hours.hours(periodInt);
- break;
- case 'm':
- period = Minutes.minutes(periodInt);
- break;
- case 's':
- period = Seconds.seconds(periodInt);
- break;
- default:
- throw new IllegalArgumentException("Invalid schedule period unit '"
- + periodUnit);
- }
-
- return period;
- }
-
- public static String createPeriodString(ReadablePeriod period) {
- String periodStr = "null";
-
- if (period == null) {
- return "null";
- }
-
- if (period.get(DurationFieldType.years()) > 0) {
- int years = period.get(DurationFieldType.years());
- periodStr = years + "y";
- } else if (period.get(DurationFieldType.months()) > 0) {
- int months = period.get(DurationFieldType.months());
- periodStr = months + "M";
- } else if (period.get(DurationFieldType.weeks()) > 0) {
- int weeks = period.get(DurationFieldType.weeks());
- periodStr = weeks + "w";
- } else if (period.get(DurationFieldType.days()) > 0) {
- int days = period.get(DurationFieldType.days());
- periodStr = days + "d";
- } else if (period.get(DurationFieldType.hours()) > 0) {
- int hours = period.get(DurationFieldType.hours());
- periodStr = hours + "h";
- } else if (period.get(DurationFieldType.minutes()) > 0) {
- int minutes = period.get(DurationFieldType.minutes());
- periodStr = minutes + "m";
- } else if (period.get(DurationFieldType.seconds()) > 0) {
- int seconds = period.get(DurationFieldType.seconds());
- periodStr = seconds + "s";
- }
-
- return periodStr;
- }
+ return periodStr;
+ }
}
src/main/java/azkaban/utils/WebUtils.java 283(+138 -145)
diff --git a/src/main/java/azkaban/utils/WebUtils.java b/src/main/java/azkaban/utils/WebUtils.java
index 4f65159..021f650 100644
--- a/src/main/java/azkaban/utils/WebUtils.java
+++ b/src/main/java/azkaban/utils/WebUtils.java
@@ -26,149 +26,142 @@ import org.joda.time.format.DateTimeFormat;
import azkaban.executor.Status;
public class WebUtils {
- public static final String DATE_TIME_STRING = "YYYY-MM-dd HH:mm:ss";
-
- private static final long ONE_KB = 1024;
- private static final long ONE_MB = 1024 * ONE_KB;
- private static final long ONE_GB = 1024 * ONE_MB;
- private static final long ONE_TB = 1024 * ONE_GB;
-
- public String formatDate(long timeMS) {
- if (timeMS == -1) {
- return "-";
- }
-
- return DateTimeFormat.forPattern(DATE_TIME_STRING).print(timeMS);
- }
-
- public String formatDuration(long startTime, long endTime) {
- if (startTime == -1) {
- return "-";
- }
-
- long durationMS;
- if (endTime == -1) {
- durationMS = System.currentTimeMillis() - startTime;
- }
- else {
- durationMS = endTime - startTime;
- }
-
- long seconds = durationMS/1000;
- if (seconds < 60) {
- return seconds + " sec";
- }
-
- long minutes = seconds / 60;
- seconds %= 60;
- if (minutes < 60) {
- return minutes + "m " + seconds + "s";
- }
-
- long hours = minutes / 60;
- minutes %= 60;
- if (hours < 24) {
- return hours + "h " + minutes + "m " + seconds + "s";
- }
-
- long days = hours / 24;
- hours %= 24;
- return days + "d " + hours + "h " + minutes + "m";
- }
-
- public String formatStatus(Status status) {
- switch(status) {
- case SUCCEEDED:
- return "Success";
- case FAILED:
- return "Failed";
- case RUNNING:
- return "Running";
- case DISABLED:
- return "Disabled";
- case KILLED:
- return "Killed";
- case FAILED_FINISHING:
- return "Running w/Failure";
- case PREPARING:
- return "Preparing";
- case READY:
- return "Ready";
- case PAUSED:
- return "Paused";
- case SKIPPED:
- return "Skipped";
- default:
- }
- return "Unknown";
- }
-
- public String formatDateTime(DateTime dt) {
- return DateTimeFormat.forPattern(DATE_TIME_STRING).print(dt);
- }
-
- public String formatDateTime(long timestamp) {
- return formatDateTime(new DateTime(timestamp));
- }
-
- public String formatPeriod(ReadablePeriod period) {
- String periodStr = "null";
-
- if (period == null) {
- return periodStr;
- }
-
- if (period.get(DurationFieldType.years()) > 0) {
- int years = period.get(DurationFieldType.years());
- periodStr = years + " year(s)";
- }
- else if (period.get(DurationFieldType.months()) > 0) {
- int months = period.get(DurationFieldType.months());
- periodStr = months + " month(s)";
- }
- else if (period.get(DurationFieldType.weeks()) > 0) {
- int weeks = period.get(DurationFieldType.weeks());
- periodStr = weeks + " week(s)";
- }
- else if (period.get(DurationFieldType.days()) > 0) {
- int days = period.get(DurationFieldType.days());
- periodStr = days + " day(s)";
- }
- else if (period.get(DurationFieldType.hours()) > 0) {
- int hours = period.get(DurationFieldType.hours());
- periodStr = hours + " hour(s)";
- }
- else if (period.get(DurationFieldType.minutes()) > 0) {
- int minutes = period.get(DurationFieldType.minutes());
- periodStr = minutes + " minute(s)";
- }
- else if (period.get(DurationFieldType.seconds()) > 0) {
- int seconds = period.get(DurationFieldType.seconds());
- periodStr = seconds + " second(s)";
- }
-
- return periodStr;
- }
-
- public String extractNumericalId(String execId) {
- int index = execId.indexOf('.');
- int index2 = execId.indexOf('.', index+1);
-
- return execId.substring(0, index2);
- }
-
- public String displayBytes(long sizeBytes) {
- NumberFormat nf = NumberFormat.getInstance();
- nf.setMaximumFractionDigits(2);
- if(sizeBytes >= ONE_TB)
- return nf.format(sizeBytes / (double) ONE_TB) + " tb";
- else if(sizeBytes >= ONE_GB)
- return nf.format(sizeBytes / (double) ONE_GB) + " gb";
- else if(sizeBytes >= ONE_MB)
- return nf.format(sizeBytes / (double) ONE_MB) + " mb";
- else if(sizeBytes >= ONE_KB)
- return nf.format(sizeBytes / (double) ONE_KB) + " kb";
- else
- return sizeBytes + " B";
- }
+ public static final String DATE_TIME_STRING = "YYYY-MM-dd HH:mm:ss";
+
+ private static final long ONE_KB = 1024;
+ private static final long ONE_MB = 1024 * ONE_KB;
+ private static final long ONE_GB = 1024 * ONE_MB;
+ private static final long ONE_TB = 1024 * ONE_GB;
+
+ public String formatDate(long timeMS) {
+ if (timeMS == -1) {
+ return "-";
+ }
+
+ return DateTimeFormat.forPattern(DATE_TIME_STRING).print(timeMS);
+ }
+
+ public String formatDuration(long startTime, long endTime) {
+ if (startTime == -1) {
+ return "-";
+ }
+
+ long durationMS;
+ if (endTime == -1) {
+ durationMS = System.currentTimeMillis() - startTime;
+ } else {
+ durationMS = endTime - startTime;
+ }
+
+ long seconds = durationMS / 1000;
+ if (seconds < 60) {
+ return seconds + " sec";
+ }
+
+ long minutes = seconds / 60;
+ seconds %= 60;
+ if (minutes < 60) {
+ return minutes + "m " + seconds + "s";
+ }
+
+ long hours = minutes / 60;
+ minutes %= 60;
+ if (hours < 24) {
+ return hours + "h " + minutes + "m " + seconds + "s";
+ }
+
+ long days = hours / 24;
+ hours %= 24;
+ return days + "d " + hours + "h " + minutes + "m";
+ }
+
+ public String formatStatus(Status status) {
+ switch (status) {
+ case SUCCEEDED:
+ return "Success";
+ case FAILED:
+ return "Failed";
+ case RUNNING:
+ return "Running";
+ case DISABLED:
+ return "Disabled";
+ case KILLED:
+ return "Killed";
+ case FAILED_FINISHING:
+ return "Running w/Failure";
+ case PREPARING:
+ return "Preparing";
+ case READY:
+ return "Ready";
+ case PAUSED:
+ return "Paused";
+ case SKIPPED:
+ return "Skipped";
+ default:
+ }
+ return "Unknown";
+ }
+
+ public String formatDateTime(DateTime dt) {
+ return DateTimeFormat.forPattern(DATE_TIME_STRING).print(dt);
+ }
+
+ public String formatDateTime(long timestamp) {
+ return formatDateTime(new DateTime(timestamp));
+ }
+
+ public String formatPeriod(ReadablePeriod period) {
+ String periodStr = "null";
+
+ if (period == null) {
+ return periodStr;
+ }
+
+ if (period.get(DurationFieldType.years()) > 0) {
+ int years = period.get(DurationFieldType.years());
+ periodStr = years + " year(s)";
+ } else if (period.get(DurationFieldType.months()) > 0) {
+ int months = period.get(DurationFieldType.months());
+ periodStr = months + " month(s)";
+ } else if (period.get(DurationFieldType.weeks()) > 0) {
+ int weeks = period.get(DurationFieldType.weeks());
+ periodStr = weeks + " week(s)";
+ } else if (period.get(DurationFieldType.days()) > 0) {
+ int days = period.get(DurationFieldType.days());
+ periodStr = days + " day(s)";
+ } else if (period.get(DurationFieldType.hours()) > 0) {
+ int hours = period.get(DurationFieldType.hours());
+ periodStr = hours + " hour(s)";
+ } else if (period.get(DurationFieldType.minutes()) > 0) {
+ int minutes = period.get(DurationFieldType.minutes());
+ periodStr = minutes + " minute(s)";
+ } else if (period.get(DurationFieldType.seconds()) > 0) {
+ int seconds = period.get(DurationFieldType.seconds());
+ periodStr = seconds + " second(s)";
+ }
+
+ return periodStr;
+ }
+
+ public String extractNumericalId(String execId) {
+ int index = execId.indexOf('.');
+ int index2 = execId.indexOf('.', index + 1);
+
+ return execId.substring(0, index2);
+ }
+
+ public String displayBytes(long sizeBytes) {
+ NumberFormat nf = NumberFormat.getInstance();
+ nf.setMaximumFractionDigits(2);
+ if (sizeBytes >= ONE_TB)
+ return nf.format(sizeBytes / (double) ONE_TB) + " tb";
+ else if (sizeBytes >= ONE_GB)
+ return nf.format(sizeBytes / (double) ONE_GB) + " gb";
+ else if (sizeBytes >= ONE_MB)
+ return nf.format(sizeBytes / (double) ONE_MB) + " mb";
+ else if (sizeBytes >= ONE_KB)
+ return nf.format(sizeBytes / (double) ONE_KB) + " kb";
+ else
+ return sizeBytes + " B";
+ }
}
src/main/java/azkaban/webapp/AzkabanServer.java 210(+107 -103)
diff --git a/src/main/java/azkaban/webapp/AzkabanServer.java b/src/main/java/azkaban/webapp/AzkabanServer.java
index edb7d19..80953d8 100644
--- a/src/main/java/azkaban/webapp/AzkabanServer.java
+++ b/src/main/java/azkaban/webapp/AzkabanServer.java
@@ -32,108 +32,112 @@ import azkaban.user.UserManager;
import azkaban.utils.Props;
import azkaban.webapp.session.SessionCache;
-
public abstract class AzkabanServer {
- private static final Logger logger = Logger.getLogger(AzkabanServer.class);
- public static final String AZKABAN_PROPERTIES_FILE = "azkaban.properties";
- public static final String AZKABAN_PRIVATE_PROPERTIES_FILE = "azkaban.private.properties";
- public static final String DEFAULT_CONF_PATH = "conf";
-
- public static Props loadProps(String[] args) {
- return loadProps(args, new OptionParser());
- }
-
- public static Props loadProps(String[] args, OptionParser parser) {;
- OptionSpec<String> configDirectory = parser
- .acceptsAll(Arrays.asList("c", "conf"), "The conf directory for Azkaban.")
- .withRequiredArg()
- .describedAs("conf").ofType(String.class);
-
- // Grabbing the azkaban settings from the conf directory.
- Props azkabanSettings = null;
- OptionSet options = parser.parse(args);
-
- if (options.has(configDirectory)) {
- String path = options.valueOf(configDirectory);
- logger.info("Loading azkaban settings file from " + path);
- File dir = new File(path);
- if (!dir.exists()) {
- logger.error("Conf directory " + path + " doesn't exist.");
- }
- else if (!dir.isDirectory()) {
- logger.error("Conf directory " + path + " isn't a directory.");
- }
- else {
- azkabanSettings = loadAzkabanConfigurationFromDirectory(dir);
- }
- }
- else {
- logger.info("Conf parameter not set, attempting to get value from AZKABAN_HOME env.");
- azkabanSettings = loadConfigurationFromAzkabanHome();
- }
-
- return azkabanSettings;
- }
-
- private static Props loadAzkabanConfigurationFromDirectory(File dir) {
- File azkabanPrivatePropsFile = new File(dir, AZKABAN_PRIVATE_PROPERTIES_FILE);
- File azkabanPropsFile = new File(dir, AZKABAN_PROPERTIES_FILE);
-
- Props props = null;
- try {
- // This is purely optional
- if (azkabanPrivatePropsFile.exists() && azkabanPrivatePropsFile.isFile()) {
- logger.info("Loading azkaban private properties file" );
- props = new Props(null, azkabanPrivatePropsFile);
- }
-
- if (azkabanPropsFile.exists() && azkabanPropsFile.isFile()) {
- logger.info("Loading azkaban properties file" );
- props = new Props(props, azkabanPropsFile);
- }
- } catch (FileNotFoundException e) {
- logger.error("File not found. Could not load azkaban config file", e);
- } catch (IOException e) {
- logger.error("File found, but error reading. Could not load azkaban config file", e);
- }
-
- return props;
- }
-
- /**
- * Loads the Azkaban property file from the AZKABAN_HOME conf directory
- *
- * @return
- */
- private static Props loadConfigurationFromAzkabanHome() {
- String azkabanHome = System.getenv("AZKABAN_HOME");
-
- if (azkabanHome == null) {
- logger.error("AZKABAN_HOME not set. Will try default.");
- return null;
- }
-
- if (!new File(azkabanHome).isDirectory() || !new File(azkabanHome).canRead()) {
- logger.error(azkabanHome + " is not a readable directory.");
- return null;
- }
-
- File confPath = new File(azkabanHome, DEFAULT_CONF_PATH);
- if (!confPath.exists() || !confPath.isDirectory()
- || !confPath.canRead()) {
- logger.error(azkabanHome + " does not contain a readable conf directory.");
- return null;
- }
-
- return loadAzkabanConfigurationFromDirectory(confPath);
- }
-
- public abstract Props getServerProps();
-
- public abstract SessionCache getSessionCache();
-
- public abstract VelocityEngine getVelocityEngine();
-
- public abstract UserManager getUserManager();
-
+ private static final Logger logger = Logger.getLogger(AzkabanServer.class);
+ public static final String AZKABAN_PROPERTIES_FILE = "azkaban.properties";
+ public static final String AZKABAN_PRIVATE_PROPERTIES_FILE =
+ "azkaban.private.properties";
+ public static final String DEFAULT_CONF_PATH = "conf";
+
+ public static Props loadProps(String[] args) {
+ return loadProps(args, new OptionParser());
+ }
+
+ public static Props loadProps(String[] args, OptionParser parser) {
+ ;
+ OptionSpec<String> configDirectory =
+ parser
+ .acceptsAll(Arrays.asList("c", "conf"),
+ "The conf directory for Azkaban.").withRequiredArg()
+ .describedAs("conf").ofType(String.class);
+
+ // Grabbing the azkaban settings from the conf directory.
+ Props azkabanSettings = null;
+ OptionSet options = parser.parse(args);
+
+ if (options.has(configDirectory)) {
+ String path = options.valueOf(configDirectory);
+ logger.info("Loading azkaban settings file from " + path);
+ File dir = new File(path);
+ if (!dir.exists()) {
+ logger.error("Conf directory " + path + " doesn't exist.");
+ } else if (!dir.isDirectory()) {
+ logger.error("Conf directory " + path + " isn't a directory.");
+ } else {
+ azkabanSettings = loadAzkabanConfigurationFromDirectory(dir);
+ }
+ } else {
+ logger
+ .info("Conf parameter not set, attempting to get value from AZKABAN_HOME env.");
+ azkabanSettings = loadConfigurationFromAzkabanHome();
+ }
+
+ return azkabanSettings;
+ }
+
+ private static Props loadAzkabanConfigurationFromDirectory(File dir) {
+ File azkabanPrivatePropsFile =
+ new File(dir, AZKABAN_PRIVATE_PROPERTIES_FILE);
+ File azkabanPropsFile = new File(dir, AZKABAN_PROPERTIES_FILE);
+
+ Props props = null;
+ try {
+ // This is purely optional
+ if (azkabanPrivatePropsFile.exists() && azkabanPrivatePropsFile.isFile()) {
+ logger.info("Loading azkaban private properties file");
+ props = new Props(null, azkabanPrivatePropsFile);
+ }
+
+ if (azkabanPropsFile.exists() && azkabanPropsFile.isFile()) {
+ logger.info("Loading azkaban properties file");
+ props = new Props(props, azkabanPropsFile);
+ }
+ } catch (FileNotFoundException e) {
+ logger.error("File not found. Could not load azkaban config file", e);
+ } catch (IOException e) {
+ logger.error(
+ "File found, but error reading. Could not load azkaban config file",
+ e);
+ }
+
+ return props;
+ }
+
+ /**
+ * Loads the Azkaban property file from the AZKABAN_HOME conf directory
+ *
+ * @return
+ */
+ private static Props loadConfigurationFromAzkabanHome() {
+ String azkabanHome = System.getenv("AZKABAN_HOME");
+
+ if (azkabanHome == null) {
+ logger.error("AZKABAN_HOME not set. Will try default.");
+ return null;
+ }
+
+ if (!new File(azkabanHome).isDirectory()
+ || !new File(azkabanHome).canRead()) {
+ logger.error(azkabanHome + " is not a readable directory.");
+ return null;
+ }
+
+ File confPath = new File(azkabanHome, DEFAULT_CONF_PATH);
+ if (!confPath.exists() || !confPath.isDirectory() || !confPath.canRead()) {
+ logger
+ .error(azkabanHome + " does not contain a readable conf directory.");
+ return null;
+ }
+
+ return loadAzkabanConfigurationFromDirectory(confPath);
+ }
+
+ public abstract Props getServerProps();
+
+ public abstract SessionCache getSessionCache();
+
+ public abstract VelocityEngine getVelocityEngine();
+
+ public abstract UserManager getUserManager();
+
}
diff --git a/src/main/java/azkaban/webapp/AzkabanSingleServer.java b/src/main/java/azkaban/webapp/AzkabanSingleServer.java
index 4ddd032..5aa8be1 100644
--- a/src/main/java/azkaban/webapp/AzkabanSingleServer.java
+++ b/src/main/java/azkaban/webapp/AzkabanSingleServer.java
@@ -24,28 +24,33 @@ import azkaban.execapp.AzkabanExecutorServer;
import azkaban.utils.Props;
public class AzkabanSingleServer {
- private static final Logger logger = Logger.getLogger(AzkabanWebServer.class);
- public static void main(String[] args) throws Exception {
- logger.info("Starting Azkaban Server");
-
- Props props = AzkabanServer.loadProps(args);
- if (props == null) {
- logger.error("Properties not found. Need it to connect to the db.");
- logger.error("Exiting...");
- return;
- }
-
- boolean checkversion = props.getBoolean(AzkabanDatabaseSetup.DATABASE_CHECK_VERSION, true);
-
- if (checkversion) {
- boolean updateDB = props.getBoolean(AzkabanDatabaseSetup.DATABASE_AUTO_UPDATE_TABLES, true);
- String scriptDir = props.getString(AzkabanDatabaseSetup.DATABASE_SQL_SCRIPT_DIR, "sql");
- AzkabanDatabaseUpdater.runDatabaseUpdater(props, scriptDir, updateDB);
- }
-
- AzkabanWebServer.main(args);
- logger.info("Azkaban Web Server started...");
- AzkabanExecutorServer.main(args);
- logger.info("Azkaban Exec Server started...");
- }
+ private static final Logger logger = Logger.getLogger(AzkabanWebServer.class);
+
+ public static void main(String[] args) throws Exception {
+ logger.info("Starting Azkaban Server");
+
+ Props props = AzkabanServer.loadProps(args);
+ if (props == null) {
+ logger.error("Properties not found. Need it to connect to the db.");
+ logger.error("Exiting...");
+ return;
+ }
+
+ boolean checkversion =
+ props.getBoolean(AzkabanDatabaseSetup.DATABASE_CHECK_VERSION, true);
+
+ if (checkversion) {
+ boolean updateDB =
+ props.getBoolean(AzkabanDatabaseSetup.DATABASE_AUTO_UPDATE_TABLES,
+ true);
+ String scriptDir =
+ props.getString(AzkabanDatabaseSetup.DATABASE_SQL_SCRIPT_DIR, "sql");
+ AzkabanDatabaseUpdater.runDatabaseUpdater(props, scriptDir, updateDB);
+ }
+
+ AzkabanWebServer.main(args);
+ logger.info("Azkaban Web Server started...");
+ AzkabanExecutorServer.main(args);
+ logger.info("Azkaban Exec Server started...");
+ }
}
src/main/java/azkaban/webapp/AzkabanWebServer.java 2329(+1192 -1137)
diff --git a/src/main/java/azkaban/webapp/AzkabanWebServer.java b/src/main/java/azkaban/webapp/AzkabanWebServer.java
index a0cacc8..38cd93b 100644
--- a/src/main/java/azkaban/webapp/AzkabanWebServer.java
+++ b/src/main/java/azkaban/webapp/AzkabanWebServer.java
@@ -119,1143 +119,1198 @@ import azkaban.webapp.session.SessionCache;
* Jetty truststore password
*/
public class AzkabanWebServer extends AzkabanServer {
- private static final Logger logger = Logger.getLogger(AzkabanWebServer.class);
-
- public static final String AZKABAN_HOME = "AZKABAN_HOME";
- public static final String DEFAULT_CONF_PATH = "conf";
- public static final String AZKABAN_PROPERTIES_FILE = "azkaban.properties";
- public static final String AZKABAN_PRIVATE_PROPERTIES_FILE = "azkaban.private.properties";
-
- private static final int MAX_FORM_CONTENT_SIZE = 10*1024*1024;
- private static final int MAX_HEADER_BUFFER_SIZE = 10*1024*1024;
- private static AzkabanWebServer app;
-
- private static final String DEFAULT_TIMEZONE_ID = "default.timezone.id";
- private static final int DEFAULT_PORT_NUMBER = 8081;
- private static final int DEFAULT_SSL_PORT_NUMBER = 8443;
- private static final int DEFAULT_THREAD_NUMBER = 20;
- private static final String VELOCITY_DEV_MODE_PARAM = "velocity.dev.mode";
- private static final String USER_MANAGER_CLASS_PARAM = "user.manager.class";
- private static final String DEFAULT_STATIC_DIR = "";
-
- private final VelocityEngine velocityEngine;
-
- private final Server server;
- private UserManager userManager;
- private ProjectManager projectManager;
-// private ExecutorManagerAdapter executorManager;
- private ExecutorManager executorManager;
- private ScheduleManager scheduleManager;
- private TriggerManager triggerManager;
- private Map<String, Alerter> alerters;
-
- private final ClassLoader baseClassLoader;
-
- private Props props;
- private SessionCache sessionCache;
- private File tempDir;
- private Map<String, TriggerPlugin> triggerPlugins;
-
- private MBeanServer mbeanServer;
- private ArrayList<ObjectName> registeredMBeans = new ArrayList<ObjectName>();
-
- public static AzkabanWebServer getInstance() {
- return app;
- }
-
- /**
- * Constructor usually called by tomcat AzkabanServletContext to create the
- * initial server
- */
- public AzkabanWebServer() throws Exception {
- this(null, loadConfigurationFromAzkabanHome());
- }
-
- /**
- * Constructor
- */
- public AzkabanWebServer(Server server, Props props) throws Exception {
- this.props = props;
- this.server = server;
- velocityEngine = configureVelocityEngine(props.getBoolean(VELOCITY_DEV_MODE_PARAM, false));
- sessionCache = new SessionCache(props);
- userManager = loadUserManager(props);
-
- alerters = loadAlerters(props);
-
- executorManager = loadExecutorManager(props);
- projectManager = loadProjectManager(props);
-
- triggerManager = loadTriggerManager(props);
- loadBuiltinCheckersAndActions();
-
- // load all trigger agents here
- scheduleManager = loadScheduleManager(triggerManager, props);
-
- String triggerPluginDir = props.getString("trigger.plugin.dir", "plugins/triggers");
-
- loadPluginCheckersAndActions(triggerPluginDir);
-
- //baseClassLoader = getBaseClassloader();
- baseClassLoader = this.getClassLoader();
-
- tempDir = new File(props.getString("azkaban.temp.dir", "temp"));
-
- // Setup time zone
- if (props.containsKey(DEFAULT_TIMEZONE_ID)) {
- String timezone = props.getString(DEFAULT_TIMEZONE_ID);
- System.setProperty("user.timezone", timezone);
- TimeZone.setDefault(TimeZone.getTimeZone(timezone));
- DateTimeZone.setDefault(DateTimeZone.forID(timezone));
- logger.info("Setting timezone to " + timezone);
- }
-
- configureMBeanServer();
- }
-
- private void setTriggerPlugins(Map<String, TriggerPlugin> triggerPlugins) {
- this.triggerPlugins = triggerPlugins;
- }
-
- private UserManager loadUserManager(Props props) {
- Class<?> userManagerClass = props.getClass(USER_MANAGER_CLASS_PARAM, null);
- logger.info("Loading user manager class " + userManagerClass.getName());
- UserManager manager = null;
- if (userManagerClass != null && userManagerClass.getConstructors().length > 0) {
- try {
- Constructor<?> userManagerConstructor = userManagerClass.getConstructor(Props.class);
- manager = (UserManager) userManagerConstructor.newInstance(props);
- }
- catch (Exception e) {
- logger.error("Could not instantiate UserManager "+ userManagerClass.getName());
- throw new RuntimeException(e);
- }
- }
- else {
- manager = new XmlUserManager(props);
- }
- return manager;
- }
-
- private ProjectManager loadProjectManager(Props props) {
- logger.info("Loading JDBC for project management");
- JdbcProjectLoader loader = new JdbcProjectLoader(props);
- ProjectManager manager = new ProjectManager(loader, props);
- return manager;
- }
-
- private ExecutorManager loadExecutorManager(Props props) throws Exception {
- JdbcExecutorLoader loader = new JdbcExecutorLoader(props);
- ExecutorManager execManager = new ExecutorManager(props, loader, alerters);
- return execManager;
- }
-
-// private ExecutorManagerAdapter loadExecutorManagerAdapter(Props props) throws Exception {
-// String executorMode = props.getString("executor.manager.mode", "local");
-// ExecutorManagerAdapter adapter;
-// if(executorMode.equals("local")) {
-// adapter = loadExecutorManager(props);
-// } else {
-// throw new Exception("Unknown ExecutorManager mode " + executorMode);
-// }
-// return adapter;
-// }
-
- private ScheduleManager loadScheduleManager(TriggerManager tm, Props props ) throws Exception {
- logger.info("Loading trigger based scheduler");
- ScheduleLoader loader = new TriggerBasedScheduleLoader(tm, ScheduleManager.triggerSource);
- return new ScheduleManager(loader);
- }
-
- private TriggerManager loadTriggerManager(Props props) throws TriggerManagerException {
- TriggerLoader loader = new JdbcTriggerLoader(props);
- return new TriggerManager(props, loader, executorManager);
- }
-
- private void loadBuiltinCheckersAndActions() {
- logger.info("Loading built-in checker and action types");
- if(triggerManager instanceof TriggerManager) {
- SlaChecker.setExecutorManager(executorManager);
- ExecuteFlowAction.setExecutorManager(executorManager);
- ExecuteFlowAction.setProjectManager(projectManager);
- ExecuteFlowAction.setTriggerManager(triggerManager);
- KillExecutionAction.setExecutorManager(executorManager);
- SlaAlertAction.setExecutorManager(executorManager);
- //Map<String, azkaban.alert.Alerter> alerters = loadAlerters(props);
- SlaAlertAction.setAlerters(alerters);
- SlaAlertAction.setExecutorManager(executorManager);
- CreateTriggerAction.setTriggerManager(triggerManager);
- ExecutionChecker.setExecutorManager(executorManager);
- }
- triggerManager.registerCheckerType(BasicTimeChecker.type, BasicTimeChecker.class);
- triggerManager.registerCheckerType(SlaChecker.type, SlaChecker.class);
- triggerManager.registerCheckerType(ExecutionChecker.type, ExecutionChecker.class);
- triggerManager.registerActionType(ExecuteFlowAction.type, ExecuteFlowAction.class);
- triggerManager.registerActionType(KillExecutionAction.type, KillExecutionAction.class);
- triggerManager.registerActionType(SlaAlertAction.type, SlaAlertAction.class);
- triggerManager.registerActionType(CreateTriggerAction.type, CreateTriggerAction.class);
- }
-
- private Map<String, Alerter> loadAlerters(Props props) {
- Map<String, Alerter> allAlerters = new HashMap<String, Alerter>();
- // load built-in alerters
- Emailer mailAlerter = new Emailer(props);
- allAlerters.put("email", mailAlerter);
- // load all plugin alerters
- String pluginDir = props.getString("alerter.plugin.dir", "plugins/alerter");
- allAlerters.putAll(loadPluginAlerters(pluginDir));
- return allAlerters;
- }
-
- private Map<String, Alerter> loadPluginAlerters(String pluginPath) {
- File alerterPluginPath = new File(pluginPath);
- if (!alerterPluginPath.exists()) {
- return Collections.<String, Alerter>emptyMap();
- }
-
- Map<String, Alerter> installedAlerterPlugins = new HashMap<String, Alerter>();
- ClassLoader parentLoader = getClass().getClassLoader();
- File[] pluginDirs = alerterPluginPath.listFiles();
- ArrayList<String> jarPaths = new ArrayList<String>();
- for (File pluginDir: pluginDirs) {
- if (!pluginDir.isDirectory()) {
- logger.error("The plugin path " + pluginDir + " is not a directory.");
- continue;
- }
-
- // Load the conf directory
- File propertiesDir = new File(pluginDir, "conf");
- Props pluginProps = null;
- if (propertiesDir.exists() && propertiesDir.isDirectory()) {
- File propertiesFile = new File(propertiesDir, "plugin.properties");
- File propertiesOverrideFile = new File(propertiesDir, "override.properties");
-
- if (propertiesFile.exists()) {
- if (propertiesOverrideFile.exists()) {
- pluginProps = PropsUtils.loadProps(null, propertiesFile, propertiesOverrideFile);
- }
- else {
- pluginProps = PropsUtils.loadProps(null, propertiesFile);
- }
- }
- else {
- logger.error("Plugin conf file " + propertiesFile + " not found.");
- continue;
- }
- }
- else {
- logger.error("Plugin conf path " + propertiesDir + " not found.");
- continue;
- }
-
- String pluginName = pluginProps.getString("alerter.name");
- List<String> extLibClasspath = pluginProps.getStringList("alerter.external.classpaths", (List<String>)null);
-
- String pluginClass = pluginProps.getString("alerter.class");
- if (pluginClass == null) {
- logger.error("Alerter class is not set.");
- }
- else {
- logger.info("Plugin class " + pluginClass);
- }
-
- URLClassLoader urlClassLoader = null;
- File libDir = new File(pluginDir, "lib");
- if (libDir.exists() && libDir.isDirectory()) {
- File[] files = libDir.listFiles();
-
- ArrayList<URL> urls = new ArrayList<URL>();
- for (int i=0; i < files.length; ++i) {
- try {
- URL url = files[i].toURI().toURL();
- urls.add(url);
- } catch (MalformedURLException e) {
- logger.error(e);
- }
- }
- if (extLibClasspath != null) {
- for (String extLib : extLibClasspath) {
- try {
- File file = new File(pluginDir, extLib);
- URL url = file.toURI().toURL();
- urls.add(url);
- } catch (MalformedURLException e) {
- logger.error(e);
- }
- }
- }
-
- urlClassLoader = new URLClassLoader(urls.toArray(new URL[urls.size()]), parentLoader);
- }
- else {
- logger.error("Library path " + propertiesDir + " not found.");
- continue;
- }
-
- Class<?> alerterClass = null;
- try {
- alerterClass = urlClassLoader.loadClass(pluginClass);
- }
- catch (ClassNotFoundException e) {
- logger.error("Class " + pluginClass + " not found.");
- continue;
- }
-
- String source = FileIOUtils.getSourcePathFromClass(alerterClass);
- logger.info("Source jar " + source);
- jarPaths.add("jar:file:" + source);
-
- Constructor<?> constructor = null;
- try {
- constructor = alerterClass.getConstructor(Props.class);
- } catch (NoSuchMethodException e) {
- logger.error("Constructor not found in " + pluginClass);
- continue;
- }
-
- Object obj = null;
- try {
- obj = constructor.newInstance(pluginProps);
- } catch (Exception e) {
- logger.error(e);
- }
-
- if (!(obj instanceof Alerter)) {
- logger.error("The object is not an Alerter");
- continue;
- }
-
- Alerter plugin = (Alerter) obj;
- installedAlerterPlugins.put(pluginName, plugin);
- }
-
- return installedAlerterPlugins;
-
- }
-
- private void loadPluginCheckersAndActions(String pluginPath) {
- logger.info("Loading plug-in checker and action types");
- File triggerPluginPath = new File(pluginPath);
- if (!triggerPluginPath.exists()) {
- logger.error("plugin path " + pluginPath + " doesn't exist!");
- return;
- }
-
- ClassLoader parentLoader = this.getClassLoader();
- File[] pluginDirs = triggerPluginPath.listFiles();
- ArrayList<String> jarPaths = new ArrayList<String>();
- for (File pluginDir: pluginDirs) {
- if (!pluginDir.exists()) {
- logger.error("Error! Trigger plugin path " + pluginDir.getPath() + " doesn't exist.");
- continue;
- }
-
- if (!pluginDir.isDirectory()) {
- logger.error("The plugin path " + pluginDir + " is not a directory.");
- continue;
- }
-
- // Load the conf directory
- File propertiesDir = new File(pluginDir, "conf");
- Props pluginProps = null;
- if (propertiesDir.exists() && propertiesDir.isDirectory()) {
- File propertiesFile = new File(propertiesDir, "plugin.properties");
- File propertiesOverrideFile = new File(propertiesDir, "override.properties");
-
- if (propertiesFile.exists()) {
- if (propertiesOverrideFile.exists()) {
- pluginProps = PropsUtils.loadProps(null, propertiesFile, propertiesOverrideFile);
- }
- else {
- pluginProps = PropsUtils.loadProps(null, propertiesFile);
- }
- }
- else {
- logger.error("Plugin conf file " + propertiesFile + " not found.");
- continue;
- }
- }
- else {
- logger.error("Plugin conf path " + propertiesDir + " not found.");
- continue;
- }
-
- List<String> extLibClasspath = pluginProps.getStringList("trigger.external.classpaths", (List<String>)null);
-
- String pluginClass = pluginProps.getString("trigger.class");
- if (pluginClass == null) {
- logger.error("Trigger class is not set.");
- }
- else {
- logger.error("Plugin class " + pluginClass);
- }
-
- URLClassLoader urlClassLoader = null;
- File libDir = new File(pluginDir, "lib");
- if (libDir.exists() && libDir.isDirectory()) {
- File[] files = libDir.listFiles();
-
- ArrayList<URL> urls = new ArrayList<URL>();
- for (int i=0; i < files.length; ++i) {
- try {
- URL url = files[i].toURI().toURL();
- urls.add(url);
- } catch (MalformedURLException e) {
- logger.error(e);
- }
- }
- if (extLibClasspath != null) {
- for (String extLib : extLibClasspath) {
- try {
- File file = new File(pluginDir, extLib);
- URL url = file.toURI().toURL();
- urls.add(url);
- } catch (MalformedURLException e) {
- logger.error(e);
- }
- }
- }
-
- urlClassLoader = new URLClassLoader(urls.toArray(new URL[urls.size()]), parentLoader);
- }
- else {
- logger.error("Library path " + propertiesDir + " not found.");
- continue;
- }
-
- Class<?> triggerClass = null;
- try {
- triggerClass = urlClassLoader.loadClass(pluginClass);
- }
- catch (ClassNotFoundException e) {
- logger.error("Class " + pluginClass + " not found.");
- continue;
- }
-
- String source = FileIOUtils.getSourcePathFromClass(triggerClass);
- logger.info("Source jar " + source);
- jarPaths.add("jar:file:" + source);
-
- try {
- Utils.invokeStaticMethod(urlClassLoader, pluginClass, "initiateCheckerTypes", pluginProps, app);
- } catch (Exception e) {
- logger.error("Unable to initiate checker types for " + pluginClass);
- continue;
- }
-
- try {
- Utils.invokeStaticMethod(urlClassLoader, pluginClass, "initiateActionTypes", pluginProps, app);
- } catch (Exception e) {
- logger.error("Unable to initiate action types for " + pluginClass);
- continue;
- }
-
- }
- }
-
- /**
- * Returns the web session cache.
- *
- * @return
- */
- public SessionCache getSessionCache() {
- return sessionCache;
- }
-
- /**
- * Returns the velocity engine for pages to use.
- *
- * @return
- */
- public VelocityEngine getVelocityEngine() {
- return velocityEngine;
- }
-
- /**
- *
- * @return
- */
- public UserManager getUserManager() {
- return userManager;
- }
-
- /**
- *
- * @return
- */
- public ProjectManager getProjectManager() {
- return projectManager;
- }
-
- /**
+ private static final Logger logger = Logger.getLogger(AzkabanWebServer.class);
+
+ public static final String AZKABAN_HOME = "AZKABAN_HOME";
+ public static final String DEFAULT_CONF_PATH = "conf";
+ public static final String AZKABAN_PROPERTIES_FILE = "azkaban.properties";
+ public static final String AZKABAN_PRIVATE_PROPERTIES_FILE =
+ "azkaban.private.properties";
+
+ private static final int MAX_FORM_CONTENT_SIZE = 10 * 1024 * 1024;
+ private static final int MAX_HEADER_BUFFER_SIZE = 10 * 1024 * 1024;
+ private static AzkabanWebServer app;
+
+ private static final String DEFAULT_TIMEZONE_ID = "default.timezone.id";
+ private static final int DEFAULT_PORT_NUMBER = 8081;
+ private static final int DEFAULT_SSL_PORT_NUMBER = 8443;
+ private static final int DEFAULT_THREAD_NUMBER = 20;
+ private static final String VELOCITY_DEV_MODE_PARAM = "velocity.dev.mode";
+ private static final String USER_MANAGER_CLASS_PARAM = "user.manager.class";
+ private static final String DEFAULT_STATIC_DIR = "";
+
+ private final VelocityEngine velocityEngine;
+
+ private final Server server;
+ private UserManager userManager;
+ private ProjectManager projectManager;
+ // private ExecutorManagerAdapter executorManager;
+ private ExecutorManager executorManager;
+ private ScheduleManager scheduleManager;
+ private TriggerManager triggerManager;
+ private Map<String, Alerter> alerters;
+
+ private final ClassLoader baseClassLoader;
+
+ private Props props;
+ private SessionCache sessionCache;
+ private File tempDir;
+ private Map<String, TriggerPlugin> triggerPlugins;
+
+ private MBeanServer mbeanServer;
+ private ArrayList<ObjectName> registeredMBeans = new ArrayList<ObjectName>();
+
+ public static AzkabanWebServer getInstance() {
+ return app;
+ }
+
+ /**
+ * Constructor usually called by tomcat AzkabanServletContext to create the
+ * initial server
+ */
+ public AzkabanWebServer() throws Exception {
+ this(null, loadConfigurationFromAzkabanHome());
+ }
+
+ /**
+ * Constructor
+ */
+ public AzkabanWebServer(Server server, Props props) throws Exception {
+ this.props = props;
+ this.server = server;
+ velocityEngine =
+ configureVelocityEngine(props
+ .getBoolean(VELOCITY_DEV_MODE_PARAM, false));
+ sessionCache = new SessionCache(props);
+ userManager = loadUserManager(props);
+
+ alerters = loadAlerters(props);
+
+ executorManager = loadExecutorManager(props);
+ projectManager = loadProjectManager(props);
+
+ triggerManager = loadTriggerManager(props);
+ loadBuiltinCheckersAndActions();
+
+ // load all trigger agents here
+ scheduleManager = loadScheduleManager(triggerManager, props);
+
+ String triggerPluginDir =
+ props.getString("trigger.plugin.dir", "plugins/triggers");
+
+ loadPluginCheckersAndActions(triggerPluginDir);
+
+ // baseClassLoader = getBaseClassloader();
+ baseClassLoader = this.getClassLoader();
+
+ tempDir = new File(props.getString("azkaban.temp.dir", "temp"));
+
+ // Setup time zone
+ if (props.containsKey(DEFAULT_TIMEZONE_ID)) {
+ String timezone = props.getString(DEFAULT_TIMEZONE_ID);
+ System.setProperty("user.timezone", timezone);
+ TimeZone.setDefault(TimeZone.getTimeZone(timezone));
+ DateTimeZone.setDefault(DateTimeZone.forID(timezone));
+ logger.info("Setting timezone to " + timezone);
+ }
+
+ configureMBeanServer();
+ }
+
+ private void setTriggerPlugins(Map<String, TriggerPlugin> triggerPlugins) {
+ this.triggerPlugins = triggerPlugins;
+ }
+
+ private UserManager loadUserManager(Props props) {
+ Class<?> userManagerClass = props.getClass(USER_MANAGER_CLASS_PARAM, null);
+ logger.info("Loading user manager class " + userManagerClass.getName());
+ UserManager manager = null;
+ if (userManagerClass != null
+ && userManagerClass.getConstructors().length > 0) {
+ try {
+ Constructor<?> userManagerConstructor =
+ userManagerClass.getConstructor(Props.class);
+ manager = (UserManager) userManagerConstructor.newInstance(props);
+ } catch (Exception e) {
+ logger.error("Could not instantiate UserManager "
+ + userManagerClass.getName());
+ throw new RuntimeException(e);
+ }
+ } else {
+ manager = new XmlUserManager(props);
+ }
+ return manager;
+ }
+
+ private ProjectManager loadProjectManager(Props props) {
+ logger.info("Loading JDBC for project management");
+ JdbcProjectLoader loader = new JdbcProjectLoader(props);
+ ProjectManager manager = new ProjectManager(loader, props);
+ return manager;
+ }
+
+ private ExecutorManager loadExecutorManager(Props props) throws Exception {
+ JdbcExecutorLoader loader = new JdbcExecutorLoader(props);
+ ExecutorManager execManager = new ExecutorManager(props, loader, alerters);
+ return execManager;
+ }
+
+ // private ExecutorManagerAdapter loadExecutorManagerAdapter(Props props)
+ // throws Exception {
+ // String executorMode = props.getString("executor.manager.mode", "local");
+ // ExecutorManagerAdapter adapter;
+ // if(executorMode.equals("local")) {
+ // adapter = loadExecutorManager(props);
+ // } else {
+ // throw new Exception("Unknown ExecutorManager mode " + executorMode);
+ // }
+ // return adapter;
+ // }
+
+ private ScheduleManager loadScheduleManager(TriggerManager tm, Props props)
+ throws Exception {
+ logger.info("Loading trigger based scheduler");
+ ScheduleLoader loader =
+ new TriggerBasedScheduleLoader(tm, ScheduleManager.triggerSource);
+ return new ScheduleManager(loader);
+ }
+
+ private TriggerManager loadTriggerManager(Props props)
+ throws TriggerManagerException {
+ TriggerLoader loader = new JdbcTriggerLoader(props);
+ return new TriggerManager(props, loader, executorManager);
+ }
+
+ private void loadBuiltinCheckersAndActions() {
+ logger.info("Loading built-in checker and action types");
+ if (triggerManager instanceof TriggerManager) {
+ SlaChecker.setExecutorManager(executorManager);
+ ExecuteFlowAction.setExecutorManager(executorManager);
+ ExecuteFlowAction.setProjectManager(projectManager);
+ ExecuteFlowAction.setTriggerManager(triggerManager);
+ KillExecutionAction.setExecutorManager(executorManager);
+ SlaAlertAction.setExecutorManager(executorManager);
+ // Map<String, azkaban.alert.Alerter> alerters = loadAlerters(props);
+ SlaAlertAction.setAlerters(alerters);
+ SlaAlertAction.setExecutorManager(executorManager);
+ CreateTriggerAction.setTriggerManager(triggerManager);
+ ExecutionChecker.setExecutorManager(executorManager);
+ }
+ triggerManager.registerCheckerType(BasicTimeChecker.type,
+ BasicTimeChecker.class);
+ triggerManager.registerCheckerType(SlaChecker.type, SlaChecker.class);
+ triggerManager.registerCheckerType(ExecutionChecker.type,
+ ExecutionChecker.class);
+ triggerManager.registerActionType(ExecuteFlowAction.type,
+ ExecuteFlowAction.class);
+ triggerManager.registerActionType(KillExecutionAction.type,
+ KillExecutionAction.class);
+ triggerManager
+ .registerActionType(SlaAlertAction.type, SlaAlertAction.class);
+ triggerManager.registerActionType(CreateTriggerAction.type,
+ CreateTriggerAction.class);
+ }
+
+ private Map<String, Alerter> loadAlerters(Props props) {
+ Map<String, Alerter> allAlerters = new HashMap<String, Alerter>();
+ // load built-in alerters
+ Emailer mailAlerter = new Emailer(props);
+ allAlerters.put("email", mailAlerter);
+ // load all plugin alerters
+ String pluginDir = props.getString("alerter.plugin.dir", "plugins/alerter");
+ allAlerters.putAll(loadPluginAlerters(pluginDir));
+ return allAlerters;
+ }
+
+  /**
+   * Scans pluginPath for alerter plugin directories and instantiates each one.
+   * A valid plugin directory contains conf/plugin.properties (optionally
+   * conf/override.properties) declaring "alerter.name" and "alerter.class",
+   * and a lib/ directory with the plugin jars. Invalid plugin directories are
+   * logged and skipped; they never abort the scan.
+   *
+   * @param pluginPath root directory to scan; if it does not exist an empty
+   *          map is returned
+   * @return map from alerter name to instantiated Alerter plugin
+   */
+  private Map<String, Alerter> loadPluginAlerters(String pluginPath) {
+    File alerterPluginPath = new File(pluginPath);
+    if (!alerterPluginPath.exists()) {
+      return Collections.<String, Alerter> emptyMap();
+    }
+
+    Map<String, Alerter> installedAlerterPlugins =
+        new HashMap<String, Alerter>();
+    ClassLoader parentLoader = getClass().getClassLoader();
+    File[] pluginDirs = alerterPluginPath.listFiles();
+    for (File pluginDir : pluginDirs) {
+      if (!pluginDir.isDirectory()) {
+        logger.error("The plugin path " + pluginDir + " is not a directory.");
+        continue;
+      }
+
+      // Load the conf directory
+      File propertiesDir = new File(pluginDir, "conf");
+      Props pluginProps = null;
+      if (propertiesDir.exists() && propertiesDir.isDirectory()) {
+        File propertiesFile = new File(propertiesDir, "plugin.properties");
+        File propertiesOverrideFile =
+            new File(propertiesDir, "override.properties");
+
+        if (propertiesFile.exists()) {
+          if (propertiesOverrideFile.exists()) {
+            pluginProps =
+                PropsUtils.loadProps(null, propertiesFile,
+                    propertiesOverrideFile);
+          } else {
+            pluginProps = PropsUtils.loadProps(null, propertiesFile);
+          }
+        } else {
+          logger.error("Plugin conf file " + propertiesFile + " not found.");
+          continue;
+        }
+      } else {
+        logger.error("Plugin conf path " + propertiesDir + " not found.");
+        continue;
+      }
+
+      String pluginName = pluginProps.getString("alerter.name");
+      List<String> extLibClasspath =
+          pluginProps.getStringList("alerter.external.classpaths",
+              (List<String>) null);
+
+      String pluginClass = pluginProps.getString("alerter.class");
+      if (pluginClass == null) {
+        // Without a class name the plugin cannot be loaded; previously this
+        // fell through and threw an NPE in loadClass below.
+        logger.error("Alerter class is not set.");
+        continue;
+      }
+      logger.info("Plugin class " + pluginClass);
+
+      URLClassLoader urlClassLoader = null;
+      File libDir = new File(pluginDir, "lib");
+      if (libDir.exists() && libDir.isDirectory()) {
+        File[] files = libDir.listFiles();
+
+        ArrayList<URL> urls = new ArrayList<URL>();
+        for (int i = 0; i < files.length; ++i) {
+          try {
+            URL url = files[i].toURI().toURL();
+            urls.add(url);
+          } catch (MalformedURLException e) {
+            logger.error(e);
+          }
+        }
+        if (extLibClasspath != null) {
+          for (String extLib : extLibClasspath) {
+            try {
+              File file = new File(pluginDir, extLib);
+              URL url = file.toURI().toURL();
+              urls.add(url);
+            } catch (MalformedURLException e) {
+              logger.error(e);
+            }
+          }
+        }
+
+        urlClassLoader =
+            new URLClassLoader(urls.toArray(new URL[urls.size()]), parentLoader);
+      } else {
+        // Was logging propertiesDir here, which produced a misleading message.
+        logger.error("Library path " + libDir + " not found.");
+        continue;
+      }
+
+      Class<?> alerterClass = null;
+      try {
+        alerterClass = urlClassLoader.loadClass(pluginClass);
+      } catch (ClassNotFoundException e) {
+        logger.error("Class " + pluginClass + " not found.");
+        continue;
+      }
+
+      String source = FileIOUtils.getSourcePathFromClass(alerterClass);
+      logger.info("Source jar " + source);
+
+      // The plugin must expose a single-argument Props constructor.
+      Constructor<?> constructor = null;
+      try {
+        constructor = alerterClass.getConstructor(Props.class);
+      } catch (NoSuchMethodException e) {
+        logger.error("Constructor not found in " + pluginClass);
+        continue;
+      }
+
+      Object obj = null;
+      try {
+        obj = constructor.newInstance(pluginProps);
+      } catch (Exception e) {
+        // Keep the cause; a null obj used to be reported only as
+        // "not an Alerter", hiding the real failure.
+        logger.error("Failed to instantiate alerter " + pluginClass, e);
+        continue;
+      }
+
+      if (!(obj instanceof Alerter)) {
+        logger.error("The object is not an Alerter");
+        continue;
+      }
+
+      Alerter plugin = (Alerter) obj;
+      installedAlerterPlugins.put(pluginName, plugin);
+    }
+
+    return installedAlerterPlugins;
+  }
+
+  /**
+   * Loads trigger checker/action plugins from pluginPath and lets each plugin
+   * register its own types by invoking its static "initiateCheckerTypes" and
+   * "initiateActionTypes" methods with (pluginProps, app). A valid plugin
+   * directory contains conf/plugin.properties declaring "trigger.class" and a
+   * lib/ directory of jars. Invalid plugin directories are logged and skipped.
+   *
+   * @param pluginPath root directory to scan for trigger plugins
+   */
+  private void loadPluginCheckersAndActions(String pluginPath) {
+    logger.info("Loading plug-in checker and action types");
+    File triggerPluginPath = new File(pluginPath);
+    if (!triggerPluginPath.exists()) {
+      logger.error("plugin path " + pluginPath + " doesn't exist!");
+      return;
+    }
+
+    ClassLoader parentLoader = this.getClassLoader();
+    File[] pluginDirs = triggerPluginPath.listFiles();
+    for (File pluginDir : pluginDirs) {
+      if (!pluginDir.exists()) {
+        logger.error("Error! Trigger plugin path " + pluginDir.getPath()
+            + " doesn't exist.");
+        continue;
+      }
+
+      if (!pluginDir.isDirectory()) {
+        logger.error("The plugin path " + pluginDir + " is not a directory.");
+        continue;
+      }
+
+      // Load the conf directory
+      File propertiesDir = new File(pluginDir, "conf");
+      Props pluginProps = null;
+      if (propertiesDir.exists() && propertiesDir.isDirectory()) {
+        File propertiesFile = new File(propertiesDir, "plugin.properties");
+        File propertiesOverrideFile =
+            new File(propertiesDir, "override.properties");
+
+        if (propertiesFile.exists()) {
+          if (propertiesOverrideFile.exists()) {
+            pluginProps =
+                PropsUtils.loadProps(null, propertiesFile,
+                    propertiesOverrideFile);
+          } else {
+            pluginProps = PropsUtils.loadProps(null, propertiesFile);
+          }
+        } else {
+          logger.error("Plugin conf file " + propertiesFile + " not found.");
+          continue;
+        }
+      } else {
+        logger.error("Plugin conf path " + propertiesDir + " not found.");
+        continue;
+      }
+
+      List<String> extLibClasspath =
+          pluginProps.getStringList("trigger.external.classpaths",
+              (List<String>) null);
+
+      String pluginClass = pluginProps.getString("trigger.class");
+      if (pluginClass == null) {
+        // Without a class name the plugin cannot be loaded; previously this
+        // fell through and threw an NPE in loadClass below.
+        logger.error("Trigger class is not set.");
+        continue;
+      }
+      // Informational message; was logged at ERROR level.
+      logger.info("Plugin class " + pluginClass);
+
+      URLClassLoader urlClassLoader = null;
+      File libDir = new File(pluginDir, "lib");
+      if (libDir.exists() && libDir.isDirectory()) {
+        File[] files = libDir.listFiles();
+
+        ArrayList<URL> urls = new ArrayList<URL>();
+        for (int i = 0; i < files.length; ++i) {
+          try {
+            URL url = files[i].toURI().toURL();
+            urls.add(url);
+          } catch (MalformedURLException e) {
+            logger.error(e);
+          }
+        }
+        if (extLibClasspath != null) {
+          for (String extLib : extLibClasspath) {
+            try {
+              File file = new File(pluginDir, extLib);
+              URL url = file.toURI().toURL();
+              urls.add(url);
+            } catch (MalformedURLException e) {
+              logger.error(e);
+            }
+          }
+        }
+
+        urlClassLoader =
+            new URLClassLoader(urls.toArray(new URL[urls.size()]), parentLoader);
+      } else {
+        // Was logging propertiesDir here, which produced a misleading message.
+        logger.error("Library path " + libDir + " not found.");
+        continue;
+      }
+
+      Class<?> triggerClass = null;
+      try {
+        triggerClass = urlClassLoader.loadClass(pluginClass);
+      } catch (ClassNotFoundException e) {
+        logger.error("Class " + pluginClass + " not found.");
+        continue;
+      }
+
+      String source = FileIOUtils.getSourcePathFromClass(triggerClass);
+      logger.info("Source jar " + source);
+
+      try {
+        Utils.invokeStaticMethod(urlClassLoader, pluginClass,
+            "initiateCheckerTypes", pluginProps, app);
+      } catch (Exception e) {
+        // Include the cause so registration failures are diagnosable.
+        logger.error("Unable to initiate checker types for " + pluginClass, e);
+        continue;
+      }
+
+      try {
+        Utils.invokeStaticMethod(urlClassLoader, pluginClass,
+            "initiateActionTypes", pluginProps, app);
+      } catch (Exception e) {
+        logger.error("Unable to initiate action types for " + pluginClass, e);
+        continue;
+      }
+
+    }
+  }
+
+  /**
+   * Returns the web session cache.
+   *
+   * @return the session cache used to track logged-in user sessions
+   */
+  public SessionCache getSessionCache() {
+    return sessionCache;
+  }
+
+  /**
+   * Returns the velocity engine for pages to use.
+   *
+   * @return the shared, pre-configured Velocity template engine
+   */
+  public VelocityEngine getVelocityEngine() {
+    return velocityEngine;
+  }
+
+  /**
+   * Returns the user manager used for authentication and authorization.
+   *
+   * @return the user manager
+   */
+  public UserManager getUserManager() {
+    return userManager;
+  }
+
+  /**
+   * Returns the project manager owning project metadata and flows.
+   *
+   * @return the project manager
+   */
+  public ProjectManager getProjectManager() {
+    return projectManager;
+  }
+
+ /**
*
*/
- public ExecutorManager getExecutorManager() {
- return executorManager;
- }
-
- public ScheduleManager getScheduleManager() {
- return scheduleManager;
- }
-
- public TriggerManager getTriggerManager() {
- return triggerManager;
- }
-
-// public TriggerBasedScheduler getScheduler() {
-// return scheduler;
-// }
-//
- /**
- * Creates and configures the velocity engine.
- *
- * @param devMode
- * @return
- */
- private VelocityEngine configureVelocityEngine(final boolean devMode) {
- VelocityEngine engine = new VelocityEngine();
- engine.setProperty("resource.loader", "classpath, jar");
- engine.setProperty("classpath.resource.loader.class", ClasspathResourceLoader.class.getName());
- engine.setProperty("classpath.resource.loader.cache", !devMode);
- engine.setProperty("classpath.resource.loader.modificationCheckInterval", 5L);
- engine.setProperty("jar.resource.loader.class", JarResourceLoader.class.getName());
- engine.setProperty("jar.resource.loader.cache", !devMode);
- engine.setProperty("resource.manager.logwhenfound", false);
- engine.setProperty("input.encoding", "UTF-8");
- engine.setProperty("output.encoding", "UTF-8");
- engine.setProperty("directive.set.null.allowed", true);
- engine.setProperty("resource.manager.logwhenfound", false);
- engine.setProperty("velocimacro.permissions.allow.inline", true);
- engine.setProperty("velocimacro.library.autoreload", devMode);
- engine.setProperty("velocimacro.library", "/azkaban/webapp/servlet/velocity/macros.vm");
- engine.setProperty("velocimacro.permissions.allow.inline.to.replace.global", true);
- engine.setProperty("velocimacro.arguments.strict", true);
- engine.setProperty("runtime.log.invalid.references", devMode);
- engine.setProperty("runtime.log.logsystem.class", Log4JLogChute.class);
- engine.setProperty("runtime.log.logsystem.log4j.logger", Logger.getLogger("org.apache.velocity.Logger"));
- engine.setProperty("parser.pool.size", 3);
- return engine;
- }
-
-// private ClassLoader getBaseClassloader() throws MalformedURLException {
-// final ClassLoader retVal;
-//
-// String hadoopHome = System.getenv("HADOOP_HOME");
-// String hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
-//
-// if (hadoopConfDir != null) {
-// logger.info("Using hadoop config found in " + hadoopConfDir);
-// retVal = new URLClassLoader(new URL[] { new File(hadoopConfDir)
-// .toURI().toURL() }, getClass().getClassLoader());
-// } else if (hadoopHome != null) {
-// logger.info("Using hadoop config found in " + hadoopHome);
-// retVal = new URLClassLoader(
-// new URL[] { new File(hadoopHome, "conf").toURI().toURL() },
-// getClass().getClassLoader());
-// } else {
-// logger.info("HADOOP_HOME not set, using default hadoop config.");
-// retVal = getClass().getClassLoader();
-// }
-//
-// return retVal;
-// }
-
- public ClassLoader getClassLoader() {
- return baseClassLoader;
- }
-
- /**
- * Returns the global azkaban properties
- *
- * @return
- */
- public Props getServerProps() {
- return props;
- }
-
- /**
- * Azkaban using Jetty
- *
- * @param args
- */
- public static void main(String[] args) throws Exception {
- logger.error("Starting Jetty Azkaban Executor...");
- Props azkabanSettings = AzkabanServer.loadProps(args);
-
- if (azkabanSettings == null) {
- logger.error("Azkaban Properties not loaded.");
- logger.error("Exiting Azkaban...");
- return;
- }
-
- int maxThreads = azkabanSettings.getInt("jetty.maxThreads", DEFAULT_THREAD_NUMBER);
- boolean isStatsOn = azkabanSettings.getBoolean("jetty.connector.stats", true);
- logger.info("Setting up connector with stats on: " + isStatsOn);
-
- boolean ssl;
- int port;
- final Server server = new Server();
- if (azkabanSettings.getBoolean("jetty.use.ssl", true)) {
- int sslPortNumber = azkabanSettings.getInt("jetty.ssl.port", DEFAULT_SSL_PORT_NUMBER);
- port = sslPortNumber;
- ssl = true;
- logger.info("Setting up Jetty Https Server with port:" + sslPortNumber + " and numThreads:" + maxThreads);
-
- SslSocketConnector secureConnector = new SslSocketConnector();
- secureConnector.setPort(sslPortNumber);
- secureConnector.setKeystore(azkabanSettings.getString("jetty.keystore"));
- secureConnector.setPassword(azkabanSettings.getString("jetty.password"));
- secureConnector.setKeyPassword(azkabanSettings.getString("jetty.keypassword"));
- secureConnector.setTruststore(azkabanSettings.getString("jetty.truststore"));
- secureConnector.setTrustPassword(azkabanSettings.getString("jetty.trustpassword"));
- secureConnector.setHeaderBufferSize(MAX_HEADER_BUFFER_SIZE);
-
- server.addConnector(secureConnector);
- }
- else {
- ssl = false;
- port = azkabanSettings.getInt("jetty.port", DEFAULT_PORT_NUMBER);
- SocketConnector connector = new SocketConnector();
- connector.setPort(port);
- connector.setHeaderBufferSize(MAX_HEADER_BUFFER_SIZE);
- server.addConnector(connector);
- }
-
- // setting stats configuration for connectors
- for (Connector connector : server.getConnectors()) {
- connector.setStatsOn(isStatsOn);
- }
-
- String hostname = azkabanSettings.getString("jetty.hostname", "localhost");
- azkabanSettings.put("server.hostname", hostname);
- azkabanSettings.put("server.port", port);
- azkabanSettings.put("server.useSSL", String.valueOf(ssl));
-
- app = new AzkabanWebServer(server, azkabanSettings);
-
- boolean checkDB = azkabanSettings.getBoolean(AzkabanDatabaseSetup.DATABASE_CHECK_VERSION, false);
- if (checkDB) {
- AzkabanDatabaseSetup setup = new AzkabanDatabaseSetup(azkabanSettings);
- setup.loadTableInfo();
- if(setup.needsUpdating()) {
- logger.error("Database is out of date.");
- setup.printUpgradePlan();
-
- logger.error("Exiting with error.");
- System.exit(-1);
- }
- }
-
- QueuedThreadPool httpThreadPool = new QueuedThreadPool(maxThreads);
- server.setThreadPool(httpThreadPool);
-
- String staticDir = azkabanSettings.getString("web.resource.dir", DEFAULT_STATIC_DIR);
- logger.info("Setting up web resource dir " + staticDir);
- Context root = new Context(server, "/", Context.SESSIONS);
- root.setMaxFormContentSize(MAX_FORM_CONTENT_SIZE);
-
- String defaultServletPath = azkabanSettings.getString("azkaban.default.servlet.path", "/index");
- root.setResourceBase(staticDir);
- ServletHolder indexRedirect = new ServletHolder(new IndexRedirectServlet(defaultServletPath));
- root.addServlet(indexRedirect, "/");
- ServletHolder index = new ServletHolder(new ProjectServlet());
- root.addServlet(index, "/index");
-
- ServletHolder staticServlet = new ServletHolder(new DefaultServlet());
- root.addServlet(staticServlet, "/css/*");
- root.addServlet(staticServlet, "/js/*");
- root.addServlet(staticServlet, "/images/*");
- root.addServlet(staticServlet, "/fonts/*");
- root.addServlet(staticServlet, "/favicon.ico");
-
- root.addServlet(new ServletHolder(new ProjectManagerServlet()),"/manager");
- root.addServlet(new ServletHolder(new ExecutorServlet()),"/executor");
- root.addServlet(new ServletHolder(new HistoryServlet()), "/history");
- root.addServlet(new ServletHolder(new ScheduleServlet()),"/schedule");
- root.addServlet(new ServletHolder(new JMXHttpServlet()),"/jmx");
- root.addServlet(new ServletHolder(new TriggerManagerServlet()),"/triggers");
-
- ServletHolder restliHolder = new ServletHolder(new RestliServlet());
- restliHolder.setInitParameter("resourcePackages", "azkaban.restli");
- root.addServlet(restliHolder, "/restli/*");
-
- String viewerPluginDir = azkabanSettings.getString("viewer.plugin.dir", "plugins/viewer");
- loadViewerPlugins(root, viewerPluginDir, app.getVelocityEngine());
-
- // triggerplugin
- String triggerPluginDir = azkabanSettings.getString("trigger.plugin.dir", "plugins/triggers");
- Map<String, TriggerPlugin> triggerPlugins = loadTriggerPlugins(root, triggerPluginDir, app);
- app.setTriggerPlugins(triggerPlugins);
- // always have basic time trigger
- //TODO: find something else to do the job
-// app.getTriggerManager().addTriggerAgent(app.getScheduleManager().getTriggerSource(), app.getScheduleManager());
- // add additional triggers
-// for(TriggerPlugin plugin : triggerPlugins.values()) {
-// TriggerAgent agent = plugin.getAgent();
-// app.getTriggerManager().addTriggerAgent(agent.getTriggerSource(), agent);
-// }
- // fire up
- app.getTriggerManager().start();
-
- root.setAttribute(AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY, app);
- try {
- server.start();
- }
- catch (Exception e) {
- logger.warn(e);
- Utils.croak(e.getMessage(), 1);
- }
-
- Runtime.getRuntime().addShutdownHook(new Thread() {
-
- public void run() {
- logger.info("Shutting down http server...");
- try {
- app.close();
- server.stop();
- server.destroy();
- }
- catch (Exception e) {
- logger.error("Error while shutting down http server.", e);
- }
- logger.info("kk thx bye.");
- }
- });
- logger.info("Server running on " + (ssl ? "ssl" : "") + " port " + port + ".");
- }
-
- private static Map<String, TriggerPlugin> loadTriggerPlugins(Context root, String pluginPath, AzkabanWebServer azkabanWebApp) {
- File triggerPluginPath = new File(pluginPath);
- if (!triggerPluginPath.exists()) {
- //return Collections.<String, TriggerPlugin>emptyMap();
- return new HashMap<String, TriggerPlugin>();
- }
-
- Map<String, TriggerPlugin> installedTriggerPlugins = new HashMap<String, TriggerPlugin>();
- ClassLoader parentLoader = AzkabanWebServer.class.getClassLoader();
- File[] pluginDirs = triggerPluginPath.listFiles();
- ArrayList<String> jarPaths = new ArrayList<String>();
- for (File pluginDir: pluginDirs) {
- if (!pluginDir.exists()) {
- logger.error("Error! Trigger plugin path " + pluginDir.getPath() + " doesn't exist.");
- continue;
- }
-
- if (!pluginDir.isDirectory()) {
- logger.error("The plugin path " + pluginDir + " is not a directory.");
- continue;
- }
-
- // Load the conf directory
- File propertiesDir = new File(pluginDir, "conf");
- Props pluginProps = null;
- if (propertiesDir.exists() && propertiesDir.isDirectory()) {
- File propertiesFile = new File(propertiesDir, "plugin.properties");
- File propertiesOverrideFile = new File(propertiesDir, "override.properties");
-
- if (propertiesFile.exists()) {
- if (propertiesOverrideFile.exists()) {
- pluginProps = PropsUtils.loadProps(null, propertiesFile, propertiesOverrideFile);
- }
- else {
- pluginProps = PropsUtils.loadProps(null, propertiesFile);
- }
- }
- else {
- logger.error("Plugin conf file " + propertiesFile + " not found.");
- continue;
- }
- }
- else {
- logger.error("Plugin conf path " + propertiesDir + " not found.");
- continue;
- }
-
- String pluginName = pluginProps.getString("trigger.name");
-// String pluginWebPath = pluginProps.getString("trigger.web.path");
-// int pluginOrder = pluginProps.getInt("trigger.order", 0);
-// boolean pluginHidden = pluginProps.getBoolean("trigger.hidden", false);
- List<String> extLibClasspath = pluginProps.getStringList("trigger.external.classpaths", (List<String>)null);
-
- String pluginClass = pluginProps.getString("trigger.class");
- if (pluginClass == null) {
- logger.error("Trigger class is not set.");
- }
- else {
- logger.error("Plugin class " + pluginClass);
- }
-
- URLClassLoader urlClassLoader = null;
- File libDir = new File(pluginDir, "lib");
- if (libDir.exists() && libDir.isDirectory()) {
- File[] files = libDir.listFiles();
-
- ArrayList<URL> urls = new ArrayList<URL>();
- for (int i=0; i < files.length; ++i) {
- try {
- URL url = files[i].toURI().toURL();
- urls.add(url);
- } catch (MalformedURLException e) {
- logger.error(e);
- }
- }
- if (extLibClasspath != null) {
- for (String extLib : extLibClasspath) {
- try {
- File file = new File(pluginDir, extLib);
- URL url = file.toURI().toURL();
- urls.add(url);
- } catch (MalformedURLException e) {
- logger.error(e);
- }
- }
- }
-
- urlClassLoader = new URLClassLoader(urls.toArray(new URL[urls.size()]), parentLoader);
- }
- else {
- logger.error("Library path " + propertiesDir + " not found.");
- continue;
- }
-
- Class<?> triggerClass = null;
- try {
- triggerClass = urlClassLoader.loadClass(pluginClass);
- }
- catch (ClassNotFoundException e) {
- logger.error("Class " + pluginClass + " not found.");
- continue;
- }
-
- String source = FileIOUtils.getSourcePathFromClass(triggerClass);
- logger.info("Source jar " + source);
- jarPaths.add("jar:file:" + source);
-
- Constructor<?> constructor = null;
- try {
- constructor = triggerClass.getConstructor(String.class, Props.class, Context.class, AzkabanWebServer.class);
- } catch (NoSuchMethodException e) {
- logger.error("Constructor not found in " + pluginClass);
- continue;
- }
-
- Object obj = null;
- try {
- obj = constructor.newInstance(pluginName, pluginProps, root, azkabanWebApp);
- } catch (Exception e) {
- logger.error(e);
- }
-
- if (!(obj instanceof TriggerPlugin)) {
- logger.error("The object is not an TriggerPlugin");
- continue;
- }
-
- TriggerPlugin plugin = (TriggerPlugin) obj;
- installedTriggerPlugins.put(pluginName, plugin);
- }
-
- // Velocity needs the jar resource paths to be set.
- String jarResourcePath = StringUtils.join(jarPaths, ", ");
- logger.info("Setting jar resource path " + jarResourcePath);
- VelocityEngine ve = azkabanWebApp.getVelocityEngine();
- ve.addProperty("jar.resource.loader.path", jarResourcePath);
-
- return installedTriggerPlugins;
- }
-
- public Map<String, TriggerPlugin> getTriggerPlugins() {
- return triggerPlugins;
- }
-
- private static void loadViewerPlugins(Context root, String pluginPath, VelocityEngine ve) {
- File viewerPluginPath = new File(pluginPath);
- if (!viewerPluginPath.exists()) {
- return;
- }
-
- ClassLoader parentLoader = AzkabanWebServer.class.getClassLoader();
- File[] pluginDirs = viewerPluginPath.listFiles();
- ArrayList<String> jarPaths = new ArrayList<String>();
- for (File pluginDir: pluginDirs) {
- if (!pluginDir.exists()) {
- logger.error("Error viewer plugin path " + pluginDir.getPath() + " doesn't exist.");
- continue;
- }
-
- if (!pluginDir.isDirectory()) {
- logger.error("The plugin path " + pluginDir + " is not a directory.");
- continue;
- }
-
- // Load the conf directory
- File propertiesDir = new File(pluginDir, "conf");
- Props pluginProps = null;
- if (propertiesDir.exists() && propertiesDir.isDirectory()) {
- File propertiesFile = new File(propertiesDir, "plugin.properties");
- File propertiesOverrideFile = new File(propertiesDir, "override.properties");
-
- if (propertiesFile.exists()) {
- if (propertiesOverrideFile.exists()) {
- pluginProps = PropsUtils.loadProps(null, propertiesFile, propertiesOverrideFile);
- }
- else {
- pluginProps = PropsUtils.loadProps(null, propertiesFile);
- }
- }
- else {
- logger.error("Plugin conf file " + propertiesFile + " not found.");
- continue;
- }
- }
- else {
- logger.error("Plugin conf path " + propertiesDir + " not found.");
- continue;
- }
-
- String pluginName = pluginProps.getString("viewer.name");
- String pluginWebPath = pluginProps.getString("viewer.path");
- String pluginJobTypes = pluginProps.getString("viewer.jobtypes", null);
- int pluginOrder = pluginProps.getInt("viewer.order", 0);
- boolean pluginHidden = pluginProps.getBoolean("viewer.hidden", false);
- List<String> extLibClasspath = pluginProps.getStringList("viewer.external.classpaths", (List<String>)null);
-
- String pluginClass = pluginProps.getString("viewer.servlet.class");
- if (pluginClass == null) {
- logger.error("Viewer class is not set.");
- }
- else {
- logger.error("Plugin class " + pluginClass);
- }
-
- URLClassLoader urlClassLoader = null;
- File libDir = new File(pluginDir, "lib");
- if (libDir.exists() && libDir.isDirectory()) {
- File[] files = libDir.listFiles();
-
- ArrayList<URL> urls = new ArrayList<URL>();
- for (int i=0; i < files.length; ++i) {
- try {
- URL url = files[i].toURI().toURL();
- urls.add(url);
- } catch (MalformedURLException e) {
- logger.error(e);
- }
- }
-
- // Load any external libraries.
- if (extLibClasspath != null) {
- for (String extLib : extLibClasspath) {
- File extLibFile = new File(pluginDir, extLib);
- if (extLibFile.exists()) {
- if (extLibFile.isDirectory()) {
- // extLibFile is a directory; load all the files in the directory.
- File[] extLibFiles = extLibFile.listFiles();
- for (int i=0; i < extLibFiles.length; ++i) {
- try {
- URL url = extLibFiles[i].toURI().toURL();
- urls.add(url);
- } catch (MalformedURLException e) {
- logger.error(e);
- }
- }
- }
- else { // extLibFile is a file
- try {
- URL url = extLibFile.toURI().toURL();
- urls.add(url);
- } catch (MalformedURLException e) {
- logger.error(e);
- }
- }
- }
- else {
- logger.error("External library path " + extLibFile.getAbsolutePath() + " not found.");
- continue;
- }
- }
- }
-
- urlClassLoader = new URLClassLoader(urls.toArray(new URL[urls.size()]), parentLoader);
- }
- else {
- logger.error("Library path " + libDir.getAbsolutePath() + " not found.");
- continue;
- }
-
- Class<?> viewerClass = null;
- try {
- viewerClass = urlClassLoader.loadClass(pluginClass);
- }
- catch (ClassNotFoundException e) {
- logger.error("Class " + pluginClass + " not found.");
- continue;
- }
-
- String source = FileIOUtils.getSourcePathFromClass(viewerClass);
- logger.info("Source jar " + source);
- jarPaths.add("jar:file:" + source);
-
- Constructor<?> constructor = null;
- try {
- constructor = viewerClass.getConstructor(Props.class);
- } catch (NoSuchMethodException e) {
- logger.error("Constructor not found in " + pluginClass);
- continue;
- }
-
- Object obj = null;
- try {
- obj = constructor.newInstance(pluginProps);
- } catch (Exception e) {
- logger.error(e);
- logger.error(e.getCause());
- }
-
- if (!(obj instanceof AbstractAzkabanServlet)) {
- logger.error("The object is not an AbstractAzkabanServlet");
- continue;
- }
-
- AbstractAzkabanServlet avServlet = (AbstractAzkabanServlet)obj;
- root.addServlet(new ServletHolder(avServlet), "/" + pluginWebPath + "/*");
- PluginRegistry.getRegistry().register(new ViewerPlugin(
- pluginName,
- pluginWebPath,
- pluginOrder,
- pluginHidden,
- pluginJobTypes));
- }
-
- // Velocity needs the jar resource paths to be set.
- String jarResourcePath = StringUtils.join(jarPaths, ", ");
- logger.info("Setting jar resource path " + jarResourcePath);
- ve.addProperty("jar.resource.loader.path", jarResourcePath);
- }
-
- /**
- * Loads the Azkaban property file from the AZKABAN_HOME conf directory
- *
- * @return
- */
- private static Props loadConfigurationFromAzkabanHome() {
- String azkabanHome = System.getenv("AZKABAN_HOME");
-
- if (azkabanHome == null) {
- logger.error("AZKABAN_HOME not set. Will try default.");
- return null;
- }
-
- if (!new File(azkabanHome).isDirectory() || !new File(azkabanHome).canRead()) {
- logger.error(azkabanHome + " is not a readable directory.");
- return null;
- }
-
- File confPath = new File(azkabanHome, DEFAULT_CONF_PATH);
- if (!confPath.exists() || !confPath.isDirectory()
- || !confPath.canRead()) {
- logger.error(azkabanHome + " does not contain a readable conf directory.");
- return null;
- }
-
- return loadAzkabanConfigurationFromDirectory(confPath);
- }
-
- /**
- * Returns the set temp dir
- *
- * @return
- */
- public File getTempDirectory() {
- return tempDir;
- }
-
- private static Props loadAzkabanConfigurationFromDirectory(File dir) {
- File azkabanPrivatePropsFile = new File(dir, AZKABAN_PRIVATE_PROPERTIES_FILE);
- File azkabanPropsFile = new File(dir, AZKABAN_PROPERTIES_FILE);
-
- Props props = null;
- try {
- // This is purely optional
- if (azkabanPrivatePropsFile.exists() && azkabanPrivatePropsFile.isFile()) {
- logger.info("Loading azkaban private properties file" );
- props = new Props(null, azkabanPrivatePropsFile);
- }
-
- if (azkabanPropsFile.exists() && azkabanPropsFile.isFile()) {
- logger.info("Loading azkaban properties file" );
- props = new Props(props, azkabanPropsFile);
- }
- } catch (FileNotFoundException e) {
- logger.error("File not found. Could not load azkaban config file", e);
- } catch (IOException e) {
- logger.error("File found, but error reading. Could not load azkaban config file", e);
- }
-
- return props;
- }
-
- private void configureMBeanServer() {
- logger.info("Registering MBeans...");
- mbeanServer = ManagementFactory.getPlatformMBeanServer();
-
- registerMbean("jetty", new JmxJettyServer(server));
- registerMbean("triggerManager", new JmxTriggerManager(triggerManager));
- if(executorManager instanceof ExecutorManager) {
- registerMbean("executorManager", new JmxExecutorManager((ExecutorManager) executorManager));
- }
-// registerMbean("executorManager", new JmxExecutorManager(executorManager));
-// registerMbean("executorManager", new JmxExecutorManager(executorManager));
- }
-
- public void close() {
- try {
- for (ObjectName name : registeredMBeans) {
- mbeanServer.unregisterMBean(name);
- logger.info("Jmx MBean " + name.getCanonicalName() + " unregistered.");
- }
- } catch (Exception e) {
- logger.error("Failed to cleanup MBeanServer", e);
- }
- scheduleManager.shutdown();
-// if(executorManager instanceof ExecutorManagerLocalAdapter) {
-// ((ExecutorManagerLocalAdapter)executorManager).getExecutorManager().shutdown();
-// }
- executorManager.shutdown();
- }
-
- private void registerMbean(String name, Object mbean) {
- Class<?> mbeanClass = mbean.getClass();
- ObjectName mbeanName;
- try {
- mbeanName = new ObjectName(mbeanClass.getName() + ":name=" + name);
- mbeanServer.registerMBean(mbean, mbeanName);
- logger.info("Bean " + mbeanClass.getCanonicalName() + " registered.");
- registeredMBeans.add(mbeanName);
- } catch (Exception e) {
- logger.error("Error registering mbean " + mbeanClass.getCanonicalName(), e);
- }
- }
-
- public List<ObjectName> getMbeanNames() {
- return registeredMBeans;
- }
-
- public MBeanInfo getMBeanInfo(ObjectName name) {
- try {
- return mbeanServer.getMBeanInfo(name);
- } catch (Exception e) {
- logger.error(e);
- return null;
- }
- }
-
- public Object getMBeanAttribute(ObjectName name, String attribute) {
- try {
- return mbeanServer.getAttribute(name, attribute);
- } catch (Exception e) {
- logger.error(e);
- return null;
- }
- }
+  /** Returns the executor manager that dispatches and tracks flow executions. */
+  public ExecutorManager getExecutorManager() {
+    return executorManager;
+  }
+
+  /** Returns the schedule manager. */
+  public ScheduleManager getScheduleManager() {
+    return scheduleManager;
+  }
+
+  /** Returns the trigger manager. */
+  public TriggerManager getTriggerManager() {
+    return triggerManager;
+  }
+
+ // public TriggerBasedScheduler getScheduler() {
+ // return scheduler;
+ // }
+ //
+  /**
+   * Creates and configures the velocity engine. In dev mode template caching
+   * is disabled and macro libraries auto-reload so edits show up without a
+   * restart; invalid-reference logging is also enabled in dev mode only.
+   *
+   * @param devMode true to disable caching and enable template auto-reload
+   * @return a fully configured VelocityEngine
+   */
+  private VelocityEngine configureVelocityEngine(final boolean devMode) {
+    VelocityEngine engine = new VelocityEngine();
+    engine.setProperty("resource.loader", "classpath, jar");
+    engine.setProperty("classpath.resource.loader.class",
+        ClasspathResourceLoader.class.getName());
+    engine.setProperty("classpath.resource.loader.cache", !devMode);
+    engine.setProperty("classpath.resource.loader.modificationCheckInterval",
+        5L);
+    engine.setProperty("jar.resource.loader.class",
+        JarResourceLoader.class.getName());
+    engine.setProperty("jar.resource.loader.cache", !devMode);
+    // Set once; this property was previously set twice with the same value.
+    engine.setProperty("resource.manager.logwhenfound", false);
+    engine.setProperty("input.encoding", "UTF-8");
+    engine.setProperty("output.encoding", "UTF-8");
+    engine.setProperty("directive.set.null.allowed", true);
+    engine.setProperty("velocimacro.permissions.allow.inline", true);
+    engine.setProperty("velocimacro.library.autoreload", devMode);
+    engine.setProperty("velocimacro.library",
+        "/azkaban/webapp/servlet/velocity/macros.vm");
+    engine.setProperty(
+        "velocimacro.permissions.allow.inline.to.replace.global", true);
+    engine.setProperty("velocimacro.arguments.strict", true);
+    engine.setProperty("runtime.log.invalid.references", devMode);
+    engine.setProperty("runtime.log.logsystem.class", Log4JLogChute.class);
+    engine.setProperty("runtime.log.logsystem.log4j.logger",
+        Logger.getLogger("org.apache.velocity.Logger"));
+    engine.setProperty("parser.pool.size", 3);
+    return engine;
+  }
+
+ // private ClassLoader getBaseClassloader() throws MalformedURLException {
+ // final ClassLoader retVal;
+ //
+ // String hadoopHome = System.getenv("HADOOP_HOME");
+ // String hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
+ //
+ // if (hadoopConfDir != null) {
+ // logger.info("Using hadoop config found in " + hadoopConfDir);
+ // retVal = new URLClassLoader(new URL[] { new File(hadoopConfDir)
+ // .toURI().toURL() }, getClass().getClassLoader());
+ // } else if (hadoopHome != null) {
+ // logger.info("Using hadoop config found in " + hadoopHome);
+ // retVal = new URLClassLoader(
+ // new URL[] { new File(hadoopHome, "conf").toURI().toURL() },
+ // getClass().getClassLoader());
+ // } else {
+ // logger.info("HADOOP_HOME not set, using default hadoop config.");
+ // retVal = getClass().getClassLoader();
+ // }
+ //
+ // return retVal;
+ // }
+
+ public ClassLoader getClassLoader() {
+ return baseClassLoader;
+ }
+
+ /**
+ * Returns the global azkaban properties
+ *
+ * @return
+ */
+ public Props getServerProps() {
+ return props;
+ }
+
+ /**
+ * Azkaban using Jetty
+ *
+ * @param args
+ */
+ public static void main(String[] args) throws Exception {
+ logger.error("Starting Jetty Azkaban Executor...");
+ Props azkabanSettings = AzkabanServer.loadProps(args);
+
+ if (azkabanSettings == null) {
+ logger.error("Azkaban Properties not loaded.");
+ logger.error("Exiting Azkaban...");
+ return;
+ }
+
+ int maxThreads =
+ azkabanSettings.getInt("jetty.maxThreads", DEFAULT_THREAD_NUMBER);
+ boolean isStatsOn =
+ azkabanSettings.getBoolean("jetty.connector.stats", true);
+ logger.info("Setting up connector with stats on: " + isStatsOn);
+
+ boolean ssl;
+ int port;
+ final Server server = new Server();
+ if (azkabanSettings.getBoolean("jetty.use.ssl", true)) {
+ int sslPortNumber =
+ azkabanSettings.getInt("jetty.ssl.port", DEFAULT_SSL_PORT_NUMBER);
+ port = sslPortNumber;
+ ssl = true;
+ logger.info("Setting up Jetty Https Server with port:" + sslPortNumber
+ + " and numThreads:" + maxThreads);
+
+ SslSocketConnector secureConnector = new SslSocketConnector();
+ secureConnector.setPort(sslPortNumber);
+ secureConnector.setKeystore(azkabanSettings.getString("jetty.keystore"));
+ secureConnector.setPassword(azkabanSettings.getString("jetty.password"));
+ secureConnector.setKeyPassword(azkabanSettings
+ .getString("jetty.keypassword"));
+ secureConnector.setTruststore(azkabanSettings
+ .getString("jetty.truststore"));
+ secureConnector.setTrustPassword(azkabanSettings
+ .getString("jetty.trustpassword"));
+ secureConnector.setHeaderBufferSize(MAX_HEADER_BUFFER_SIZE);
+
+ server.addConnector(secureConnector);
+ } else {
+ ssl = false;
+ port = azkabanSettings.getInt("jetty.port", DEFAULT_PORT_NUMBER);
+ SocketConnector connector = new SocketConnector();
+ connector.setPort(port);
+ connector.setHeaderBufferSize(MAX_HEADER_BUFFER_SIZE);
+ server.addConnector(connector);
+ }
+
+ // setting stats configuration for connectors
+ for (Connector connector : server.getConnectors()) {
+ connector.setStatsOn(isStatsOn);
+ }
+
+ String hostname = azkabanSettings.getString("jetty.hostname", "localhost");
+ azkabanSettings.put("server.hostname", hostname);
+ azkabanSettings.put("server.port", port);
+ azkabanSettings.put("server.useSSL", String.valueOf(ssl));
+
+ app = new AzkabanWebServer(server, azkabanSettings);
+
+ boolean checkDB =
+ azkabanSettings.getBoolean(AzkabanDatabaseSetup.DATABASE_CHECK_VERSION,
+ false);
+ if (checkDB) {
+ AzkabanDatabaseSetup setup = new AzkabanDatabaseSetup(azkabanSettings);
+ setup.loadTableInfo();
+ if (setup.needsUpdating()) {
+ logger.error("Database is out of date.");
+ setup.printUpgradePlan();
+
+ logger.error("Exiting with error.");
+ System.exit(-1);
+ }
+ }
+
+ QueuedThreadPool httpThreadPool = new QueuedThreadPool(maxThreads);
+ server.setThreadPool(httpThreadPool);
+
+ String staticDir =
+ azkabanSettings.getString("web.resource.dir", DEFAULT_STATIC_DIR);
+ logger.info("Setting up web resource dir " + staticDir);
+ Context root = new Context(server, "/", Context.SESSIONS);
+ root.setMaxFormContentSize(MAX_FORM_CONTENT_SIZE);
+
+ String defaultServletPath =
+ azkabanSettings.getString("azkaban.default.servlet.path", "/index");
+ root.setResourceBase(staticDir);
+ ServletHolder indexRedirect =
+ new ServletHolder(new IndexRedirectServlet(defaultServletPath));
+ root.addServlet(indexRedirect, "/");
+ ServletHolder index = new ServletHolder(new ProjectServlet());
+ root.addServlet(index, "/index");
+
+ ServletHolder staticServlet = new ServletHolder(new DefaultServlet());
+ root.addServlet(staticServlet, "/css/*");
+ root.addServlet(staticServlet, "/js/*");
+ root.addServlet(staticServlet, "/images/*");
+ root.addServlet(staticServlet, "/fonts/*");
+ root.addServlet(staticServlet, "/favicon.ico");
+
+ root.addServlet(new ServletHolder(new ProjectManagerServlet()), "/manager");
+ root.addServlet(new ServletHolder(new ExecutorServlet()), "/executor");
+ root.addServlet(new ServletHolder(new HistoryServlet()), "/history");
+ root.addServlet(new ServletHolder(new ScheduleServlet()), "/schedule");
+ root.addServlet(new ServletHolder(new JMXHttpServlet()), "/jmx");
+ root.addServlet(new ServletHolder(new TriggerManagerServlet()), "/triggers");
+
+ ServletHolder restliHolder = new ServletHolder(new RestliServlet());
+ restliHolder.setInitParameter("resourcePackages", "azkaban.restli");
+ root.addServlet(restliHolder, "/restli/*");
+
+ String viewerPluginDir =
+ azkabanSettings.getString("viewer.plugin.dir", "plugins/viewer");
+ loadViewerPlugins(root, viewerPluginDir, app.getVelocityEngine());
+
+ // triggerplugin
+ String triggerPluginDir =
+ azkabanSettings.getString("trigger.plugin.dir", "plugins/triggers");
+ Map<String, TriggerPlugin> triggerPlugins =
+ loadTriggerPlugins(root, triggerPluginDir, app);
+ app.setTriggerPlugins(triggerPlugins);
+ // always have basic time trigger
+ // TODO: find something else to do the job
+ // app.getTriggerManager().addTriggerAgent(app.getScheduleManager().getTriggerSource(),
+ // app.getScheduleManager());
+ // add additional triggers
+ // for(TriggerPlugin plugin : triggerPlugins.values()) {
+ // TriggerAgent agent = plugin.getAgent();
+ // app.getTriggerManager().addTriggerAgent(agent.getTriggerSource(), agent);
+ // }
+ // fire up
+ app.getTriggerManager().start();
+
+ root.setAttribute(
+ AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY, app);
+ try {
+ server.start();
+ } catch (Exception e) {
+ logger.warn(e);
+ Utils.croak(e.getMessage(), 1);
+ }
+
+ Runtime.getRuntime().addShutdownHook(new Thread() {
+
+ public void run() {
+ logger.info("Shutting down http server...");
+ try {
+ app.close();
+ server.stop();
+ server.destroy();
+ } catch (Exception e) {
+ logger.error("Error while shutting down http server.", e);
+ }
+ logger.info("kk thx bye.");
+ }
+ });
+ logger.info("Server running on " + (ssl ? "ssl" : "") + " port " + port
+ + ".");
+ }
+
+ private static Map<String, TriggerPlugin> loadTriggerPlugins(Context root,
+ String pluginPath, AzkabanWebServer azkabanWebApp) {
+ File triggerPluginPath = new File(pluginPath);
+ if (!triggerPluginPath.exists()) {
+ // return Collections.<String, TriggerPlugin>emptyMap();
+ return new HashMap<String, TriggerPlugin>();
+ }
+
+ Map<String, TriggerPlugin> installedTriggerPlugins =
+ new HashMap<String, TriggerPlugin>();
+ ClassLoader parentLoader = AzkabanWebServer.class.getClassLoader();
+ File[] pluginDirs = triggerPluginPath.listFiles();
+ ArrayList<String> jarPaths = new ArrayList<String>();
+ for (File pluginDir : pluginDirs) {
+ if (!pluginDir.exists()) {
+ logger.error("Error! Trigger plugin path " + pluginDir.getPath()
+ + " doesn't exist.");
+ continue;
+ }
+
+ if (!pluginDir.isDirectory()) {
+ logger.error("The plugin path " + pluginDir + " is not a directory.");
+ continue;
+ }
+
+ // Load the conf directory
+ File propertiesDir = new File(pluginDir, "conf");
+ Props pluginProps = null;
+ if (propertiesDir.exists() && propertiesDir.isDirectory()) {
+ File propertiesFile = new File(propertiesDir, "plugin.properties");
+ File propertiesOverrideFile =
+ new File(propertiesDir, "override.properties");
+
+ if (propertiesFile.exists()) {
+ if (propertiesOverrideFile.exists()) {
+ pluginProps =
+ PropsUtils.loadProps(null, propertiesFile,
+ propertiesOverrideFile);
+ } else {
+ pluginProps = PropsUtils.loadProps(null, propertiesFile);
+ }
+ } else {
+ logger.error("Plugin conf file " + propertiesFile + " not found.");
+ continue;
+ }
+ } else {
+ logger.error("Plugin conf path " + propertiesDir + " not found.");
+ continue;
+ }
+
+ String pluginName = pluginProps.getString("trigger.name");
+ // String pluginWebPath = pluginProps.getString("trigger.web.path");
+ // int pluginOrder = pluginProps.getInt("trigger.order", 0);
+ // boolean pluginHidden = pluginProps.getBoolean("trigger.hidden", false);
+ List<String> extLibClasspath =
+ pluginProps.getStringList("trigger.external.classpaths",
+ (List<String>) null);
+
+ String pluginClass = pluginProps.getString("trigger.class");
+ if (pluginClass == null) {
+ logger.error("Trigger class is not set.");
+ } else {
+ logger.error("Plugin class " + pluginClass);
+ }
+
+ URLClassLoader urlClassLoader = null;
+ File libDir = new File(pluginDir, "lib");
+ if (libDir.exists() && libDir.isDirectory()) {
+ File[] files = libDir.listFiles();
+
+ ArrayList<URL> urls = new ArrayList<URL>();
+ for (int i = 0; i < files.length; ++i) {
+ try {
+ URL url = files[i].toURI().toURL();
+ urls.add(url);
+ } catch (MalformedURLException e) {
+ logger.error(e);
+ }
+ }
+ if (extLibClasspath != null) {
+ for (String extLib : extLibClasspath) {
+ try {
+ File file = new File(pluginDir, extLib);
+ URL url = file.toURI().toURL();
+ urls.add(url);
+ } catch (MalformedURLException e) {
+ logger.error(e);
+ }
+ }
+ }
+
+ urlClassLoader =
+ new URLClassLoader(urls.toArray(new URL[urls.size()]), parentLoader);
+ } else {
+ logger.error("Library path " + propertiesDir + " not found.");
+ continue;
+ }
+
+ Class<?> triggerClass = null;
+ try {
+ triggerClass = urlClassLoader.loadClass(pluginClass);
+ } catch (ClassNotFoundException e) {
+ logger.error("Class " + pluginClass + " not found.");
+ continue;
+ }
+
+ String source = FileIOUtils.getSourcePathFromClass(triggerClass);
+ logger.info("Source jar " + source);
+ jarPaths.add("jar:file:" + source);
+
+ Constructor<?> constructor = null;
+ try {
+ constructor =
+ triggerClass.getConstructor(String.class, Props.class,
+ Context.class, AzkabanWebServer.class);
+ } catch (NoSuchMethodException e) {
+ logger.error("Constructor not found in " + pluginClass);
+ continue;
+ }
+
+ Object obj = null;
+ try {
+ obj =
+ constructor.newInstance(pluginName, pluginProps, root,
+ azkabanWebApp);
+ } catch (Exception e) {
+ logger.error(e);
+ }
+
+ if (!(obj instanceof TriggerPlugin)) {
+ logger.error("The object is not an TriggerPlugin");
+ continue;
+ }
+
+ TriggerPlugin plugin = (TriggerPlugin) obj;
+ installedTriggerPlugins.put(pluginName, plugin);
+ }
+
+ // Velocity needs the jar resource paths to be set.
+ String jarResourcePath = StringUtils.join(jarPaths, ", ");
+ logger.info("Setting jar resource path " + jarResourcePath);
+ VelocityEngine ve = azkabanWebApp.getVelocityEngine();
+ ve.addProperty("jar.resource.loader.path", jarResourcePath);
+
+ return installedTriggerPlugins;
+ }
+
+ public Map<String, TriggerPlugin> getTriggerPlugins() {
+ return triggerPlugins;
+ }
+
+ private static void loadViewerPlugins(Context root, String pluginPath,
+ VelocityEngine ve) {
+ File viewerPluginPath = new File(pluginPath);
+ if (!viewerPluginPath.exists()) {
+ return;
+ }
+
+ ClassLoader parentLoader = AzkabanWebServer.class.getClassLoader();
+ File[] pluginDirs = viewerPluginPath.listFiles();
+ ArrayList<String> jarPaths = new ArrayList<String>();
+ for (File pluginDir : pluginDirs) {
+ if (!pluginDir.exists()) {
+ logger.error("Error viewer plugin path " + pluginDir.getPath()
+ + " doesn't exist.");
+ continue;
+ }
+
+ if (!pluginDir.isDirectory()) {
+ logger.error("The plugin path " + pluginDir + " is not a directory.");
+ continue;
+ }
+
+ // Load the conf directory
+ File propertiesDir = new File(pluginDir, "conf");
+ Props pluginProps = null;
+ if (propertiesDir.exists() && propertiesDir.isDirectory()) {
+ File propertiesFile = new File(propertiesDir, "plugin.properties");
+ File propertiesOverrideFile =
+ new File(propertiesDir, "override.properties");
+
+ if (propertiesFile.exists()) {
+ if (propertiesOverrideFile.exists()) {
+ pluginProps =
+ PropsUtils.loadProps(null, propertiesFile,
+ propertiesOverrideFile);
+ } else {
+ pluginProps = PropsUtils.loadProps(null, propertiesFile);
+ }
+ } else {
+ logger.error("Plugin conf file " + propertiesFile + " not found.");
+ continue;
+ }
+ } else {
+ logger.error("Plugin conf path " + propertiesDir + " not found.");
+ continue;
+ }
+
+ String pluginName = pluginProps.getString("viewer.name");
+ String pluginWebPath = pluginProps.getString("viewer.path");
+ String pluginJobTypes = pluginProps.getString("viewer.jobtypes", null);
+ int pluginOrder = pluginProps.getInt("viewer.order", 0);
+ boolean pluginHidden = pluginProps.getBoolean("viewer.hidden", false);
+ List<String> extLibClasspath =
+ pluginProps.getStringList("viewer.external.classpaths",
+ (List<String>) null);
+
+ String pluginClass = pluginProps.getString("viewer.servlet.class");
+ if (pluginClass == null) {
+ logger.error("Viewer class is not set.");
+ } else {
+ logger.error("Plugin class " + pluginClass);
+ }
+
+ URLClassLoader urlClassLoader = null;
+ File libDir = new File(pluginDir, "lib");
+ if (libDir.exists() && libDir.isDirectory()) {
+ File[] files = libDir.listFiles();
+
+ ArrayList<URL> urls = new ArrayList<URL>();
+ for (int i = 0; i < files.length; ++i) {
+ try {
+ URL url = files[i].toURI().toURL();
+ urls.add(url);
+ } catch (MalformedURLException e) {
+ logger.error(e);
+ }
+ }
+
+ // Load any external libraries.
+ if (extLibClasspath != null) {
+ for (String extLib : extLibClasspath) {
+ File extLibFile = new File(pluginDir, extLib);
+ if (extLibFile.exists()) {
+ if (extLibFile.isDirectory()) {
+ // extLibFile is a directory; load all the files in the
+ // directory.
+ File[] extLibFiles = extLibFile.listFiles();
+ for (int i = 0; i < extLibFiles.length; ++i) {
+ try {
+ URL url = extLibFiles[i].toURI().toURL();
+ urls.add(url);
+ } catch (MalformedURLException e) {
+ logger.error(e);
+ }
+ }
+ } else { // extLibFile is a file
+ try {
+ URL url = extLibFile.toURI().toURL();
+ urls.add(url);
+ } catch (MalformedURLException e) {
+ logger.error(e);
+ }
+ }
+ } else {
+ logger.error("External library path "
+ + extLibFile.getAbsolutePath() + " not found.");
+ continue;
+ }
+ }
+ }
+
+ urlClassLoader =
+ new URLClassLoader(urls.toArray(new URL[urls.size()]), parentLoader);
+ } else {
+ logger
+ .error("Library path " + libDir.getAbsolutePath() + " not found.");
+ continue;
+ }
+
+ Class<?> viewerClass = null;
+ try {
+ viewerClass = urlClassLoader.loadClass(pluginClass);
+ } catch (ClassNotFoundException e) {
+ logger.error("Class " + pluginClass + " not found.");
+ continue;
+ }
+
+ String source = FileIOUtils.getSourcePathFromClass(viewerClass);
+ logger.info("Source jar " + source);
+ jarPaths.add("jar:file:" + source);
+
+ Constructor<?> constructor = null;
+ try {
+ constructor = viewerClass.getConstructor(Props.class);
+ } catch (NoSuchMethodException e) {
+ logger.error("Constructor not found in " + pluginClass);
+ continue;
+ }
+
+ Object obj = null;
+ try {
+ obj = constructor.newInstance(pluginProps);
+ } catch (Exception e) {
+ logger.error(e);
+ logger.error(e.getCause());
+ }
+
+ if (!(obj instanceof AbstractAzkabanServlet)) {
+ logger.error("The object is not an AbstractAzkabanServlet");
+ continue;
+ }
+
+ AbstractAzkabanServlet avServlet = (AbstractAzkabanServlet) obj;
+ root.addServlet(new ServletHolder(avServlet), "/" + pluginWebPath + "/*");
+ PluginRegistry.getRegistry().register(
+ new ViewerPlugin(pluginName, pluginWebPath, pluginOrder,
+ pluginHidden, pluginJobTypes));
+ }
+
+ // Velocity needs the jar resource paths to be set.
+ String jarResourcePath = StringUtils.join(jarPaths, ", ");
+ logger.info("Setting jar resource path " + jarResourcePath);
+ ve.addProperty("jar.resource.loader.path", jarResourcePath);
+ }
+
+ /**
+ * Loads the Azkaban property file from the AZKABAN_HOME conf directory
+ *
+ * @return
+ */
+ private static Props loadConfigurationFromAzkabanHome() {
+ String azkabanHome = System.getenv("AZKABAN_HOME");
+
+ if (azkabanHome == null) {
+ logger.error("AZKABAN_HOME not set. Will try default.");
+ return null;
+ }
+
+ if (!new File(azkabanHome).isDirectory()
+ || !new File(azkabanHome).canRead()) {
+ logger.error(azkabanHome + " is not a readable directory.");
+ return null;
+ }
+
+ File confPath = new File(azkabanHome, DEFAULT_CONF_PATH);
+ if (!confPath.exists() || !confPath.isDirectory() || !confPath.canRead()) {
+ logger
+ .error(azkabanHome + " does not contain a readable conf directory.");
+ return null;
+ }
+
+ return loadAzkabanConfigurationFromDirectory(confPath);
+ }
+
+ /**
+ * Returns the set temp dir
+ *
+ * @return
+ */
+ public File getTempDirectory() {
+ return tempDir;
+ }
+
+ private static Props loadAzkabanConfigurationFromDirectory(File dir) {
+ File azkabanPrivatePropsFile =
+ new File(dir, AZKABAN_PRIVATE_PROPERTIES_FILE);
+ File azkabanPropsFile = new File(dir, AZKABAN_PROPERTIES_FILE);
+
+ Props props = null;
+ try {
+ // This is purely optional
+ if (azkabanPrivatePropsFile.exists() && azkabanPrivatePropsFile.isFile()) {
+ logger.info("Loading azkaban private properties file");
+ props = new Props(null, azkabanPrivatePropsFile);
+ }
+
+ if (azkabanPropsFile.exists() && azkabanPropsFile.isFile()) {
+ logger.info("Loading azkaban properties file");
+ props = new Props(props, azkabanPropsFile);
+ }
+ } catch (FileNotFoundException e) {
+ logger.error("File not found. Could not load azkaban config file", e);
+ } catch (IOException e) {
+ logger.error(
+ "File found, but error reading. Could not load azkaban config file",
+ e);
+ }
+
+ return props;
+ }
+
+ private void configureMBeanServer() {
+ logger.info("Registering MBeans...");
+ mbeanServer = ManagementFactory.getPlatformMBeanServer();
+
+ registerMbean("jetty", new JmxJettyServer(server));
+ registerMbean("triggerManager", new JmxTriggerManager(triggerManager));
+ if (executorManager instanceof ExecutorManager) {
+ registerMbean("executorManager", new JmxExecutorManager(
+ (ExecutorManager) executorManager));
+ }
+ // registerMbean("executorManager", new
+ // JmxExecutorManager(executorManager));
+ // registerMbean("executorManager", new
+ // JmxExecutorManager(executorManager));
+ }
+
+ public void close() {
+ try {
+ for (ObjectName name : registeredMBeans) {
+ mbeanServer.unregisterMBean(name);
+ logger.info("Jmx MBean " + name.getCanonicalName() + " unregistered.");
+ }
+ } catch (Exception e) {
+ logger.error("Failed to cleanup MBeanServer", e);
+ }
+ scheduleManager.shutdown();
+ // if(executorManager instanceof ExecutorManagerLocalAdapter) {
+ // ((ExecutorManagerLocalAdapter)executorManager).getExecutorManager().shutdown();
+ // }
+ executorManager.shutdown();
+ }
+
+ private void registerMbean(String name, Object mbean) {
+ Class<?> mbeanClass = mbean.getClass();
+ ObjectName mbeanName;
+ try {
+ mbeanName = new ObjectName(mbeanClass.getName() + ":name=" + name);
+ mbeanServer.registerMBean(mbean, mbeanName);
+ logger.info("Bean " + mbeanClass.getCanonicalName() + " registered.");
+ registeredMBeans.add(mbeanName);
+ } catch (Exception e) {
+ logger.error("Error registering mbean " + mbeanClass.getCanonicalName(),
+ e);
+ }
+ }
+
+ public List<ObjectName> getMbeanNames() {
+ return registeredMBeans;
+ }
+
+ public MBeanInfo getMBeanInfo(ObjectName name) {
+ try {
+ return mbeanServer.getMBeanInfo(name);
+ } catch (Exception e) {
+ logger.error(e);
+ return null;
+ }
+ }
+
+ public Object getMBeanAttribute(ObjectName name, String attribute) {
+ try {
+ return mbeanServer.getAttribute(name, attribute);
+ } catch (Exception e) {
+ logger.error(e);
+ return null;
+ }
+ }
}
diff --git a/src/main/java/azkaban/webapp/plugin/PluginRegistry.java b/src/main/java/azkaban/webapp/plugin/PluginRegistry.java
index a6f74d2..6d19ceb 100644
--- a/src/main/java/azkaban/webapp/plugin/PluginRegistry.java
+++ b/src/main/java/azkaban/webapp/plugin/PluginRegistry.java
@@ -24,54 +24,53 @@ import java.util.TreeSet;
public class PluginRegistry {
- private static PluginRegistry registry;
+ private static PluginRegistry registry;
- public TreeSet<ViewerPlugin> viewerPlugins;
+ public TreeSet<ViewerPlugin> viewerPlugins;
- public Map<String, TreeSet<ViewerPlugin>> jobTypeViewerPlugins;
+ public Map<String, TreeSet<ViewerPlugin>> jobTypeViewerPlugins;
- private PluginRegistry() {
- viewerPlugins = new TreeSet<ViewerPlugin>(ViewerPlugin.COMPARATOR);
- jobTypeViewerPlugins = new HashMap<String, TreeSet<ViewerPlugin>>();
- }
+ private PluginRegistry() {
+ viewerPlugins = new TreeSet<ViewerPlugin>(ViewerPlugin.COMPARATOR);
+ jobTypeViewerPlugins = new HashMap<String, TreeSet<ViewerPlugin>>();
+ }
- public void register(ViewerPlugin plugin) {
- viewerPlugins.add(plugin);
- List<String> jobTypes = plugin.getJobTypes();
- if (jobTypes == null) {
- return;
- }
+ public void register(ViewerPlugin plugin) {
+ viewerPlugins.add(plugin);
+ List<String> jobTypes = plugin.getJobTypes();
+ if (jobTypes == null) {
+ return;
+ }
- for (String jobType : jobTypes) {
- TreeSet<ViewerPlugin> plugins = null;
- if (!jobTypeViewerPlugins.containsKey(jobType)) {
- plugins = new TreeSet<ViewerPlugin>(ViewerPlugin.COMPARATOR);
- plugins.add(plugin);
- jobTypeViewerPlugins.put(jobType, plugins);
- }
- else {
- plugins = jobTypeViewerPlugins.get(jobType);
- plugins.add(plugin);
- }
- }
- }
+ for (String jobType : jobTypes) {
+ TreeSet<ViewerPlugin> plugins = null;
+ if (!jobTypeViewerPlugins.containsKey(jobType)) {
+ plugins = new TreeSet<ViewerPlugin>(ViewerPlugin.COMPARATOR);
+ plugins.add(plugin);
+ jobTypeViewerPlugins.put(jobType, plugins);
+ } else {
+ plugins = jobTypeViewerPlugins.get(jobType);
+ plugins.add(plugin);
+ }
+ }
+ }
- public List<ViewerPlugin> getViewerPlugins() {
- return new ArrayList<ViewerPlugin>(viewerPlugins);
- }
+ public List<ViewerPlugin> getViewerPlugins() {
+ return new ArrayList<ViewerPlugin>(viewerPlugins);
+ }
- public List<ViewerPlugin> getViewerPluginsForJobType(String jobType) {
- TreeSet<ViewerPlugin> plugins = jobTypeViewerPlugins.get(jobType);
- if (plugins == null) {
- return null;
- }
- return new ArrayList<ViewerPlugin>(plugins);
- }
+ public List<ViewerPlugin> getViewerPluginsForJobType(String jobType) {
+ TreeSet<ViewerPlugin> plugins = jobTypeViewerPlugins.get(jobType);
+ if (plugins == null) {
+ return null;
+ }
+ return new ArrayList<ViewerPlugin>(plugins);
+ }
- public static PluginRegistry getRegistry() {
- if (registry == null) {
- registry = new PluginRegistry();
- }
- return registry;
- }
+ public static PluginRegistry getRegistry() {
+ if (registry == null) {
+ registry = new PluginRegistry();
+ }
+ return registry;
+ }
}
diff --git a/src/main/java/azkaban/webapp/plugin/TriggerPlugin.java b/src/main/java/azkaban/webapp/plugin/TriggerPlugin.java
index 6d51ab4..d98c4d2 100644
--- a/src/main/java/azkaban/webapp/plugin/TriggerPlugin.java
+++ b/src/main/java/azkaban/webapp/plugin/TriggerPlugin.java
@@ -21,28 +21,31 @@ import azkaban.trigger.TriggerAgent;
import azkaban.webapp.servlet.AbstractAzkabanServlet;
public interface TriggerPlugin {
-
-// public TriggerPlugin(String pluginName, Props props, AzkabanWebServer azkabanWebApp) {
-// this.pluginName = pluginName;
-// this.pluginPath = props.getString("trigger.path");
-// this.order = props.getInt("trigger.order", 0);
-// this.hidden = props.getBoolean("trigger.hidden", false);
-//
-// }
-
- public AbstractAzkabanServlet getServlet();
- public TriggerAgent getAgent();
- public void load();
-
- public String getPluginName();
-
- public String getPluginPath();
-
- public int getOrder();
-
- public boolean isHidden();
-
- public void setHidden(boolean hidden);
-
- public String getInputPanelVM();
+
+ // public TriggerPlugin(String pluginName, Props props, AzkabanWebServer
+ // azkabanWebApp) {
+ // this.pluginName = pluginName;
+ // this.pluginPath = props.getString("trigger.path");
+ // this.order = props.getInt("trigger.order", 0);
+ // this.hidden = props.getBoolean("trigger.hidden", false);
+ //
+ // }
+
+ public AbstractAzkabanServlet getServlet();
+
+ public TriggerAgent getAgent();
+
+ public void load();
+
+ public String getPluginName();
+
+ public String getPluginPath();
+
+ public int getOrder();
+
+ public boolean isHidden();
+
+ public void setHidden(boolean hidden);
+
+ public String getInputPanelVM();
}
diff --git a/src/main/java/azkaban/webapp/plugin/ViewerPlugin.java b/src/main/java/azkaban/webapp/plugin/ViewerPlugin.java
index b2bb851..101bc02 100644
--- a/src/main/java/azkaban/webapp/plugin/ViewerPlugin.java
+++ b/src/main/java/azkaban/webapp/plugin/ViewerPlugin.java
@@ -21,69 +21,65 @@ import java.util.List;
import java.util.Comparator;
public class ViewerPlugin {
- private final String pluginName;
- private final String pluginPath;
- private final int order;
- private boolean hidden;
- private final List<String> jobTypes;
+ private final String pluginName;
+ private final String pluginPath;
+ private final int order;
+ private boolean hidden;
+ private final List<String> jobTypes;
- public static final Comparator<ViewerPlugin> COMPARATOR =
- new Comparator<ViewerPlugin>() {
- @Override
- public int compare(ViewerPlugin o1, ViewerPlugin o2) {
- if (o1.getOrder() != o2.getOrder()) {
- return o1.getOrder() - o2.getOrder();
- }
- return o1.getPluginName().compareTo(o2.getPluginName());
- }
- };
+ public static final Comparator<ViewerPlugin> COMPARATOR =
+ new Comparator<ViewerPlugin>() {
+ @Override
+ public int compare(ViewerPlugin o1, ViewerPlugin o2) {
+ if (o1.getOrder() != o2.getOrder()) {
+ return o1.getOrder() - o2.getOrder();
+ }
+ return o1.getPluginName().compareTo(o2.getPluginName());
+ }
+ };
- public ViewerPlugin(
- String pluginName,
- String pluginPath,
- int order,
- boolean hidden,
- String jobTypes) {
- this.pluginName = pluginName;
- this.pluginPath = pluginPath;
- this.order = order;
- this.setHidden(hidden);
- this.jobTypes = parseJobTypes(jobTypes);
- }
+ public ViewerPlugin(String pluginName, String pluginPath, int order,
+ boolean hidden, String jobTypes) {
+ this.pluginName = pluginName;
+ this.pluginPath = pluginPath;
+ this.order = order;
+ this.setHidden(hidden);
+ this.jobTypes = parseJobTypes(jobTypes);
+ }
- public String getPluginName() {
- return pluginName;
- }
+ public String getPluginName() {
+ return pluginName;
+ }
- public String getPluginPath() {
- return pluginPath;
- }
+ public String getPluginPath() {
+ return pluginPath;
+ }
- public int getOrder() {
- return order;
- }
+ public int getOrder() {
+ return order;
+ }
- public boolean isHidden() {
- return hidden;
- }
+ public boolean isHidden() {
+ return hidden;
+ }
- public void setHidden(boolean hidden) {
- this.hidden = hidden;
- }
+ public void setHidden(boolean hidden) {
+ this.hidden = hidden;
+ }
- protected List<String> parseJobTypes(String jobTypesStr) {
- if (jobTypesStr == null) {
- return null;
- }
- String[] parts = jobTypesStr.split(",");
- List<String> jobTypes = new ArrayList<String>();
- for (int i = 0; i < parts.length; ++i) {
- jobTypes.add(parts[i].trim());
- }
- return jobTypes;
- }
+ protected List<String> parseJobTypes(String jobTypesStr) {
+ if (jobTypesStr == null) {
+ return null;
+ }
+ String[] parts = jobTypesStr.split(",");
+ List<String> jobTypes = new ArrayList<String>();
+ for (int i = 0; i < parts.length; ++i) {
+ jobTypes.add(parts[i].trim());
+ }
+ return jobTypes;
+ }
- public List<String> getJobTypes() {
- return jobTypes;
- }
+ public List<String> getJobTypes() {
+ return jobTypes;
+ }
}
diff --git a/src/main/java/azkaban/webapp/servlet/AbstractAzkabanServlet.java b/src/main/java/azkaban/webapp/servlet/AbstractAzkabanServlet.java
index 2fc6ecc..d0a80f2 100644
--- a/src/main/java/azkaban/webapp/servlet/AbstractAzkabanServlet.java
+++ b/src/main/java/azkaban/webapp/servlet/AbstractAzkabanServlet.java
@@ -47,352 +47,378 @@ import azkaban.webapp.plugin.PluginRegistry;
* Base Servlet for pages
*/
public abstract class AbstractAzkabanServlet extends HttpServlet {
- private static final DateTimeFormatter ZONE_FORMATTER = DateTimeFormat.forPattern("z");
- private static final String AZKABAN_SUCCESS_MESSAGE = "azkaban.success.message";
- private static final String AZKABAN_FAILURE_MESSAGE = "azkaban.failure.message";
-
- private static final long serialVersionUID = -1;
- public static final String DEFAULT_LOG_URL_PREFIX = "predefined_log_url_prefix";
- public static final String LOG_URL_PREFIX = "log_url_prefix";
- public static final String HTML_TYPE = "text/html";
- public static final String XML_MIME_TYPE = "application/xhtml+xml";
- public static final String JSON_MIME_TYPE = "application/json";
-
- protected static final WebUtils utils = new WebUtils();
-
- private AzkabanServer application;
- private String name;
- private String label;
- private String color;
-
- private List<ViewerPlugin> viewerPlugins;
- private List<TriggerPlugin> triggerPlugins;
-
-
- /**
- * To retrieve the application for the servlet
- *
- * @return
- */
- public AzkabanServer getApplication() {
- return application;
- }
-
- @Override
- public void init(ServletConfig config) throws ServletException {
- application = (AzkabanServer) config.getServletContext().getAttribute(AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY);
-
- if (application == null) {
- throw new IllegalStateException(
- "No batch application is defined in the servlet context!");
- }
-
- Props props = application.getServerProps();
- name = props.getString("azkaban.name", "");
- label = props.getString("azkaban.label", "");
- color = props.getString("azkaban.color", "#FF0000");
-
- if (application instanceof AzkabanWebServer) {
- AzkabanWebServer server = (AzkabanWebServer)application;
- viewerPlugins = PluginRegistry.getRegistry().getViewerPlugins();
- triggerPlugins = new ArrayList<TriggerPlugin>(server.getTriggerPlugins().values());
- }
- }
-
- /**
- * Checks for the existance of the parameter in the request
- *
- * @param request
- * @param param
- * @return
- */
- public boolean hasParam(HttpServletRequest request, String param) {
- return HttpRequestUtils.hasParam(request, param);
- }
-
- /**
- * Retrieves the param from the http servlet request. Will throw an
- * exception if not found
- *
- * @param request
- * @param name
- * @return
- * @throws ServletException
- */
- public String getParam(HttpServletRequest request, String name) throws ServletException {
- return HttpRequestUtils.getParam(request, name);
- }
-
- /**
- * Retrieves the param from the http servlet request.
- *
- * @param request
- * @param name
- * @param default
- *
- * @return
- */
- public String getParam(HttpServletRequest request, String name, String defaultVal){
- return HttpRequestUtils.getParam(request, name, defaultVal);
- }
-
-
- /**
- * Returns the param and parses it into an int. Will throw an exception if
- * not found, or a parse error if the type is incorrect.
- *
- * @param request
- * @param name
- * @return
- * @throws ServletException
- */
- public int getIntParam(HttpServletRequest request, String name) throws ServletException {
- return HttpRequestUtils.getIntParam(request, name);
- }
-
- public int getIntParam(HttpServletRequest request, String name, int defaultVal) {
- return HttpRequestUtils.getIntParam(request, name, defaultVal);
- }
-
- public long getLongParam(HttpServletRequest request, String name) throws ServletException {
- return HttpRequestUtils.getLongParam(request, name);
- }
-
- public long getLongParam(HttpServletRequest request, String name, long defaultVal) {
- return HttpRequestUtils.getLongParam(request, name, defaultVal);
- }
-
-
- public Map<String, String> getParamGroup(HttpServletRequest request, String groupName) throws ServletException {
- return HttpRequestUtils.getParamGroup(request, groupName);
- }
-
- /**
- * Returns the session value of the request.
- *
- * @param request
- * @param key
- * @param value
- */
- protected void setSessionValue(HttpServletRequest request, String key, Object value) {
- request.getSession(true).setAttribute(key, value);
- }
-
- /**
- * Adds a session value to the request
- *
- * @param request
- * @param key
- * @param value
- */
- @SuppressWarnings({ "unchecked", "rawtypes" })
- protected void addSessionValue(HttpServletRequest request, String key, Object value) {
- List l = (List) request.getSession(true).getAttribute(key);
- if (l == null)
- l = new ArrayList();
- l.add(value);
- request.getSession(true).setAttribute(key, l);
- }
-
- /**
- * Sets an error message in azkaban.failure.message in the cookie. This will
- * be used by the web client javascript to somehow display the message
- *
- * @param response
- * @param errorMsg
- */
- protected void setErrorMessageInCookie(HttpServletResponse response, String errorMsg) {
- Cookie cookie = new Cookie(AZKABAN_FAILURE_MESSAGE, errorMsg);
- cookie.setPath("/");
- response.addCookie(cookie);
- }
-
- /**
- * Sets a message in azkaban.success.message in the cookie. This will be
- * used by the web client javascript to somehow display the message
- *
- * @param response
- * @param errorMsg
- */
- protected void setSuccessMessageInCookie(HttpServletResponse response, String message) {
- Cookie cookie = new Cookie(AZKABAN_SUCCESS_MESSAGE, message);
- cookie.setPath("/");
- response.addCookie(cookie);
- }
-
- /**
- * Retrieves a success message from a cookie. azkaban.success.message
- *
- * @param request
- * @return
- */
- protected String getSuccessMessageFromCookie(HttpServletRequest request) {
- Cookie cookie = getCookieByName(request, AZKABAN_SUCCESS_MESSAGE);
-
- if (cookie == null) {
- return null;
- }
- return cookie.getValue();
- }
-
- /**
- * Retrieves a success message from a cookie. azkaban.failure.message
- *
- * @param request
- * @return
- */
- protected String getErrorMessageFromCookie(HttpServletRequest request) {
- Cookie cookie = getCookieByName(request, AZKABAN_FAILURE_MESSAGE);
- if (cookie == null) {
- return null;
- }
-
- return cookie.getValue();
- }
-
- /**
- * Retrieves a cookie by name. Potential issue in performance if a lot of
- * cookie variables are used.
- *
- * @param request
- * @return
- */
- protected Cookie getCookieByName(HttpServletRequest request, String name) {
- Cookie[] cookies = request.getCookies();
- if (cookies != null) {
- for (Cookie cookie : cookies) {
- //if (name.equals(cookie.getName()) && cookie.getPath()!=null && cookie.getPath().equals("/")) {
- if (name.equals(cookie.getName())) {
- return cookie;
- }
- }
- }
-
- return null;
- }
-
- /**
- * Creates a new velocity page to use. With session.
- *
- * @param req
- * @param resp
- * @param template
- * @return
- */
- protected Page newPage(HttpServletRequest req, HttpServletResponse resp, Session session, String template) {
- Page page = new Page(req, resp, application.getVelocityEngine(), template);
- page.add("azkaban_name", name);
- page.add("azkaban_label", label);
- page.add("azkaban_color", color);
- page.add("utils", utils);
- page.add("timezone", ZONE_FORMATTER.print(System.currentTimeMillis()));
- page.add("currentTime", (new DateTime()).getMillis());
- if (session != null && session.getUser() != null) {
- page.add("user_id", session.getUser().getUserId());
- }
- page.add("context", req.getContextPath());
-
- String errorMsg = getErrorMessageFromCookie(req);
- page.add("error_message", errorMsg == null || errorMsg.isEmpty() ? "null" : errorMsg);
- setErrorMessageInCookie(resp, null);
-
- String successMsg = getSuccessMessageFromCookie(req);
- page.add("success_message", successMsg == null || successMsg.isEmpty() ? "null" : successMsg);
- setSuccessMessageInCookie(resp, null);
-
- //@TODO, allow more than one type of viewer. For time sake, I only install the first one
- if (viewerPlugins != null && !viewerPlugins.isEmpty()) {
- page.add("viewers", viewerPlugins);
-// ViewerPlugin plugin = viewerPlugins.get(0);
-// page.add("viewerName", plugin.getPluginName());
-// page.add("viewerPath", plugin.getPluginPath());
- }
-
- if(triggerPlugins != null && !triggerPlugins.isEmpty()) {
- page.add("triggerPlugins", triggerPlugins);
- }
-
- return page;
- }
-
- /**
- * Creates a new velocity page to use.
- *
- * @param req
- * @param resp
- * @param template
- * @return
- */
- protected Page newPage(HttpServletRequest req, HttpServletResponse resp, String template) {
- Page page = new Page(req, resp, application.getVelocityEngine(), template);
- page.add("azkaban_name", name);
- page.add("azkaban_label", label);
- page.add("azkaban_color", color);
- page.add("timezone", ZONE_FORMATTER.print(System.currentTimeMillis()));
- page.add("currentTime", (new DateTime()).getMillis());
- page.add("context", req.getContextPath());
-
- //@TODO, allow more than one type of viewer. For time sake, I only install the first one
- if (viewerPlugins != null && !viewerPlugins.isEmpty()) {
- page.add("viewers", viewerPlugins);
- ViewerPlugin plugin = viewerPlugins.get(0);
- page.add("viewerName", plugin.getPluginName());
- page.add("viewerPath", plugin.getPluginPath());
- }
-
- if(triggerPlugins != null && !triggerPlugins.isEmpty()) {
- page.add("triggers", triggerPlugins);
- }
-
- return page;
- }
-
- /**
- * Writes json out to the stream.
- *
- * @param resp
- * @param obj
- * @throws IOException
- */
- protected void writeJSON(HttpServletResponse resp, Object obj) throws IOException {
- writeJSON(resp, obj, false);
- }
-
- protected void writeJSON(HttpServletResponse resp, Object obj, boolean pretty) throws IOException {
- resp.setContentType(JSON_MIME_TYPE);
- JSONUtils.toJSON(obj, resp.getOutputStream(), true);
- }
-
- /**
- * Retrieve the Azkaban application
- *
- * @param config
- * @return
- */
- public static AzkabanWebServer getApp(ServletConfig config) {
- AzkabanWebServer app = (AzkabanWebServer) config.getServletContext().getAttribute(AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY);
-
- if (app == null) {
- throw new IllegalStateException("No batch application is defined in the servlet context!");
- }
- else {
- return app;
- }
- }
-
- public static String createJsonResponse(String status, String message, String action, Map<String, Object> params) {
- HashMap<String, Object> response = new HashMap<String, Object>();
- response.put("status", status);
- if (message != null) {
- response.put("message", message);
- }
- if (action != null) {
- response.put("action", action);
- }
- if (params != null) {
- response.putAll(params);
- }
-
- return JSONUtils.toJSON(response);
- }
+ private static final DateTimeFormatter ZONE_FORMATTER = DateTimeFormat
+ .forPattern("z");
+ private static final String AZKABAN_SUCCESS_MESSAGE =
+ "azkaban.success.message";
+ private static final String AZKABAN_FAILURE_MESSAGE =
+ "azkaban.failure.message";
+
+ private static final long serialVersionUID = -1;
+ public static final String DEFAULT_LOG_URL_PREFIX =
+ "predefined_log_url_prefix";
+ public static final String LOG_URL_PREFIX = "log_url_prefix";
+ public static final String HTML_TYPE = "text/html";
+ public static final String XML_MIME_TYPE = "application/xhtml+xml";
+ public static final String JSON_MIME_TYPE = "application/json";
+
+ protected static final WebUtils utils = new WebUtils();
+
+ private AzkabanServer application;
+ private String name;
+ private String label;
+ private String color;
+
+ private List<ViewerPlugin> viewerPlugins;
+ private List<TriggerPlugin> triggerPlugins;
+
+ /**
+ * To retrieve the application for the servlet
+ *
+ * @return
+ */
+ public AzkabanServer getApplication() {
+ return application;
+ }
+
+ @Override
+ public void init(ServletConfig config) throws ServletException {
+ application =
+ (AzkabanServer) config.getServletContext().getAttribute(
+ AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY);
+
+ if (application == null) {
+ throw new IllegalStateException(
+ "No batch application is defined in the servlet context!");
+ }
+
+ Props props = application.getServerProps();
+ name = props.getString("azkaban.name", "");
+ label = props.getString("azkaban.label", "");
+ color = props.getString("azkaban.color", "#FF0000");
+
+ if (application instanceof AzkabanWebServer) {
+ AzkabanWebServer server = (AzkabanWebServer) application;
+ viewerPlugins = PluginRegistry.getRegistry().getViewerPlugins();
+ triggerPlugins =
+ new ArrayList<TriggerPlugin>(server.getTriggerPlugins().values());
+ }
+ }
+
+ /**
+   * Checks for the existence of the parameter in the request
+ *
+ * @param request
+ * @param param
+ * @return
+ */
+ public boolean hasParam(HttpServletRequest request, String param) {
+ return HttpRequestUtils.hasParam(request, param);
+ }
+
+ /**
+ * Retrieves the param from the http servlet request. Will throw an exception
+ * if not found
+ *
+ * @param request
+ * @param name
+ * @return
+ * @throws ServletException
+ */
+ public String getParam(HttpServletRequest request, String name)
+ throws ServletException {
+ return HttpRequestUtils.getParam(request, name);
+ }
+
+ /**
+ * Retrieves the param from the http servlet request.
+ *
+ * @param request
+ * @param name
+   * @param defaultVal
+ *
+ * @return
+ */
+ public String getParam(HttpServletRequest request, String name,
+ String defaultVal) {
+ return HttpRequestUtils.getParam(request, name, defaultVal);
+ }
+
+ /**
+ * Returns the param and parses it into an int. Will throw an exception if not
+ * found, or a parse error if the type is incorrect.
+ *
+ * @param request
+ * @param name
+ * @return
+ * @throws ServletException
+ */
+ public int getIntParam(HttpServletRequest request, String name)
+ throws ServletException {
+ return HttpRequestUtils.getIntParam(request, name);
+ }
+
+ public int getIntParam(HttpServletRequest request, String name, int defaultVal) {
+ return HttpRequestUtils.getIntParam(request, name, defaultVal);
+ }
+
+ public long getLongParam(HttpServletRequest request, String name)
+ throws ServletException {
+ return HttpRequestUtils.getLongParam(request, name);
+ }
+
+ public long getLongParam(HttpServletRequest request, String name,
+ long defaultVal) {
+ return HttpRequestUtils.getLongParam(request, name, defaultVal);
+ }
+
+ public Map<String, String> getParamGroup(HttpServletRequest request,
+ String groupName) throws ServletException {
+ return HttpRequestUtils.getParamGroup(request, groupName);
+ }
+
+ /**
+ * Returns the session value of the request.
+ *
+ * @param request
+ * @param key
+ * @param value
+ */
+ protected void setSessionValue(HttpServletRequest request, String key,
+ Object value) {
+ request.getSession(true).setAttribute(key, value);
+ }
+
+ /**
+ * Adds a session value to the request
+ *
+ * @param request
+ * @param key
+ * @param value
+ */
+ @SuppressWarnings({ "unchecked", "rawtypes" })
+ protected void addSessionValue(HttpServletRequest request, String key,
+ Object value) {
+ List l = (List) request.getSession(true).getAttribute(key);
+ if (l == null)
+ l = new ArrayList();
+ l.add(value);
+ request.getSession(true).setAttribute(key, l);
+ }
+
+ /**
+ * Sets an error message in azkaban.failure.message in the cookie. This will
+ * be used by the web client javascript to somehow display the message
+ *
+ * @param response
+ * @param errorMsg
+ */
+ protected void setErrorMessageInCookie(HttpServletResponse response,
+ String errorMsg) {
+ Cookie cookie = new Cookie(AZKABAN_FAILURE_MESSAGE, errorMsg);
+ cookie.setPath("/");
+ response.addCookie(cookie);
+ }
+
+ /**
+ * Sets a message in azkaban.success.message in the cookie. This will be used
+ * by the web client javascript to somehow display the message
+ *
+ * @param response
+   * @param message
+ */
+ protected void setSuccessMessageInCookie(HttpServletResponse response,
+ String message) {
+ Cookie cookie = new Cookie(AZKABAN_SUCCESS_MESSAGE, message);
+ cookie.setPath("/");
+ response.addCookie(cookie);
+ }
+
+ /**
+ * Retrieves a success message from a cookie. azkaban.success.message
+ *
+ * @param request
+ * @return
+ */
+ protected String getSuccessMessageFromCookie(HttpServletRequest request) {
+ Cookie cookie = getCookieByName(request, AZKABAN_SUCCESS_MESSAGE);
+
+ if (cookie == null) {
+ return null;
+ }
+ return cookie.getValue();
+ }
+
+ /**
+   * Retrieves an error message from a cookie. azkaban.failure.message
+ *
+ * @param request
+ * @return
+ */
+ protected String getErrorMessageFromCookie(HttpServletRequest request) {
+ Cookie cookie = getCookieByName(request, AZKABAN_FAILURE_MESSAGE);
+ if (cookie == null) {
+ return null;
+ }
+
+ return cookie.getValue();
+ }
+
+ /**
+ * Retrieves a cookie by name. Potential issue in performance if a lot of
+ * cookie variables are used.
+ *
+ * @param request
+ * @return
+ */
+ protected Cookie getCookieByName(HttpServletRequest request, String name) {
+ Cookie[] cookies = request.getCookies();
+ if (cookies != null) {
+ for (Cookie cookie : cookies) {
+ // if (name.equals(cookie.getName()) && cookie.getPath()!=null &&
+ // cookie.getPath().equals("/")) {
+ if (name.equals(cookie.getName())) {
+ return cookie;
+ }
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Creates a new velocity page to use. With session.
+ *
+ * @param req
+ * @param resp
+ * @param template
+ * @return
+ */
+ protected Page newPage(HttpServletRequest req, HttpServletResponse resp,
+ Session session, String template) {
+ Page page = new Page(req, resp, application.getVelocityEngine(), template);
+ page.add("azkaban_name", name);
+ page.add("azkaban_label", label);
+ page.add("azkaban_color", color);
+ page.add("utils", utils);
+ page.add("timezone", ZONE_FORMATTER.print(System.currentTimeMillis()));
+ page.add("currentTime", (new DateTime()).getMillis());
+ if (session != null && session.getUser() != null) {
+ page.add("user_id", session.getUser().getUserId());
+ }
+ page.add("context", req.getContextPath());
+
+ String errorMsg = getErrorMessageFromCookie(req);
+ page.add("error_message", errorMsg == null || errorMsg.isEmpty() ? "null"
+ : errorMsg);
+ setErrorMessageInCookie(resp, null);
+
+ String successMsg = getSuccessMessageFromCookie(req);
+ page.add("success_message",
+ successMsg == null || successMsg.isEmpty() ? "null" : successMsg);
+ setSuccessMessageInCookie(resp, null);
+
+ // @TODO, allow more than one type of viewer. For time sake, I only install
+ // the first one
+ if (viewerPlugins != null && !viewerPlugins.isEmpty()) {
+ page.add("viewers", viewerPlugins);
+ // ViewerPlugin plugin = viewerPlugins.get(0);
+ // page.add("viewerName", plugin.getPluginName());
+ // page.add("viewerPath", plugin.getPluginPath());
+ }
+
+ if (triggerPlugins != null && !triggerPlugins.isEmpty()) {
+ page.add("triggerPlugins", triggerPlugins);
+ }
+
+ return page;
+ }
+
+ /**
+ * Creates a new velocity page to use.
+ *
+ * @param req
+ * @param resp
+ * @param template
+ * @return
+ */
+ protected Page newPage(HttpServletRequest req, HttpServletResponse resp,
+ String template) {
+ Page page = new Page(req, resp, application.getVelocityEngine(), template);
+ page.add("azkaban_name", name);
+ page.add("azkaban_label", label);
+ page.add("azkaban_color", color);
+ page.add("timezone", ZONE_FORMATTER.print(System.currentTimeMillis()));
+ page.add("currentTime", (new DateTime()).getMillis());
+ page.add("context", req.getContextPath());
+
+ // @TODO, allow more than one type of viewer. For time sake, I only install
+ // the first one
+ if (viewerPlugins != null && !viewerPlugins.isEmpty()) {
+ page.add("viewers", viewerPlugins);
+ ViewerPlugin plugin = viewerPlugins.get(0);
+ page.add("viewerName", plugin.getPluginName());
+ page.add("viewerPath", plugin.getPluginPath());
+ }
+
+ if (triggerPlugins != null && !triggerPlugins.isEmpty()) {
+ page.add("triggers", triggerPlugins);
+ }
+
+ return page;
+ }
+
+ /**
+ * Writes json out to the stream.
+ *
+ * @param resp
+ * @param obj
+ * @throws IOException
+ */
+ protected void writeJSON(HttpServletResponse resp, Object obj)
+ throws IOException {
+ writeJSON(resp, obj, false);
+ }
+
+ protected void writeJSON(HttpServletResponse resp, Object obj, boolean pretty)
+ throws IOException {
+ resp.setContentType(JSON_MIME_TYPE);
+    JSONUtils.toJSON(obj, resp.getOutputStream(), pretty);
+ }
+
+ /**
+ * Retrieve the Azkaban application
+ *
+ * @param config
+ * @return
+ */
+ public static AzkabanWebServer getApp(ServletConfig config) {
+ AzkabanWebServer app =
+ (AzkabanWebServer) config.getServletContext().getAttribute(
+ AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY);
+
+ if (app == null) {
+ throw new IllegalStateException(
+ "No batch application is defined in the servlet context!");
+ } else {
+ return app;
+ }
+ }
+
+ public static String createJsonResponse(String status, String message,
+ String action, Map<String, Object> params) {
+ HashMap<String, Object> response = new HashMap<String, Object>();
+ response.put("status", status);
+ if (message != null) {
+ response.put("message", message);
+ }
+ if (action != null) {
+ response.put("action", action);
+ }
+ if (params != null) {
+ response.putAll(params);
+ }
+
+ return JSONUtils.toJSON(response);
+ }
}
diff --git a/src/main/java/azkaban/webapp/servlet/AbstractServiceServlet.java b/src/main/java/azkaban/webapp/servlet/AbstractServiceServlet.java
index eb1013e..b29553e 100644
--- a/src/main/java/azkaban/webapp/servlet/AbstractServiceServlet.java
+++ b/src/main/java/azkaban/webapp/servlet/AbstractServiceServlet.java
@@ -10,82 +10,89 @@ import javax.servlet.http.HttpServletResponse;
import org.codehaus.jackson.map.ObjectMapper;
import azkaban.webapp.AzkabanServer;
-public class AbstractServiceServlet extends HttpServlet{
-
- private static final long serialVersionUID = 1L;
- public static final String JSON_MIME_TYPE = "application/json";
-
- private AzkabanServer application;
-
- @Override
- public void init(ServletConfig config) throws ServletException {
- application = (AzkabanServer) config.getServletContext().getAttribute(AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY);
-
- if (application == null) {
- throw new IllegalStateException(
- "No batch application is defined in the servlet context!");
- }
- }
-
- protected void writeJSON(HttpServletResponse resp, Object obj) throws IOException {
- resp.setContentType(JSON_MIME_TYPE);
- ObjectMapper mapper = new ObjectMapper();
- OutputStream stream = resp.getOutputStream();
- mapper.writeValue(stream, obj);
- }
-
- public boolean hasParam(HttpServletRequest request, String param) {
- return request.getParameter(param) != null;
- }
-
- public String getParam(HttpServletRequest request, String name)
- throws ServletException {
- String p = request.getParameter(name);
- if (p == null)
- throw new ServletException("Missing required parameter '" + name + "'.");
- else
- return p;
- }
-
- public String getParam(HttpServletRequest request, String name, String defaultVal ) {
- String p = request.getParameter(name);
- if (p == null) {
- return defaultVal;
- }
-
- return p;
- }
-
- public int getIntParam(HttpServletRequest request, String name) throws ServletException {
- String p = getParam(request, name);
- return Integer.parseInt(p);
- }
-
- public int getIntParam(HttpServletRequest request, String name, int defaultVal) {
- if (hasParam(request, name)) {
- try {
- return getIntParam(request, name);
- } catch (Exception e) {
- return defaultVal;
- }
- }
- return defaultVal;
- }
-
- public long getLongParam(HttpServletRequest request, String name) throws ServletException {
- String p = getParam(request, name);
- return Long.parseLong(p);
- }
-
- public long getLongParam(HttpServletRequest request, String name, long defaultVal) {
- if (hasParam(request, name)) {
- try {
- return getLongParam(request, name);
- } catch (Exception e) {
- return defaultVal;
- }
- }
- return defaultVal;
- }
-
+public class AbstractServiceServlet extends HttpServlet {
+
+ private static final long serialVersionUID = 1L;
+ public static final String JSON_MIME_TYPE = "application/json";
+
+ private AzkabanServer application;
+
+ @Override
+ public void init(ServletConfig config) throws ServletException {
+ application =
+ (AzkabanServer) config.getServletContext().getAttribute(
+ AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY);
+
+ if (application == null) {
+ throw new IllegalStateException(
+ "No batch application is defined in the servlet context!");
+ }
+ }
+
+ protected void writeJSON(HttpServletResponse resp, Object obj)
+ throws IOException {
+ resp.setContentType(JSON_MIME_TYPE);
+ ObjectMapper mapper = new ObjectMapper();
+ OutputStream stream = resp.getOutputStream();
+ mapper.writeValue(stream, obj);
+ }
+
+ public boolean hasParam(HttpServletRequest request, String param) {
+ return request.getParameter(param) != null;
+ }
+
+ public String getParam(HttpServletRequest request, String name)
+ throws ServletException {
+ String p = request.getParameter(name);
+ if (p == null)
+ throw new ServletException("Missing required parameter '" + name + "'.");
+ else
+ return p;
+ }
+
+ public String getParam(HttpServletRequest request, String name,
+ String defaultVal) {
+ String p = request.getParameter(name);
+ if (p == null) {
+ return defaultVal;
+ }
+
+ return p;
+ }
+
+ public int getIntParam(HttpServletRequest request, String name)
+ throws ServletException {
+ String p = getParam(request, name);
+ return Integer.parseInt(p);
+ }
+
+ public int getIntParam(HttpServletRequest request, String name, int defaultVal) {
+ if (hasParam(request, name)) {
+ try {
+ return getIntParam(request, name);
+ } catch (Exception e) {
+ return defaultVal;
+ }
+ }
+ return defaultVal;
+ }
+
+ public long getLongParam(HttpServletRequest request, String name)
+ throws ServletException {
+ String p = getParam(request, name);
+ return Long.parseLong(p);
+ }
+
+ public long getLongParam(HttpServletRequest request, String name,
+ long defaultVal) {
+ if (hasParam(request, name)) {
+ try {
+ return getLongParam(request, name);
+ } catch (Exception e) {
+ return defaultVal;
+ }
+ }
+ return defaultVal;
+ }
+
}
diff --git a/src/main/java/azkaban/webapp/servlet/AzkabanServletContextListener.java b/src/main/java/azkaban/webapp/servlet/AzkabanServletContextListener.java
index db4437d..309629a 100644
--- a/src/main/java/azkaban/webapp/servlet/AzkabanServletContextListener.java
+++ b/src/main/java/azkaban/webapp/servlet/AzkabanServletContextListener.java
@@ -25,28 +25,29 @@ import azkaban.webapp.AzkabanWebServer;
* A ServletContextListener that loads the batch application
*/
public class AzkabanServletContextListener implements ServletContextListener {
- public static final String AZKABAN_SERVLET_CONTEXT_KEY = "azkaban_app";
-
- private AzkabanWebServer app;
-
- /**
- * Delete the app
- */
- public void contextDestroyed(ServletContextEvent event) {
- this.app = null;
- }
-
- /**
- * Load the app for use in non jetty containers.
- */
- public void contextInitialized(ServletContextEvent event) {
- try {
- this.app = new AzkabanWebServer();
- } catch (Exception e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
-
- event.getServletContext().setAttribute(AZKABAN_SERVLET_CONTEXT_KEY, this.app);
- }
+ public static final String AZKABAN_SERVLET_CONTEXT_KEY = "azkaban_app";
+
+ private AzkabanWebServer app;
+
+ /**
+ * Delete the app
+ */
+ public void contextDestroyed(ServletContextEvent event) {
+ this.app = null;
+ }
+
+ /**
+ * Load the app for use in non jetty containers.
+ */
+ public void contextInitialized(ServletContextEvent event) {
+ try {
+ this.app = new AzkabanWebServer();
+ } catch (Exception e) {
+      // TODO: surface startup failure instead of swallowing the exception
+ e.printStackTrace();
+ }
+
+ event.getServletContext().setAttribute(AZKABAN_SERVLET_CONTEXT_KEY,
+ this.app);
+ }
}
src/main/java/azkaban/webapp/servlet/ExecutorServlet.java 1609(+833 -776)
diff --git a/src/main/java/azkaban/webapp/servlet/ExecutorServlet.java b/src/main/java/azkaban/webapp/servlet/ExecutorServlet.java
index 6f78ac8..e008cad 100644
--- a/src/main/java/azkaban/webapp/servlet/ExecutorServlet.java
+++ b/src/main/java/azkaban/webapp/servlet/ExecutorServlet.java
@@ -51,780 +51,837 @@ import azkaban.webapp.plugin.PluginRegistry;
import azkaban.webapp.plugin.ViewerPlugin;
public class ExecutorServlet extends LoginAbstractAzkabanServlet {
- private static final long serialVersionUID = 1L;
- private ProjectManager projectManager;
- private ExecutorManagerAdapter executorManager;
- private ScheduleManager scheduleManager;
- private ExecutorVelocityHelper velocityHelper;
-
- @Override
- public void init(ServletConfig config) throws ServletException {
- super.init(config);
- AzkabanWebServer server = (AzkabanWebServer)getApplication();
- projectManager = server.getProjectManager();
- executorManager = server.getExecutorManager();
- scheduleManager = server.getScheduleManager();
- velocityHelper = new ExecutorVelocityHelper();
- }
-
- @Override
- protected void handleGet(HttpServletRequest req, HttpServletResponse resp,
- Session session) throws ServletException, IOException {
- if (hasParam(req, "ajax")) {
- handleAJAXAction(req, resp, session);
- }
- else if (hasParam(req, "execid")) {
- if (hasParam(req, "job")) {
- handleExecutionJobDetailsPage(req, resp, session);
- }
- else {
- handleExecutionFlowPage(req, resp, session);
- }
- }
- else {
- handleExecutionsPage(req, resp, session);
- }
- }
-
- private void handleAJAXAction(HttpServletRequest req,
- HttpServletResponse resp, Session session)
- throws ServletException, IOException {
- HashMap<String, Object> ret = new HashMap<String, Object>();
- String ajaxName = getParam(req, "ajax");
-
- if (hasParam(req, "execid")) {
- int execid = getIntParam(req, "execid");
- ExecutableFlow exFlow = null;
-
- try {
- exFlow = executorManager.getExecutableFlow(execid);
- } catch (ExecutorManagerException e) {
- ret.put("error", "Error fetching execution '" + execid + "': " + e.getMessage());
- }
-
- if (exFlow == null) {
- ret.put("error", "Cannot find execution '" + execid + "'");
- }
- else {
- if (ajaxName.equals("fetchexecflow")) {
- ajaxFetchExecutableFlow(req, resp, ret, session.getUser(), exFlow);
- }
- else if (ajaxName.equals("fetchexecflowupdate")) {
- ajaxFetchExecutableFlowUpdate(req, resp, ret, session.getUser(), exFlow);
- }
- else if (ajaxName.equals("cancelFlow")) {
- ajaxCancelFlow(req, resp, ret, session.getUser(), exFlow);
- }
- else if (ajaxName.equals("pauseFlow")) {
- ajaxPauseFlow(req, resp, ret, session.getUser(), exFlow);
- }
- else if (ajaxName.equals("resumeFlow")) {
- ajaxResumeFlow(req, resp, ret, session.getUser(), exFlow);
- }
- else if (ajaxName.equals("fetchExecFlowLogs")) {
- ajaxFetchExecFlowLogs(req, resp, ret, session.getUser(), exFlow);
- }
- else if (ajaxName.equals("fetchExecJobLogs")) {
- ajaxFetchJobLogs(req, resp, ret, session.getUser(), exFlow);
- }
- else if (ajaxName.equals("fetchExecJobStats")) {
- ajaxFetchJobStats(req, resp, ret, session.getUser(), exFlow);
- }
- else if (ajaxName.equals("retryFailedJobs")) {
- ajaxRestartFailed(req, resp, ret, session.getUser(), exFlow);
- }
-// else if (ajaxName.equals("fetchLatestJobStatus")) {
-// ajaxFetchLatestJobStatus(req, resp, ret, session.getUser(), exFlow);
-// }
- else if (ajaxName.equals("flowInfo")) {
- //String projectName = getParam(req, "project");
- //Project project = projectManager.getProject(projectName);
- //String flowName = getParam(req, "flow");
- ajaxFetchExecutableFlowInfo(req, resp, ret, session.getUser(), exFlow);
- }
- }
- }
- else if (ajaxName.equals("getRunning")) {
- String projectName = getParam(req, "project");
- String flowName = getParam(req, "flow");
- ajaxGetFlowRunning(req, resp, ret, session.getUser(), projectName, flowName);
- }
- else if (ajaxName.equals("flowInfo")) {
- String projectName = getParam(req, "project");
- String flowName = getParam(req, "flow");
- ajaxFetchFlowInfo(req, resp, ret, session.getUser(), projectName, flowName);
- }
- else {
- String projectName = getParam(req, "project");
-
- ret.put("project", projectName);
- if (ajaxName.equals("executeFlow")) {
- ajaxAttemptExecuteFlow(req, resp, ret, session.getUser());
- }
- }
- if (ret != null) {
- this.writeJSON(resp, ret);
- }
- }
-
- @Override
- protected void handlePost(HttpServletRequest req, HttpServletResponse resp,
- Session session) throws ServletException, IOException {
- if (hasParam(req, "ajax")) {
- handleAJAXAction(req, resp, session);
- }
- }
-
- private void handleExecutionJobDetailsPage(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/jobdetailspage.vm");
- User user = session.getUser();
- int execId = getIntParam(req, "execid");
- String jobId = getParam(req, "job");
- int attempt = getIntParam(req, "attempt", 0);
- page.add("execid", execId);
- page.add("jobid", jobId);
- page.add("attempt", attempt);
-
- ExecutableFlow flow = null;
- ExecutableNode node = null;
- try {
- flow = executorManager.getExecutableFlow(execId);
- if (flow == null) {
- page.add("errorMsg", "Error loading executing flow " + execId + ": not found.");
- page.render();
- return;
- }
-
- node = flow.getExecutableNodePath(jobId);
- if (node == null) {
- page.add("errorMsg", "Job " + jobId + " doesn't exist in " + flow.getExecutionId());
- return;
- }
-
- List<ViewerPlugin> jobViewerPlugins = PluginRegistry.getRegistry()
- .getViewerPluginsForJobType(node.getType());
- page.add("jobViewerPlugins", jobViewerPlugins);
- }
- catch (ExecutorManagerException e) {
- page.add("errorMsg", "Error loading executing flow: " + e.getMessage());
- page.render();
- return;
- }
-
- int projectId = flow.getProjectId();
- Project project = getProjectPageByPermission(page, projectId, user, Type.READ);
- if (project == null) {
- page.render();
- return;
- }
-
- page.add("projectName", project.getName());
- page.add("flowid", flow.getId());
- page.add("parentflowid", node.getParentFlow().getFlowId());
- page.add("jobname", node.getId());
-
- page.render();
- }
-
- private void handleExecutionsPage(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/executionspage.vm");
-
- List<ExecutableFlow> runningFlows = executorManager.getRunningFlows();
- page.add("runningFlows", runningFlows.isEmpty() ? null : runningFlows);
-
- List<ExecutableFlow> finishedFlows = executorManager.getRecentlyFinishedFlows();
- page.add("recentlyFinished", finishedFlows.isEmpty() ? null : finishedFlows);
- page.add("vmutils", velocityHelper);
- page.render();
- }
-
- private void handleExecutionFlowPage(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/executingflowpage.vm");
- User user = session.getUser();
- int execId = getIntParam(req, "execid");
- page.add("execid", execId);
-
- ExecutableFlow flow = null;
- try {
- flow = executorManager.getExecutableFlow(execId);
- if (flow == null) {
- page.add("errorMsg", "Error loading executing flow " + execId + " not found.");
- page.render();
- return;
- }
- } catch (ExecutorManagerException e) {
- page.add("errorMsg", "Error loading executing flow: " + e.getMessage());
- page.render();
- return;
- }
-
- int projectId = flow.getProjectId();
- Project project = getProjectPageByPermission(page, projectId, user, Type.READ);
- if(project == null) {
- page.render();
- return;
- }
-
- page.add("projectId", project.getId());
- page.add("projectName", project.getName());
- page.add("flowid", flow.getFlowId());
-
- page.render();
- }
-
- protected Project getProjectPageByPermission(Page page, int projectId, User user, Permission.Type type) {
- Project project = projectManager.getProject(projectId);
-
- if (project == null) {
- page.add("errorMsg", "Project " + project + " not found.");
- }
- else if (!hasPermission(project, user, type)) {
- page.add("errorMsg", "User " + user.getUserId() + " doesn't have " + type.name() + " permissions on " + project.getName());
- }
- else {
- return project;
- }
-
- return null;
- }
-
- protected Project getProjectAjaxByPermission(Map<String, Object> ret, String projectName, User user, Permission.Type type) {
- Project project = projectManager.getProject(projectName);
-
- if (project == null) {
- ret.put("error", "Project '" + project + "' not found.");
- }
- else if (!hasPermission(project, user, type)) {
- ret.put("error", "User '" + user.getUserId() + "' doesn't have " + type.name() + " permissions on " + project.getName());
- }
- else {
- return project;
- }
-
- return null;
- }
-
- protected Project getProjectAjaxByPermission(Map<String, Object> ret, int projectId, User user, Permission.Type type) {
- Project project = projectManager.getProject(projectId);
-
- if (project == null) {
- ret.put("error", "Project '" + project + "' not found.");
- }
- else if (!hasPermission(project, user, type)) {
- ret.put("error", "User '" + user.getUserId() + "' doesn't have " + type.name() + " permissions on " + project.getName());
- }
- else {
- return project;
- }
-
- return null;
- }
-
-// private void ajaxFetchLatestJobStatus(HttpServletRequest req,HttpServletResponse resp, HashMap<String, Object> ret, User user, ExecutableFlow exFlow) {
-// Project project = getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
-// if (project == null) {
-// ret.put("error", "Project doesn't exist or incorrect access permission.");
-// return;
-// }
-//
-// String projectName;
-// String flowName;
-// String jobName;
-// try {
-// projectName = getParam(req, "projectName");
-// flowName = getParam(req, "flowName");
-// jobName = getParam(req, "jobName");
-// } catch (Exception e) {
-// ret.put("error", e.getMessage());
-// return;
-// }
-//
-// try {
-// ExecutableNode node = exFlow.getExecutableNode(jobId);
-// if (node == null) {
-// ret.put("error", "Job " + jobId + " doesn't exist in " + exFlow.getExecutionId());
-// return;
-// }
-//
-// int attempt = this.getIntParam(req, "attempt", node.getAttempt());
-// LogData data = executorManager.getExecutionJobLog(exFlow, jobId, offset, length, attempt);
-// if (data == null) {
-// ret.put("length", 0);
-// ret.put("offset", offset);
-// ret.put("data", "");
-// }
-// else {
-// ret.put("length", data.getLength());
-// ret.put("offset", data.getOffset());
-// ret.put("data", data.getData());
-// }
-// } catch (ExecutorManagerException e) {
-// throw new ServletException(e);
-// }
-//
-// }
-
- private void ajaxRestartFailed(HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> ret, User user, ExecutableFlow exFlow) throws ServletException {
- Project project = getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.EXECUTE);
- if (project == null) {
- return;
- }
-
- if (exFlow.getStatus() == Status.FAILED || exFlow.getStatus() == Status.SUCCEEDED) {
- ret.put("error", "Flow has already finished. Please re-execute.");
- return;
- }
-
- try {
- executorManager.retryFailures(exFlow, user.getUserId());
- } catch (ExecutorManagerException e) {
- ret.put("error", e.getMessage());
- }
- }
-
- /**
- * Gets the logs through plain text stream to reduce memory overhead.
- *
- * @param req
- * @param resp
- * @param user
- * @param exFlow
- * @throws ServletException
- */
- private void ajaxFetchExecFlowLogs(HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> ret, User user, ExecutableFlow exFlow) throws ServletException {
- Project project = getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
- if (project == null) {
- return;
- }
-
- int offset = this.getIntParam(req, "offset");
- int length = this.getIntParam(req, "length");
-
- resp.setCharacterEncoding("utf-8");
-
- try {
- LogData data = executorManager.getExecutableFlowLog(exFlow, offset, length);
- if (data == null) {
- ret.put("length", 0);
- ret.put("offset", offset);
- ret.put("data", "");
- }
- else {
- ret.put("length", data.getLength());
- ret.put("offset", data.getOffset());
- ret.put("data", data.getData());
- }
- } catch (ExecutorManagerException e) {
- throw new ServletException(e);
- }
- }
-
- /**
- * Gets the logs through ajax plain text stream to reduce memory overhead.
- *
- * @param req
- * @param resp
- * @param user
- * @param exFlow
- * @throws ServletException
- */
- private void ajaxFetchJobLogs(HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> ret, User user, ExecutableFlow exFlow) throws ServletException {
- Project project = getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
- if (project == null) {
- return;
- }
-
- int offset = this.getIntParam(req, "offset");
- int length = this.getIntParam(req, "length");
-
- String jobId = this.getParam(req, "jobId");
- resp.setCharacterEncoding("utf-8");
-
- try {
- ExecutableNode node = exFlow.getExecutableNodePath(jobId);
- if (node == null) {
- ret.put("error", "Job " + jobId + " doesn't exist in " + exFlow.getExecutionId());
- return;
- }
-
- int attempt = this.getIntParam(req, "attempt", node.getAttempt());
- LogData data = executorManager.getExecutionJobLog(exFlow, jobId, offset, length, attempt);
- if (data == null) {
- ret.put("length", 0);
- ret.put("offset", offset);
- ret.put("data", "");
- }
- else {
- ret.put("length", data.getLength());
- ret.put("offset", data.getOffset());
- ret.put("data", data.getData());
- }
- } catch (ExecutorManagerException e) {
- throw new ServletException(e);
- }
- }
-
- private void ajaxFetchJobStats(
- HttpServletRequest req,
- HttpServletResponse resp,
- HashMap<String, Object> ret,
- User user,
- ExecutableFlow exFlow) throws ServletException {
- Project project = getProjectAjaxByPermission(
- ret, exFlow.getProjectId(), user, Type.READ);
- if (project == null) {
- return;
- }
-
- String jobId = this.getParam(req, "jobid");
- resp.setCharacterEncoding("utf-8");
-
- try {
- ExecutableNode node = exFlow.getExecutableNodePath(jobId);
- if (node == null) {
- ret.put("error", "Job " + jobId + " doesn't exist in " +
- exFlow.getExecutionId());
- return;
- }
-
- List<Object> jsonObj = executorManager.getExecutionJobStats(
- exFlow, jobId, node.getAttempt());
- ret.put("jobStats", jsonObj);
- }
- catch (ExecutorManagerException e) {
- ret.put("error", "Error retrieving stats for job " + jobId);
- return;
- }
- }
-
- private void ajaxFetchFlowInfo(HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> ret, User user, String projectName, String flowId) throws ServletException {
- Project project = getProjectAjaxByPermission(ret, projectName, user, Type.READ);
- if (project == null) {
- return;
- }
-
- Flow flow = project.getFlow(flowId);
- if (flow == null) {
- ret.put("error", "Error loading flow. Flow " + flowId + " doesn't exist in " + projectName);
- return;
- }
-
- ret.put("successEmails", flow.getSuccessEmails());
- ret.put("failureEmails", flow.getFailureEmails());
-
- Schedule sflow = null;
- try {
- for (Schedule sched: scheduleManager.getSchedules()) {
- if (sched.getProjectId() == project.getId() && sched.getFlowName().equals(flowId)) {
- sflow = sched;
- break;
- }
- }
- } catch (ScheduleManagerException e) {
- // TODO Auto-generated catch block
- throw new ServletException(e);
- }
-
- if (sflow != null) {
- ret.put("scheduled", sflow.getNextExecTime());
- }
- }
-
- private void ajaxFetchExecutableFlowInfo(HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> ret, User user, ExecutableFlow exflow) throws ServletException {
- Project project = getProjectAjaxByPermission(ret, exflow.getProjectId(), user, Type.READ);
- if (project == null) {
- return;
- }
-
- Flow flow = project.getFlow(exflow.getFlowId());
- if (flow == null) {
- ret.put("error", "Error loading flow. Flow " + exflow.getFlowId() + " doesn't exist in " + exflow.getProjectId());
- return;
- }
-
- ExecutionOptions options = exflow.getExecutionOptions();
-
- ret.put("successEmails", options.getSuccessEmails());
- ret.put("failureEmails", options.getFailureEmails());
- ret.put("flowParam", options.getFlowParameters());
-
- FailureAction action = options.getFailureAction();
- String failureAction = null;
- switch (action) {
- case FINISH_CURRENTLY_RUNNING:
- failureAction = "finishCurrent";
- break;
- case CANCEL_ALL:
- failureAction = "cancelImmediately";
- break;
- case FINISH_ALL_POSSIBLE:
- failureAction = "finishPossible";
- break;
- }
- ret.put("failureAction", failureAction);
-
- ret.put("notifyFailureFirst", options.getNotifyOnFirstFailure());
- ret.put("notifyFailureLast", options.getNotifyOnLastFailure());
-
- ret.put("failureEmailsOverride", options.isFailureEmailsOverridden());
- ret.put("successEmailsOverride", options.isSuccessEmailsOverridden());
-
- ret.put("concurrentOptions", options.getConcurrentOption());
- ret.put("pipelineLevel", options.getPipelineLevel());
- ret.put("pipelineExecution", options.getPipelineExecutionId());
- ret.put("queueLevel", options.getQueueLevel());
-
- HashMap<String, String> nodeStatus = new HashMap<String, String>();
- for (ExecutableNode node : exflow.getExecutableNodes()) {
- nodeStatus.put(node.getId(), node.getStatus().toString());
- }
- ret.put("nodeStatus", nodeStatus);
- ret.put("disabled", options.getDisabledJobs());
- }
-
- private void ajaxCancelFlow(HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> ret, User user, ExecutableFlow exFlow) throws ServletException{
- Project project = getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.EXECUTE);
- if (project == null) {
- return;
- }
-
- try {
- executorManager.cancelFlow(exFlow, user.getUserId());
- } catch (ExecutorManagerException e) {
- ret.put("error", e.getMessage());
- }
- }
-
- private void ajaxGetFlowRunning(HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> ret, User user, String projectId, String flowId) throws ServletException{
- Project project = getProjectAjaxByPermission(ret, projectId, user, Type.EXECUTE);
- if (project == null) {
- return;
- }
-
- List<Integer> refs = executorManager.getRunningFlows(project.getId(), flowId);
- if (!refs.isEmpty()) {
- ret.put("execIds", refs);
- }
- }
-
- private void ajaxPauseFlow(HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> ret, User user, ExecutableFlow exFlow) throws ServletException{
- Project project = getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.EXECUTE);
- if (project == null) {
- return;
- }
-
- try {
- executorManager.pauseFlow(exFlow, user.getUserId());
- } catch (ExecutorManagerException e) {
- ret.put("error", e.getMessage());
- }
- }
-
- private void ajaxResumeFlow(HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> ret, User user, ExecutableFlow exFlow) throws ServletException{
- Project project = getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.EXECUTE);
- if (project == null) {
- return;
- }
-
- try {
- executorManager.resumeFlow(exFlow, user.getUserId());
- } catch (ExecutorManagerException e) {
- ret.put("resume", e.getMessage());
- }
- }
-
- private Map<String,Object> getExecutableFlowUpdateInfo(ExecutableNode node, long lastUpdateTime) {
- HashMap<String, Object> nodeObj = new HashMap<String,Object>();
- if (node instanceof ExecutableFlowBase) {
- ExecutableFlowBase base = (ExecutableFlowBase)node;
- ArrayList<Map<String, Object>> nodeList = new ArrayList<Map<String, Object>>();
-
- for (ExecutableNode subNode: base.getExecutableNodes()) {
- Map<String,Object> subNodeObj = getExecutableFlowUpdateInfo(subNode, lastUpdateTime);
- if (!subNodeObj.isEmpty()) {
- nodeList.add(subNodeObj);
- }
- }
-
- if (!nodeList.isEmpty()) {
- nodeObj.put("flow", base.getFlowId());
- nodeObj.put("nodes", nodeList);
- }
- }
-
- if (node.getUpdateTime() > lastUpdateTime || !nodeObj.isEmpty()) {
- nodeObj.put("id", node.getId());
- nodeObj.put("status", node.getStatus());
- nodeObj.put("startTime", node.getStartTime());
- nodeObj.put("endTime", node.getEndTime());
- nodeObj.put("updateTime", node.getUpdateTime());
-
- nodeObj.put("attempt", node.getAttempt());
- if (node.getAttempt() > 0) {
- nodeObj.put("pastAttempts", node.getAttemptObjects());
- }
- }
-
- return nodeObj;
- }
-
- private Map<String,Object> getExecutableNodeInfo(ExecutableNode node) {
- HashMap<String, Object> nodeObj = new HashMap<String,Object>();
- nodeObj.put("id", node.getId());
- nodeObj.put("status", node.getStatus());
- nodeObj.put("startTime", node.getStartTime());
- nodeObj.put("endTime", node.getEndTime());
- nodeObj.put("updateTime", node.getUpdateTime());
- nodeObj.put("type", node.getType());
- nodeObj.put("nestedId", node.getNestedId());
-
- nodeObj.put("attempt", node.getAttempt());
- if (node.getAttempt() > 0) {
- nodeObj.put("pastAttempts", node.getAttemptObjects());
- }
-
- if (node.getInNodes() != null && !node.getInNodes().isEmpty()) {
- nodeObj.put("in", node.getInNodes());
- }
-
- if (node instanceof ExecutableFlowBase) {
- ExecutableFlowBase base = (ExecutableFlowBase)node;
- ArrayList<Map<String, Object>> nodeList = new ArrayList<Map<String, Object>>();
-
- for (ExecutableNode subNode: base.getExecutableNodes()) {
- Map<String,Object> subNodeObj = getExecutableNodeInfo(subNode);
- if (!subNodeObj.isEmpty()) {
- nodeList.add(subNodeObj);
- }
- }
-
- nodeObj.put("flow", base.getFlowId());
- nodeObj.put("nodes", nodeList);
- nodeObj.put("flowId", base.getFlowId());
- }
-
- return nodeObj;
- }
-
- private void ajaxFetchExecutableFlowUpdate(
- HttpServletRequest req,
- HttpServletResponse resp,
- HashMap<String, Object> ret,
- User user,
- ExecutableFlow exFlow) throws ServletException {
- Long lastUpdateTime = Long.parseLong(getParam(req, "lastUpdateTime"));
- System.out.println("Fetching " + exFlow.getExecutionId());
-
- Project project = getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
- if (project == null) {
- return;
- }
-
- Map<String, Object> map = getExecutableFlowUpdateInfo(exFlow, lastUpdateTime);
- map.put("status", exFlow.getStatus());
- map.put("startTime", exFlow.getStartTime());
- map.put("endTime", exFlow.getEndTime());
- map.put("updateTime", exFlow.getUpdateTime());
- ret.putAll(map);
- }
-
- private void ajaxFetchExecutableFlow(
- HttpServletRequest req,
- HttpServletResponse resp,
- HashMap<String, Object> ret,
- User user,
- ExecutableFlow exFlow) throws ServletException {
- System.out.println("Fetching " + exFlow.getExecutionId());
-
- Project project = getProjectAjaxByPermission(ret,
- exFlow.getProjectId(), user, Type.READ);
- if (project == null) {
- return;
- }
-
- ret.put("submitTime", exFlow.getSubmitTime());
- ret.put("submitUser", exFlow.getSubmitUser());
- ret.put("execid", exFlow.getExecutionId());
- ret.put("projectId", exFlow.getProjectId());
- ret.put("project", project.getName());
-
- Map<String,Object> flowObj = getExecutableNodeInfo(exFlow);
- ret.putAll(flowObj);
- }
-
- private void ajaxAttemptExecuteFlow(HttpServletRequest req,
- HttpServletResponse resp, HashMap<String, Object> ret, User user)
- throws ServletException {
- String projectName = getParam(req, "project");
- String flowId = getParam(req, "flow");
-
- Project project = getProjectAjaxByPermission(ret, projectName, user, Type.EXECUTE);
- if (project == null) {
- ret.put("error", "Project '" + projectName + "' doesn't exist.");
- return;
- }
-
- ret.put("flow", flowId);
- Flow flow = project.getFlow(flowId);
- if (flow == null) {
- ret.put("error", "Flow '" + flowId + "' cannot be found in project " + project);
- return;
- }
-
- ajaxExecuteFlow(req, resp, ret, user);
- }
-
- private void ajaxExecuteFlow(HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> ret, User user) throws ServletException {
- String projectName = getParam(req, "project");
- String flowId = getParam(req, "flow");
-
- Project project = getProjectAjaxByPermission(ret, projectName, user, Type.EXECUTE);
- if (project == null) {
- ret.put("error", "Project '" + projectName + "' doesn't exist.");
- return;
- }
-
- ret.put("flow", flowId);
- Flow flow = project.getFlow(flowId);
- if (flow == null) {
- ret.put("error", "Flow '" + flowId + "' cannot be found in project " + project);
- return;
- }
-
- ExecutableFlow exflow = new ExecutableFlow(project, flow);
- exflow.setSubmitUser(user.getUserId());
- exflow.addAllProxyUsers(project.getProxyUsers());
-
- ExecutionOptions options = HttpRequestUtils.parseFlowOptions(req);
- exflow.setExecutionOptions(options);
- if (!options.isFailureEmailsOverridden()) {
- options.setFailureEmails(flow.getFailureEmails());
- }
- if (!options.isSuccessEmailsOverridden()) {
- options.setSuccessEmails(flow.getSuccessEmails());
- }
- options.setMailCreator(flow.getMailCreator());
-
- try {
- String message = executorManager.submitExecutableFlow(exflow, user.getUserId());
- ret.put("message", message);
- }
- catch (ExecutorManagerException e) {
- e.printStackTrace();
- ret.put("error", "Error submitting flow " + exflow.getFlowId() + ". " + e.getMessage());
- }
-
- ret.put("execid", exflow.getExecutionId());
- }
-
- public class ExecutorVelocityHelper {
- public String getProjectName(int id) {
- Project project = projectManager.getProject(id);
- if (project == null) {
- return String.valueOf(id);
- }
-
- return project.getName();
- }
- }
+ private static final long serialVersionUID = 1L;
+ private ProjectManager projectManager;
+ private ExecutorManagerAdapter executorManager;
+ private ScheduleManager scheduleManager;
+ private ExecutorVelocityHelper velocityHelper;
+
+ @Override
+ public void init(ServletConfig config) throws ServletException {
+ super.init(config);
+ AzkabanWebServer server = (AzkabanWebServer) getApplication();
+ projectManager = server.getProjectManager();
+ executorManager = server.getExecutorManager();
+ scheduleManager = server.getScheduleManager();
+ velocityHelper = new ExecutorVelocityHelper();
+ }
+
+ @Override
+ protected void handleGet(HttpServletRequest req, HttpServletResponse resp,
+ Session session) throws ServletException, IOException {
+ if (hasParam(req, "ajax")) {
+ handleAJAXAction(req, resp, session);
+ } else if (hasParam(req, "execid")) {
+ if (hasParam(req, "job")) {
+ handleExecutionJobDetailsPage(req, resp, session);
+ } else {
+ handleExecutionFlowPage(req, resp, session);
+ }
+ } else {
+ handleExecutionsPage(req, resp, session);
+ }
+ }
+
+ private void handleAJAXAction(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException,
+ IOException {
+ HashMap<String, Object> ret = new HashMap<String, Object>();
+ String ajaxName = getParam(req, "ajax");
+
+ if (hasParam(req, "execid")) {
+ int execid = getIntParam(req, "execid");
+ ExecutableFlow exFlow = null;
+
+ try {
+ exFlow = executorManager.getExecutableFlow(execid);
+ } catch (ExecutorManagerException e) {
+ ret.put("error",
+ "Error fetching execution '" + execid + "': " + e.getMessage());
+ }
+
+ if (exFlow == null) {
+ ret.put("error", "Cannot find execution '" + execid + "'");
+ } else {
+ if (ajaxName.equals("fetchexecflow")) {
+ ajaxFetchExecutableFlow(req, resp, ret, session.getUser(), exFlow);
+ } else if (ajaxName.equals("fetchexecflowupdate")) {
+ ajaxFetchExecutableFlowUpdate(req, resp, ret, session.getUser(),
+ exFlow);
+ } else if (ajaxName.equals("cancelFlow")) {
+ ajaxCancelFlow(req, resp, ret, session.getUser(), exFlow);
+ } else if (ajaxName.equals("pauseFlow")) {
+ ajaxPauseFlow(req, resp, ret, session.getUser(), exFlow);
+ } else if (ajaxName.equals("resumeFlow")) {
+ ajaxResumeFlow(req, resp, ret, session.getUser(), exFlow);
+ } else if (ajaxName.equals("fetchExecFlowLogs")) {
+ ajaxFetchExecFlowLogs(req, resp, ret, session.getUser(), exFlow);
+ } else if (ajaxName.equals("fetchExecJobLogs")) {
+ ajaxFetchJobLogs(req, resp, ret, session.getUser(), exFlow);
+ } else if (ajaxName.equals("fetchExecJobStats")) {
+ ajaxFetchJobStats(req, resp, ret, session.getUser(), exFlow);
+ } else if (ajaxName.equals("retryFailedJobs")) {
+ ajaxRestartFailed(req, resp, ret, session.getUser(), exFlow);
+ }
+ // else if (ajaxName.equals("fetchLatestJobStatus")) {
+ // ajaxFetchLatestJobStatus(req, resp, ret, session.getUser(), exFlow);
+ // }
+ else if (ajaxName.equals("flowInfo")) {
+ // String projectName = getParam(req, "project");
+ // Project project = projectManager.getProject(projectName);
+ // String flowName = getParam(req, "flow");
+ ajaxFetchExecutableFlowInfo(req, resp, ret, session.getUser(), exFlow);
+ }
+ }
+ } else if (ajaxName.equals("getRunning")) {
+ String projectName = getParam(req, "project");
+ String flowName = getParam(req, "flow");
+ ajaxGetFlowRunning(req, resp, ret, session.getUser(), projectName,
+ flowName);
+ } else if (ajaxName.equals("flowInfo")) {
+ String projectName = getParam(req, "project");
+ String flowName = getParam(req, "flow");
+ ajaxFetchFlowInfo(req, resp, ret, session.getUser(), projectName,
+ flowName);
+ } else {
+ String projectName = getParam(req, "project");
+
+ ret.put("project", projectName);
+ if (ajaxName.equals("executeFlow")) {
+ ajaxAttemptExecuteFlow(req, resp, ret, session.getUser());
+ }
+ }
+ if (ret != null) {
+ this.writeJSON(resp, ret);
+ }
+ }
+
+ @Override
+ protected void handlePost(HttpServletRequest req, HttpServletResponse resp,
+ Session session) throws ServletException, IOException {
+ if (hasParam(req, "ajax")) {
+ handleAJAXAction(req, resp, session);
+ }
+ }
+
+ private void handleExecutionJobDetailsPage(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException,
+ IOException {
+ Page page =
+ newPage(req, resp, session,
+ "azkaban/webapp/servlet/velocity/jobdetailspage.vm");
+ User user = session.getUser();
+ int execId = getIntParam(req, "execid");
+ String jobId = getParam(req, "job");
+ int attempt = getIntParam(req, "attempt", 0);
+ page.add("execid", execId);
+ page.add("jobid", jobId);
+ page.add("attempt", attempt);
+
+ ExecutableFlow flow = null;
+ ExecutableNode node = null;
+ try {
+ flow = executorManager.getExecutableFlow(execId);
+ if (flow == null) {
+ page.add("errorMsg", "Error loading executing flow " + execId
+ + ": not found.");
+ page.render();
+ return;
+ }
+
+ node = flow.getExecutableNodePath(jobId);
+ if (node == null) {
+ page.add("errorMsg",
+ "Job " + jobId + " doesn't exist in " + flow.getExecutionId());
+ return;
+ }
+
+ List<ViewerPlugin> jobViewerPlugins =
+ PluginRegistry.getRegistry().getViewerPluginsForJobType(
+ node.getType());
+ page.add("jobViewerPlugins", jobViewerPlugins);
+ } catch (ExecutorManagerException e) {
+ page.add("errorMsg", "Error loading executing flow: " + e.getMessage());
+ page.render();
+ return;
+ }
+
+ int projectId = flow.getProjectId();
+ Project project =
+ getProjectPageByPermission(page, projectId, user, Type.READ);
+ if (project == null) {
+ page.render();
+ return;
+ }
+
+ page.add("projectName", project.getName());
+ page.add("flowid", flow.getId());
+ page.add("parentflowid", node.getParentFlow().getFlowId());
+ page.add("jobname", node.getId());
+
+ page.render();
+ }
+
+ private void handleExecutionsPage(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException,
+ IOException {
+ Page page =
+ newPage(req, resp, session,
+ "azkaban/webapp/servlet/velocity/executionspage.vm");
+
+ List<ExecutableFlow> runningFlows = executorManager.getRunningFlows();
+ page.add("runningFlows", runningFlows.isEmpty() ? null : runningFlows);
+
+ List<ExecutableFlow> finishedFlows =
+ executorManager.getRecentlyFinishedFlows();
+ page.add("recentlyFinished", finishedFlows.isEmpty() ? null : finishedFlows);
+ page.add("vmutils", velocityHelper);
+ page.render();
+ }
+
+ private void handleExecutionFlowPage(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException,
+ IOException {
+ Page page =
+ newPage(req, resp, session,
+ "azkaban/webapp/servlet/velocity/executingflowpage.vm");
+ User user = session.getUser();
+ int execId = getIntParam(req, "execid");
+ page.add("execid", execId);
+
+ ExecutableFlow flow = null;
+ try {
+ flow = executorManager.getExecutableFlow(execId);
+ if (flow == null) {
+ page.add("errorMsg", "Error loading executing flow " + execId
+ + " not found.");
+ page.render();
+ return;
+ }
+ } catch (ExecutorManagerException e) {
+ page.add("errorMsg", "Error loading executing flow: " + e.getMessage());
+ page.render();
+ return;
+ }
+
+ int projectId = flow.getProjectId();
+ Project project =
+ getProjectPageByPermission(page, projectId, user, Type.READ);
+ if (project == null) {
+ page.render();
+ return;
+ }
+
+ page.add("projectId", project.getId());
+ page.add("projectName", project.getName());
+ page.add("flowid", flow.getFlowId());
+
+ page.render();
+ }
+
+ protected Project getProjectPageByPermission(Page page, int projectId,
+ User user, Permission.Type type) {
+ Project project = projectManager.getProject(projectId);
+
+ if (project == null) {
+ page.add("errorMsg", "Project " + project + " not found.");
+ } else if (!hasPermission(project, user, type)) {
+ page.add("errorMsg",
+ "User " + user.getUserId() + " doesn't have " + type.name()
+ + " permissions on " + project.getName());
+ } else {
+ return project;
+ }
+
+ return null;
+ }
+
+ protected Project getProjectAjaxByPermission(Map<String, Object> ret,
+ String projectName, User user, Permission.Type type) {
+ Project project = projectManager.getProject(projectName);
+
+ if (project == null) {
+ ret.put("error", "Project '" + project + "' not found.");
+ } else if (!hasPermission(project, user, type)) {
+ ret.put("error",
+ "User '" + user.getUserId() + "' doesn't have " + type.name()
+ + " permissions on " + project.getName());
+ } else {
+ return project;
+ }
+
+ return null;
+ }
+
+ protected Project getProjectAjaxByPermission(Map<String, Object> ret,
+ int projectId, User user, Permission.Type type) {
+ Project project = projectManager.getProject(projectId);
+
+ if (project == null) {
+ ret.put("error", "Project '" + project + "' not found.");
+ } else if (!hasPermission(project, user, type)) {
+ ret.put("error",
+ "User '" + user.getUserId() + "' doesn't have " + type.name()
+ + " permissions on " + project.getName());
+ } else {
+ return project;
+ }
+
+ return null;
+ }
+
+ // private void ajaxFetchLatestJobStatus(HttpServletRequest
+ // req,HttpServletResponse resp, HashMap<String, Object> ret, User user,
+ // ExecutableFlow exFlow) {
+ // Project project = getProjectAjaxByPermission(ret, exFlow.getProjectId(),
+ // user, Type.READ);
+ // if (project == null) {
+ // ret.put("error", "Project doesn't exist or incorrect access permission.");
+ // return;
+ // }
+ //
+ // String projectName;
+ // String flowName;
+ // String jobName;
+ // try {
+ // projectName = getParam(req, "projectName");
+ // flowName = getParam(req, "flowName");
+ // jobName = getParam(req, "jobName");
+ // } catch (Exception e) {
+ // ret.put("error", e.getMessage());
+ // return;
+ // }
+ //
+ // try {
+ // ExecutableNode node = exFlow.getExecutableNode(jobId);
+ // if (node == null) {
+ // ret.put("error", "Job " + jobId + " doesn't exist in " +
+ // exFlow.getExecutionId());
+ // return;
+ // }
+ //
+ // int attempt = this.getIntParam(req, "attempt", node.getAttempt());
+ // LogData data = executorManager.getExecutionJobLog(exFlow, jobId, offset,
+ // length, attempt);
+ // if (data == null) {
+ // ret.put("length", 0);
+ // ret.put("offset", offset);
+ // ret.put("data", "");
+ // }
+ // else {
+ // ret.put("length", data.getLength());
+ // ret.put("offset", data.getOffset());
+ // ret.put("data", data.getData());
+ // }
+ // } catch (ExecutorManagerException e) {
+ // throw new ServletException(e);
+ // }
+ //
+ // }
+
+ private void ajaxRestartFailed(HttpServletRequest req,
+ HttpServletResponse resp, HashMap<String, Object> ret, User user,
+ ExecutableFlow exFlow) throws ServletException {
+ Project project =
+ getProjectAjaxByPermission(ret, exFlow.getProjectId(), user,
+ Type.EXECUTE);
+ if (project == null) {
+ return;
+ }
+
+ if (exFlow.getStatus() == Status.FAILED
+ || exFlow.getStatus() == Status.SUCCEEDED) {
+ ret.put("error", "Flow has already finished. Please re-execute.");
+ return;
+ }
+
+ try {
+ executorManager.retryFailures(exFlow, user.getUserId());
+ } catch (ExecutorManagerException e) {
+ ret.put("error", e.getMessage());
+ }
+ }
+
+ /**
+ * Gets the logs through plain text stream to reduce memory overhead.
+ *
+ * @param req
+ * @param resp
+ * @param user
+ * @param exFlow
+ * @throws ServletException
+ */
+ private void ajaxFetchExecFlowLogs(HttpServletRequest req,
+ HttpServletResponse resp, HashMap<String, Object> ret, User user,
+ ExecutableFlow exFlow) throws ServletException {
+ Project project =
+ getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
+ if (project == null) {
+ return;
+ }
+
+ int offset = this.getIntParam(req, "offset");
+ int length = this.getIntParam(req, "length");
+
+ resp.setCharacterEncoding("utf-8");
+
+ try {
+ LogData data =
+ executorManager.getExecutableFlowLog(exFlow, offset, length);
+ if (data == null) {
+ ret.put("length", 0);
+ ret.put("offset", offset);
+ ret.put("data", "");
+ } else {
+ ret.put("length", data.getLength());
+ ret.put("offset", data.getOffset());
+ ret.put("data", data.getData());
+ }
+ } catch (ExecutorManagerException e) {
+ throw new ServletException(e);
+ }
+ }
+
+ /**
+ * Gets the logs through ajax plain text stream to reduce memory overhead.
+ *
+ * @param req
+ * @param resp
+ * @param user
+ * @param exFlow
+ * @throws ServletException
+ */
+ private void ajaxFetchJobLogs(HttpServletRequest req,
+ HttpServletResponse resp, HashMap<String, Object> ret, User user,
+ ExecutableFlow exFlow) throws ServletException {
+ Project project =
+ getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
+ if (project == null) {
+ return;
+ }
+
+ int offset = this.getIntParam(req, "offset");
+ int length = this.getIntParam(req, "length");
+
+ String jobId = this.getParam(req, "jobId");
+ resp.setCharacterEncoding("utf-8");
+
+ try {
+ ExecutableNode node = exFlow.getExecutableNodePath(jobId);
+ if (node == null) {
+ ret.put("error",
+ "Job " + jobId + " doesn't exist in " + exFlow.getExecutionId());
+ return;
+ }
+
+ int attempt = this.getIntParam(req, "attempt", node.getAttempt());
+ LogData data =
+ executorManager.getExecutionJobLog(exFlow, jobId, offset, length,
+ attempt);
+ if (data == null) {
+ ret.put("length", 0);
+ ret.put("offset", offset);
+ ret.put("data", "");
+ } else {
+ ret.put("length", data.getLength());
+ ret.put("offset", data.getOffset());
+ ret.put("data", data.getData());
+ }
+ } catch (ExecutorManagerException e) {
+ throw new ServletException(e);
+ }
+ }
+
+ private void ajaxFetchJobStats(HttpServletRequest req,
+ HttpServletResponse resp, HashMap<String, Object> ret, User user,
+ ExecutableFlow exFlow) throws ServletException {
+ Project project =
+ getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
+ if (project == null) {
+ return;
+ }
+
+ String jobId = this.getParam(req, "jobid");
+ resp.setCharacterEncoding("utf-8");
+
+ try {
+ ExecutableNode node = exFlow.getExecutableNodePath(jobId);
+ if (node == null) {
+ ret.put("error",
+ "Job " + jobId + " doesn't exist in " + exFlow.getExecutionId());
+ return;
+ }
+
+ List<Object> jsonObj =
+ executorManager
+ .getExecutionJobStats(exFlow, jobId, node.getAttempt());
+ ret.put("jobStats", jsonObj);
+ } catch (ExecutorManagerException e) {
+ ret.put("error", "Error retrieving stats for job " + jobId);
+ return;
+ }
+ }
+
+ private void ajaxFetchFlowInfo(HttpServletRequest req,
+ HttpServletResponse resp, HashMap<String, Object> ret, User user,
+ String projectName, String flowId) throws ServletException {
+ Project project =
+ getProjectAjaxByPermission(ret, projectName, user, Type.READ);
+ if (project == null) {
+ return;
+ }
+
+ Flow flow = project.getFlow(flowId);
+ if (flow == null) {
+ ret.put("error", "Error loading flow. Flow " + flowId
+ + " doesn't exist in " + projectName);
+ return;
+ }
+
+ ret.put("successEmails", flow.getSuccessEmails());
+ ret.put("failureEmails", flow.getFailureEmails());
+
+ Schedule sflow = null;
+ try {
+ for (Schedule sched : scheduleManager.getSchedules()) {
+ if (sched.getProjectId() == project.getId()
+ && sched.getFlowName().equals(flowId)) {
+ sflow = sched;
+ break;
+ }
+ }
+ } catch (ScheduleManagerException e) {
+ // TODO Auto-generated catch block
+ throw new ServletException(e);
+ }
+
+ if (sflow != null) {
+ ret.put("scheduled", sflow.getNextExecTime());
+ }
+ }
+
+ private void ajaxFetchExecutableFlowInfo(HttpServletRequest req,
+ HttpServletResponse resp, HashMap<String, Object> ret, User user,
+ ExecutableFlow exflow) throws ServletException {
+ Project project =
+ getProjectAjaxByPermission(ret, exflow.getProjectId(), user, Type.READ);
+ if (project == null) {
+ return;
+ }
+
+ Flow flow = project.getFlow(exflow.getFlowId());
+ if (flow == null) {
+ ret.put("error", "Error loading flow. Flow " + exflow.getFlowId()
+ + " doesn't exist in " + exflow.getProjectId());
+ return;
+ }
+
+ ExecutionOptions options = exflow.getExecutionOptions();
+
+ ret.put("successEmails", options.getSuccessEmails());
+ ret.put("failureEmails", options.getFailureEmails());
+ ret.put("flowParam", options.getFlowParameters());
+
+ FailureAction action = options.getFailureAction();
+ String failureAction = null;
+ switch (action) {
+ case FINISH_CURRENTLY_RUNNING:
+ failureAction = "finishCurrent";
+ break;
+ case CANCEL_ALL:
+ failureAction = "cancelImmediately";
+ break;
+ case FINISH_ALL_POSSIBLE:
+ failureAction = "finishPossible";
+ break;
+ }
+ ret.put("failureAction", failureAction);
+
+ ret.put("notifyFailureFirst", options.getNotifyOnFirstFailure());
+ ret.put("notifyFailureLast", options.getNotifyOnLastFailure());
+
+ ret.put("failureEmailsOverride", options.isFailureEmailsOverridden());
+ ret.put("successEmailsOverride", options.isSuccessEmailsOverridden());
+
+ ret.put("concurrentOptions", options.getConcurrentOption());
+ ret.put("pipelineLevel", options.getPipelineLevel());
+ ret.put("pipelineExecution", options.getPipelineExecutionId());
+ ret.put("queueLevel", options.getQueueLevel());
+
+ HashMap<String, String> nodeStatus = new HashMap<String, String>();
+ for (ExecutableNode node : exflow.getExecutableNodes()) {
+ nodeStatus.put(node.getId(), node.getStatus().toString());
+ }
+ ret.put("nodeStatus", nodeStatus);
+ ret.put("disabled", options.getDisabledJobs());
+ }
+
+ private void ajaxCancelFlow(HttpServletRequest req, HttpServletResponse resp,
+ HashMap<String, Object> ret, User user, ExecutableFlow exFlow)
+ throws ServletException {
+ Project project =
+ getProjectAjaxByPermission(ret, exFlow.getProjectId(), user,
+ Type.EXECUTE);
+ if (project == null) {
+ return;
+ }
+
+ try {
+ executorManager.cancelFlow(exFlow, user.getUserId());
+ } catch (ExecutorManagerException e) {
+ ret.put("error", e.getMessage());
+ }
+ }
+
+ private void ajaxGetFlowRunning(HttpServletRequest req,
+ HttpServletResponse resp, HashMap<String, Object> ret, User user,
+ String projectId, String flowId) throws ServletException {
+ Project project =
+ getProjectAjaxByPermission(ret, projectId, user, Type.EXECUTE);
+ if (project == null) {
+ return;
+ }
+
+ List<Integer> refs =
+ executorManager.getRunningFlows(project.getId(), flowId);
+ if (!refs.isEmpty()) {
+ ret.put("execIds", refs);
+ }
+ }
+
+ private void ajaxPauseFlow(HttpServletRequest req, HttpServletResponse resp,
+ HashMap<String, Object> ret, User user, ExecutableFlow exFlow)
+ throws ServletException {
+ Project project =
+ getProjectAjaxByPermission(ret, exFlow.getProjectId(), user,
+ Type.EXECUTE);
+ if (project == null) {
+ return;
+ }
+
+ try {
+ executorManager.pauseFlow(exFlow, user.getUserId());
+ } catch (ExecutorManagerException e) {
+ ret.put("error", e.getMessage());
+ }
+ }
+
+ private void ajaxResumeFlow(HttpServletRequest req, HttpServletResponse resp,
+ HashMap<String, Object> ret, User user, ExecutableFlow exFlow)
+ throws ServletException {
+ Project project =
+ getProjectAjaxByPermission(ret, exFlow.getProjectId(), user,
+ Type.EXECUTE);
+ if (project == null) {
+ return;
+ }
+
+ try {
+ executorManager.resumeFlow(exFlow, user.getUserId());
+ } catch (ExecutorManagerException e) {
+ ret.put("resume", e.getMessage());
+ }
+ }
+
+ private Map<String, Object> getExecutableFlowUpdateInfo(ExecutableNode node,
+ long lastUpdateTime) {
+ HashMap<String, Object> nodeObj = new HashMap<String, Object>();
+ if (node instanceof ExecutableFlowBase) {
+ ExecutableFlowBase base = (ExecutableFlowBase) node;
+ ArrayList<Map<String, Object>> nodeList =
+ new ArrayList<Map<String, Object>>();
+
+ for (ExecutableNode subNode : base.getExecutableNodes()) {
+ Map<String, Object> subNodeObj =
+ getExecutableFlowUpdateInfo(subNode, lastUpdateTime);
+ if (!subNodeObj.isEmpty()) {
+ nodeList.add(subNodeObj);
+ }
+ }
+
+ if (!nodeList.isEmpty()) {
+ nodeObj.put("flow", base.getFlowId());
+ nodeObj.put("nodes", nodeList);
+ }
+ }
+
+ if (node.getUpdateTime() > lastUpdateTime || !nodeObj.isEmpty()) {
+ nodeObj.put("id", node.getId());
+ nodeObj.put("status", node.getStatus());
+ nodeObj.put("startTime", node.getStartTime());
+ nodeObj.put("endTime", node.getEndTime());
+ nodeObj.put("updateTime", node.getUpdateTime());
+
+ nodeObj.put("attempt", node.getAttempt());
+ if (node.getAttempt() > 0) {
+ nodeObj.put("pastAttempts", node.getAttemptObjects());
+ }
+ }
+
+ return nodeObj;
+ }
+
+ private Map<String, Object> getExecutableNodeInfo(ExecutableNode node) {
+ HashMap<String, Object> nodeObj = new HashMap<String, Object>();
+ nodeObj.put("id", node.getId());
+ nodeObj.put("status", node.getStatus());
+ nodeObj.put("startTime", node.getStartTime());
+ nodeObj.put("endTime", node.getEndTime());
+ nodeObj.put("updateTime", node.getUpdateTime());
+ nodeObj.put("type", node.getType());
+ nodeObj.put("nestedId", node.getNestedId());
+
+ nodeObj.put("attempt", node.getAttempt());
+ if (node.getAttempt() > 0) {
+ nodeObj.put("pastAttempts", node.getAttemptObjects());
+ }
+
+ if (node.getInNodes() != null && !node.getInNodes().isEmpty()) {
+ nodeObj.put("in", node.getInNodes());
+ }
+
+ if (node instanceof ExecutableFlowBase) {
+ ExecutableFlowBase base = (ExecutableFlowBase) node;
+ ArrayList<Map<String, Object>> nodeList =
+ new ArrayList<Map<String, Object>>();
+
+ for (ExecutableNode subNode : base.getExecutableNodes()) {
+ Map<String, Object> subNodeObj = getExecutableNodeInfo(subNode);
+ if (!subNodeObj.isEmpty()) {
+ nodeList.add(subNodeObj);
+ }
+ }
+
+ nodeObj.put("flow", base.getFlowId());
+ nodeObj.put("nodes", nodeList);
+ nodeObj.put("flowId", base.getFlowId());
+ }
+
+ return nodeObj;
+ }
+
+ private void ajaxFetchExecutableFlowUpdate(HttpServletRequest req,
+ HttpServletResponse resp, HashMap<String, Object> ret, User user,
+ ExecutableFlow exFlow) throws ServletException {
+ Long lastUpdateTime = Long.parseLong(getParam(req, "lastUpdateTime"));
+ System.out.println("Fetching " + exFlow.getExecutionId());
+
+ Project project =
+ getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
+ if (project == null) {
+ return;
+ }
+
+ Map<String, Object> map =
+ getExecutableFlowUpdateInfo(exFlow, lastUpdateTime);
+ map.put("status", exFlow.getStatus());
+ map.put("startTime", exFlow.getStartTime());
+ map.put("endTime", exFlow.getEndTime());
+ map.put("updateTime", exFlow.getUpdateTime());
+ ret.putAll(map);
+ }
+
+ private void ajaxFetchExecutableFlow(HttpServletRequest req,
+ HttpServletResponse resp, HashMap<String, Object> ret, User user,
+ ExecutableFlow exFlow) throws ServletException {
+ System.out.println("Fetching " + exFlow.getExecutionId());
+
+ Project project =
+ getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
+ if (project == null) {
+ return;
+ }
+
+ ret.put("submitTime", exFlow.getSubmitTime());
+ ret.put("submitUser", exFlow.getSubmitUser());
+ ret.put("execid", exFlow.getExecutionId());
+ ret.put("projectId", exFlow.getProjectId());
+ ret.put("project", project.getName());
+
+ Map<String, Object> flowObj = getExecutableNodeInfo(exFlow);
+ ret.putAll(flowObj);
+ }
+
+ private void ajaxAttemptExecuteFlow(HttpServletRequest req,
+ HttpServletResponse resp, HashMap<String, Object> ret, User user)
+ throws ServletException {
+ String projectName = getParam(req, "project");
+ String flowId = getParam(req, "flow");
+
+ Project project =
+ getProjectAjaxByPermission(ret, projectName, user, Type.EXECUTE);
+ if (project == null) {
+ ret.put("error", "Project '" + projectName + "' doesn't exist.");
+ return;
+ }
+
+ ret.put("flow", flowId);
+ Flow flow = project.getFlow(flowId);
+ if (flow == null) {
+ ret.put("error", "Flow '" + flowId + "' cannot be found in project "
+ + project);
+ return;
+ }
+
+ ajaxExecuteFlow(req, resp, ret, user);
+ }
+
+ private void ajaxExecuteFlow(HttpServletRequest req,
+ HttpServletResponse resp, HashMap<String, Object> ret, User user)
+ throws ServletException {
+ String projectName = getParam(req, "project");
+ String flowId = getParam(req, "flow");
+
+ Project project =
+ getProjectAjaxByPermission(ret, projectName, user, Type.EXECUTE);
+ if (project == null) {
+ ret.put("error", "Project '" + projectName + "' doesn't exist.");
+ return;
+ }
+
+ ret.put("flow", flowId);
+ Flow flow = project.getFlow(flowId);
+ if (flow == null) {
+ ret.put("error", "Flow '" + flowId + "' cannot be found in project "
+ + project);
+ return;
+ }
+
+ ExecutableFlow exflow = new ExecutableFlow(project, flow);
+ exflow.setSubmitUser(user.getUserId());
+ exflow.addAllProxyUsers(project.getProxyUsers());
+
+ ExecutionOptions options = HttpRequestUtils.parseFlowOptions(req);
+ exflow.setExecutionOptions(options);
+ if (!options.isFailureEmailsOverridden()) {
+ options.setFailureEmails(flow.getFailureEmails());
+ }
+ if (!options.isSuccessEmailsOverridden()) {
+ options.setSuccessEmails(flow.getSuccessEmails());
+ }
+ options.setMailCreator(flow.getMailCreator());
+
+ try {
+ String message =
+ executorManager.submitExecutableFlow(exflow, user.getUserId());
+ ret.put("message", message);
+ } catch (ExecutorManagerException e) {
+ e.printStackTrace();
+ ret.put("error",
+ "Error submitting flow " + exflow.getFlowId() + ". " + e.getMessage());
+ }
+
+ ret.put("execid", exflow.getExecutionId());
+ }
+
+ public class ExecutorVelocityHelper {
+ public String getProjectName(int id) {
+ Project project = projectManager.getProject(id);
+ if (project == null) {
+ return String.valueOf(id);
+ }
+
+ return project.getName();
+ }
+ }
}
src/main/java/azkaban/webapp/servlet/HistoryServlet.java 549(+286 -263)
diff --git a/src/main/java/azkaban/webapp/servlet/HistoryServlet.java b/src/main/java/azkaban/webapp/servlet/HistoryServlet.java
index a46d645..615ec60 100644
--- a/src/main/java/azkaban/webapp/servlet/HistoryServlet.java
+++ b/src/main/java/azkaban/webapp/servlet/HistoryServlet.java
@@ -26,10 +26,8 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-
import org.joda.time.format.DateTimeFormat;
-
import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutorManagerAdapter;
import azkaban.executor.ExecutorManagerException;
@@ -40,265 +38,290 @@ import azkaban.webapp.session.Session;
public class HistoryServlet extends LoginAbstractAzkabanServlet {
- private static final long serialVersionUID = 1L;
- private ExecutorManagerAdapter executorManager;
- private ProjectManager projectManager;
- private ExecutorVMHelper vmHelper;
-
- @Override
- public void init(ServletConfig config) throws ServletException {
- super.init(config);
- AzkabanWebServer server = (AzkabanWebServer)getApplication();
- executorManager = server.getExecutorManager();
- projectManager = server.getProjectManager();
- vmHelper = new ExecutorVMHelper();
- }
-
- @Override
- protected void handleGet(HttpServletRequest req, HttpServletResponse resp,
- Session session) throws ServletException, IOException {
-
- if (hasParam(req, "ajax")) {
- handleAJAXAction(req, resp, session);
- }
- else if (hasParam(req, "days")) {
- handleHistoryDayPage(req, resp, session);
- }
- else if (hasParam(req, "timeline")) {
- handleHistoryTimelinePage(req, resp, session);
- }
- else {
- handleHistoryPage(req, resp, session);
- }
- }
-
- private void handleAJAXAction(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- HashMap<String, Object> ret = new HashMap<String, Object>();
- String ajaxName = getParam(req, "ajax");
-
- if (ajaxName.equals("fetch")) {
- fetchHistoryData(req, resp, ret);
- }
-
- if (ret != null) {
- this.writeJSON(resp, ret);
- }
- }
-
- private void fetchHistoryData(HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> ret) throws ServletException {
-// long start = getLongParam(req, "start");
-// long end = getLongParam(req, "end");
-//
-// ret.put("start", start);
-// ret.put("end", end);
-//
-// List<ExecutionReference> refs = executorManager.getFlowHistory(start, end);
-// ArrayList<Object> refList = new ArrayList<Object>();
-// for (ExecutionReference ref: refs) {
-//
-// HashMap<String,Object> refObj = new HashMap<String,Object>();
-// refObj.put("execId", ref.getExecId());
-// refObj.put("start", ref.getStartTime());
-// refObj.put("end", ref.getEndTime());
-// refObj.put("status", ref.getStatus().toString());
-//
-// refList.add(refObj);
-// }
-//
-// ret.put("data", refList);
- }
-
- private void handleHistoryPage(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException {
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/historypage.vm");
- int pageNum = getIntParam(req, "page", 1);
- int pageSize = getIntParam(req, "size", 16);
- page.add("vmutils", vmHelper);
-
- if (pageNum < 0) {
- pageNum = 1;
- }
- List<ExecutableFlow> history = null;
- if(hasParam(req, "advfilter")) {
- String projContain = getParam(req, "projcontain");
- String flowContain = getParam(req, "flowcontain");
- String userContain = getParam(req, "usercontain");
- int status = getIntParam(req, "status");
- String begin = getParam(req, "begin");
- long beginTime = begin == "" ? -1 : DateTimeFormat.forPattern("MM/dd/yyyy-HH:mm").parseDateTime(begin).getMillis();
- String end = getParam(req, "end");
- long endTime = end == "" ? -1 : DateTimeFormat.forPattern("MM/dd/yyyy-HH:mm").parseDateTime(end).getMillis();
- try {
- history = executorManager.getExecutableFlows(projContain, flowContain, userContain, status, beginTime, endTime, (pageNum - 1)*pageSize, pageSize);
- } catch (ExecutorManagerException e) {
- page.add("error", e.getMessage());
- }
- }
- else if(hasParam(req, "search")) {
- String searchTerm = getParam(req, "searchterm");
- try {
- history = executorManager.getExecutableFlows(searchTerm, (pageNum - 1)*pageSize, pageSize);
- } catch (ExecutorManagerException e) {
- // TODO Auto-generated catch block
- page.add("error", e.getMessage());
- }
- }
- else {
- try {
- history = executorManager.getExecutableFlows((pageNum - 1)*pageSize, pageSize);
- } catch (ExecutorManagerException e) {
- e.printStackTrace();
- }
- }
- page.add("flowHistory", history);
- page.add("size", pageSize);
- page.add("page", pageNum);
- //keep the search terms so that we can navigate to later pages
- if(hasParam(req, "searchterm") && !getParam(req, "searchterm").equals("")) {
- page.add("search", "true");
- page.add("search_term", getParam(req, "searchterm"));
- }
-
- if(hasParam(req, "advfilter")) {
- page.add("advfilter", "true");
- page.add("projcontain", getParam(req, "projcontain"));
- page.add("flowcontain", getParam(req, "flowcontain"));
- page.add("usercontain", getParam(req, "usercontain"));
- page.add("status", getIntParam(req, "status"));
- page.add("begin", getParam(req, "begin"));
- page.add("end", getParam(req, "end"));
- }
-// else {
-// page.add("search_term", "");
-// }
-
- if (pageNum == 1) {
- page.add("previous", new PageSelection(1, pageSize, true, false));
- }
- else {
- page.add("previous", new PageSelection(pageNum-1, pageSize, false, false));
- }
- page.add("next", new PageSelection(pageNum + 1, pageSize, false, false));
- // Now for the 5 other values.
- int pageStartValue = 1;
- if (pageNum > 3) {
- pageStartValue = pageNum - 2;
- }
-
- page.add("page1", new PageSelection(pageStartValue, pageSize, false, pageStartValue == pageNum));
- pageStartValue++;
- page.add("page2", new PageSelection(pageStartValue, pageSize, false, pageStartValue == pageNum));
- pageStartValue++;
- page.add("page3", new PageSelection(pageStartValue, pageSize, false, pageStartValue == pageNum));
- pageStartValue++;
- page.add("page4", new PageSelection(pageStartValue, pageSize, false, pageStartValue == pageNum));
- pageStartValue++;
- page.add("page5", new PageSelection(pageStartValue, pageSize, false, pageStartValue == pageNum));
- pageStartValue++;
-
- page.render();
- }
-
- private void handleHistoryTimelinePage(HttpServletRequest req, HttpServletResponse resp, Session session) {
-// Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/historytimelinepage.vm");
-// long currentTime = System.currentTimeMillis();
-// long begin = getLongParam(req, "begin", currentTime - 86400000);
-// long end = getLongParam(req, "end", currentTime);
-//
-// page.add("begin", begin);
-// page.add("end", end);
-//
-// List<ExecutionReference> refs = executorManager.getFlowHistory(begin, end);
-// ArrayList<Object> refList = new ArrayList<Object>();
-// for (ExecutionReference ref: refs) {
-//
-// HashMap<String,Object> refObj = new HashMap<String,Object>();
-// refObj.put("execId", ref.getExecId());
-// refObj.put("start", ref.getStartTime());
-// refObj.put("end", ref.getEndTime());
-// refObj.put("status", ref.getStatus().toString());
-//
-// refList.add(refObj);
-// }
-//
-// page.add("data", JSONUtils.toJSON(refList));
-// page.render();
- }
-
- private void handleHistoryDayPage(HttpServletRequest req, HttpServletResponse resp, Session session) {
-// Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/historydaypage.vm");
-// long currentTime = System.currentTimeMillis();
-// long begin = getLongParam(req, "begin", currentTime - 86400000);
-// long end = getLongParam(req, "end", currentTime);
-//
-// page.add("begin", begin);
-// page.add("end", end);
-//
-// List<ExecutionReference> refs = executorManager.getFlowHistory(begin, end);
-// ArrayList<Object> refList = new ArrayList<Object>();
-// for (ExecutionReference ref: refs) {
-//
-// HashMap<String,Object> refObj = new HashMap<String,Object>();
-// refObj.put("execId", ref.getExecId());
-// refObj.put("start", ref.getStartTime());
-// refObj.put("end", ref.getEndTime());
-// refObj.put("status", ref.getStatus().toString());
-//
-// refList.add(refObj);
-// }
-//
-// page.add("data", JSONUtils.toJSON(refList));
-// page.render();
- }
-
- public class PageSelection {
- private int page;
- private int size;
- private boolean disabled;
- private boolean selected;
-
- public PageSelection(int page, int size, boolean disabled, boolean selected) {
- this.page = page;
- this.size = size;
- this.disabled = disabled;
- this.setSelected(selected);
- }
-
- public int getPage() {
- return page;
- }
-
- public int getSize() {
- return size;
- }
-
- public boolean getDisabled() {
- return disabled;
- }
-
- public boolean isSelected() {
- return selected;
- }
-
- public void setSelected(boolean selected) {
- this.selected = selected;
- }
- }
-
- @Override
- protected void handlePost(HttpServletRequest req, HttpServletResponse resp,
- Session session) throws ServletException, IOException {
- // TODO Auto-generated method stub
-
- }
-
- public class ExecutorVMHelper {
- public String getProjectName(int id) {
- Project project = projectManager.getProject(id);
- if (project == null) {
- return String.valueOf(id);
- }
-
- return project.getName();
- }
- }
+ private static final long serialVersionUID = 1L;
+ private ExecutorManagerAdapter executorManager;
+ private ProjectManager projectManager;
+ private ExecutorVMHelper vmHelper;
+
+ @Override
+ public void init(ServletConfig config) throws ServletException {
+ super.init(config);
+ AzkabanWebServer server = (AzkabanWebServer) getApplication();
+ executorManager = server.getExecutorManager();
+ projectManager = server.getProjectManager();
+ vmHelper = new ExecutorVMHelper();
+ }
+
+ @Override
+ protected void handleGet(HttpServletRequest req, HttpServletResponse resp,
+ Session session) throws ServletException, IOException {
+
+ if (hasParam(req, "ajax")) {
+ handleAJAXAction(req, resp, session);
+ } else if (hasParam(req, "days")) {
+ handleHistoryDayPage(req, resp, session);
+ } else if (hasParam(req, "timeline")) {
+ handleHistoryTimelinePage(req, resp, session);
+ } else {
+ handleHistoryPage(req, resp, session);
+ }
+ }
+
+ private void handleAJAXAction(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException,
+ IOException {
+ HashMap<String, Object> ret = new HashMap<String, Object>();
+ String ajaxName = getParam(req, "ajax");
+
+ if (ajaxName.equals("fetch")) {
+ fetchHistoryData(req, resp, ret);
+ }
+
+ if (ret != null) {
+ this.writeJSON(resp, ret);
+ }
+ }
+
+ private void fetchHistoryData(HttpServletRequest req,
+ HttpServletResponse resp, HashMap<String, Object> ret)
+ throws ServletException {
+ // long start = getLongParam(req, "start");
+ // long end = getLongParam(req, "end");
+ //
+ // ret.put("start", start);
+ // ret.put("end", end);
+ //
+ // List<ExecutionReference> refs = executorManager.getFlowHistory(start,
+ // end);
+ // ArrayList<Object> refList = new ArrayList<Object>();
+ // for (ExecutionReference ref: refs) {
+ //
+ // HashMap<String,Object> refObj = new HashMap<String,Object>();
+ // refObj.put("execId", ref.getExecId());
+ // refObj.put("start", ref.getStartTime());
+ // refObj.put("end", ref.getEndTime());
+ // refObj.put("status", ref.getStatus().toString());
+ //
+ // refList.add(refObj);
+ // }
+ //
+ // ret.put("data", refList);
+ }
+
+  /**
+   * Renders the paged flow-execution history view (historypage.vm).
+   * Supports three modes: an advanced filter (project/flow/user/status and
+   * a time range), a free-text search, and a plain page-through of all
+   * executions. Filter/search terms are echoed back into the page so later
+   * pager links preserve them.
+   *
+   * @param req the incoming request; recognized parameters include "page",
+   *          "size", "advfilter", "search", "searchterm" and the filter
+   *          fields
+   * @param resp the response the velocity page is rendered to
+   * @param session the authenticated user session
+   * @throws ServletException if a required request parameter is missing
+   */
+  private void handleHistoryPage(HttpServletRequest req,
+      HttpServletResponse resp, Session session) throws ServletException {
+    Page page =
+        newPage(req, resp, session,
+            "azkaban/webapp/servlet/velocity/historypage.vm");
+    int pageNum = getIntParam(req, "page", 1);
+    int pageSize = getIntParam(req, "size", 16);
+    page.add("vmutils", vmHelper);
+
+    // Guard against page=0 as well as negatives; either would produce a
+    // negative skip offset in the queries below.
+    if (pageNum < 1) {
+      pageNum = 1;
+    }
+    List<ExecutableFlow> history = null;
+    if (hasParam(req, "advfilter")) {
+      String projContain = getParam(req, "projcontain");
+      String flowContain = getParam(req, "flowcontain");
+      String userContain = getParam(req, "usercontain");
+      int status = getIntParam(req, "status");
+      String begin = getParam(req, "begin");
+      // Use isEmpty(), not ==, for the "no bound given" check: == compares
+      // references and is not reliable for request-parameter strings.
+      long beginTime =
+          begin.isEmpty() ? -1 : DateTimeFormat.forPattern("MM/dd/yyyy-HH:mm")
+              .parseDateTime(begin).getMillis();
+      String end = getParam(req, "end");
+      long endTime =
+          end.isEmpty() ? -1 : DateTimeFormat.forPattern("MM/dd/yyyy-HH:mm")
+              .parseDateTime(end).getMillis();
+      try {
+        history =
+            executorManager.getExecutableFlows(projContain, flowContain,
+                userContain, status, beginTime, endTime, (pageNum - 1)
+                    * pageSize, pageSize);
+      } catch (ExecutorManagerException e) {
+        page.add("error", e.getMessage());
+      }
+    } else if (hasParam(req, "search")) {
+      String searchTerm = getParam(req, "searchterm");
+      try {
+        history =
+            executorManager.getExecutableFlows(searchTerm, (pageNum - 1)
+                * pageSize, pageSize);
+      } catch (ExecutorManagerException e) {
+        page.add("error", e.getMessage());
+      }
+    } else {
+      try {
+        history =
+            executorManager.getExecutableFlows((pageNum - 1) * pageSize,
+                pageSize);
+      } catch (ExecutorManagerException e) {
+        // Surface the failure on the page instead of swallowing it on
+        // stderr, consistent with the other branches above.
+        page.add("error", e.getMessage());
+      }
+    }
+    page.add("flowHistory", history);
+    page.add("size", pageSize);
+    page.add("page", pageNum);
+    // Keep the search terms so that we can navigate to later pages.
+    if (hasParam(req, "searchterm") && !getParam(req, "searchterm").equals("")) {
+      page.add("search", "true");
+      page.add("search_term", getParam(req, "searchterm"));
+    }
+
+    if (hasParam(req, "advfilter")) {
+      page.add("advfilter", "true");
+      page.add("projcontain", getParam(req, "projcontain"));
+      page.add("flowcontain", getParam(req, "flowcontain"));
+      page.add("usercontain", getParam(req, "usercontain"));
+      page.add("status", getIntParam(req, "status"));
+      page.add("begin", getParam(req, "begin"));
+      page.add("end", getParam(req, "end"));
+    }
+
+    // Pager controls: previous/next plus a five-page window ("page1".."page5")
+    // centered on the current page once we are past page 3.
+    if (pageNum == 1) {
+      page.add("previous", new PageSelection(1, pageSize, true, false));
+    } else {
+      page.add("previous", new PageSelection(pageNum - 1, pageSize, false,
+          false));
+    }
+    page.add("next", new PageSelection(pageNum + 1, pageSize, false, false));
+    int pageStartValue = 1;
+    if (pageNum > 3) {
+      pageStartValue = pageNum - 2;
+    }
+    for (int i = 1; i <= 5; ++i) {
+      page.add("page" + i, new PageSelection(pageStartValue, pageSize, false,
+          pageStartValue == pageNum));
+      pageStartValue++;
+    }
+
+    page.render();
+  }
+
+  // Disabled: the history timeline view is not currently wired up; the
+  // commented-out implementation below is kept for reference only, so this
+  // handler is intentionally a no-op.
+  private void handleHistoryTimelinePage(HttpServletRequest req,
+      HttpServletResponse resp, Session session) {
+    // Page page = newPage(req, resp, session,
+    // "azkaban/webapp/servlet/velocity/historytimelinepage.vm");
+    // long currentTime = System.currentTimeMillis();
+    // long begin = getLongParam(req, "begin", currentTime - 86400000);
+    // long end = getLongParam(req, "end", currentTime);
+    //
+    // page.add("begin", begin);
+    // page.add("end", end);
+    //
+    // List<ExecutionReference> refs = executorManager.getFlowHistory(begin,
+    // end);
+    // ArrayList<Object> refList = new ArrayList<Object>();
+    // for (ExecutionReference ref: refs) {
+    //
+    // HashMap<String,Object> refObj = new HashMap<String,Object>();
+    // refObj.put("execId", ref.getExecId());
+    // refObj.put("start", ref.getStartTime());
+    // refObj.put("end", ref.getEndTime());
+    // refObj.put("status", ref.getStatus().toString());
+    //
+    // refList.add(refObj);
+    // }
+    //
+    // page.add("data", JSONUtils.toJSON(refList));
+    // page.render();
+  }
+
+  // Disabled: the per-day history view is not currently wired up; the
+  // commented-out implementation below is kept for reference only, so this
+  // handler is intentionally a no-op.
+  private void handleHistoryDayPage(HttpServletRequest req,
+      HttpServletResponse resp, Session session) {
+    // Page page = newPage(req, resp, session,
+    // "azkaban/webapp/servlet/velocity/historydaypage.vm");
+    // long currentTime = System.currentTimeMillis();
+    // long begin = getLongParam(req, "begin", currentTime - 86400000);
+    // long end = getLongParam(req, "end", currentTime);
+    //
+    // page.add("begin", begin);
+    // page.add("end", end);
+    //
+    // List<ExecutionReference> refs = executorManager.getFlowHistory(begin,
+    // end);
+    // ArrayList<Object> refList = new ArrayList<Object>();
+    // for (ExecutionReference ref: refs) {
+    //
+    // HashMap<String,Object> refObj = new HashMap<String,Object>();
+    // refObj.put("execId", ref.getExecId());
+    // refObj.put("start", ref.getStartTime());
+    // refObj.put("end", ref.getEndTime());
+    // refObj.put("status", ref.getStatus().toString());
+    //
+    // refList.add(refObj);
+    // }
+    //
+    // page.add("data", JSONUtils.toJSON(refList));
+    // page.render();
+  }
+
+  /**
+   * View bean describing one control in the history pager: a page number,
+   * the page size, and whether the control is disabled or currently
+   * selected. Consumed by the velocity template through its getters.
+   * Page, size and disabled are fixed at construction; only the selected
+   * flag is mutable.
+   */
+  public class PageSelection {
+    private final int page;
+    private final int size;
+    private final boolean disabled;
+    private boolean selected;
+
+    public PageSelection(int page, int size, boolean disabled, boolean selected) {
+      this.page = page;
+      this.size = size;
+      this.disabled = disabled;
+      this.setSelected(selected);
+    }
+
+    public int getPage() {
+      return page;
+    }
+
+    public int getSize() {
+      return size;
+    }
+
+    public boolean getDisabled() {
+      return disabled;
+    }
+
+    public boolean isSelected() {
+      return selected;
+    }
+
+    public void setSelected(boolean selected) {
+      this.selected = selected;
+    }
+  }
+
+  @Override
+  protected void handlePost(HttpServletRequest req, HttpServletResponse resp,
+      Session session) throws ServletException, IOException {
+    // POST is not used by the history pages; intentionally a no-op.
+
+  }
+
+  /**
+   * Helper object exposed to velocity templates for resolving display
+   * values from ids.
+   */
+  public class ExecutorVMHelper {
+    /**
+     * Looks up the project name for the given project id, falling back to
+     * the numeric id rendered as a string when the project cannot be found.
+     */
+    public String getProjectName(int id) {
+      Project loaded = projectManager.getProject(id);
+      return loaded == null ? String.valueOf(id) : loaded.getName();
+    }
+  }
}
src/main/java/azkaban/webapp/servlet/HttpRequestUtils.java 419(+214 -205)
diff --git a/src/main/java/azkaban/webapp/servlet/HttpRequestUtils.java b/src/main/java/azkaban/webapp/servlet/HttpRequestUtils.java
index 6da9d88..77f62ed 100644
--- a/src/main/java/azkaban/webapp/servlet/HttpRequestUtils.java
+++ b/src/main/java/azkaban/webapp/servlet/HttpRequestUtils.java
@@ -31,209 +31,218 @@ import azkaban.executor.mail.DefaultMailCreator;
import azkaban.utils.JSONUtils;
public class HttpRequestUtils {
- public static ExecutionOptions parseFlowOptions(HttpServletRequest req) throws ServletException {
- ExecutionOptions execOptions = new ExecutionOptions();
-
- if (hasParam(req, "failureAction")) {
- String option = getParam(req, "failureAction");
- if (option.equals("finishCurrent") ) {
- execOptions.setFailureAction(FailureAction.FINISH_CURRENTLY_RUNNING);
- }
- else if (option.equals("cancelImmediately")) {
- execOptions.setFailureAction(FailureAction.CANCEL_ALL);
- }
- else if (option.equals("finishPossible")) {
- execOptions.setFailureAction(FailureAction.FINISH_ALL_POSSIBLE);
- }
- }
-
- if (hasParam(req, "failureEmailsOverride")) {
- boolean override = getBooleanParam(req, "failureEmailsOverride", false);
- execOptions.setFailureEmailsOverridden(override);
- }
- if (hasParam(req, "successEmailsOverride")) {
- boolean override = getBooleanParam(req, "successEmailsOverride", false);
- execOptions.setSuccessEmailsOverridden(override);
- }
-
- if (hasParam(req, "failureEmails")) {
- String emails = getParam(req, "failureEmails");
- if (!emails.isEmpty()) {
- String[] emailSplit = emails.split("\\s*,\\s*|\\s*;\\s*|\\s+");
- execOptions.setFailureEmails(Arrays.asList(emailSplit));
- }
- }
- if (hasParam(req, "successEmails")) {
- String emails = getParam(req, "successEmails");
- if (!emails.isEmpty()) {
- String[] emailSplit = emails.split("\\s*,\\s*|\\s*;\\s*|\\s+");
- execOptions.setSuccessEmails(Arrays.asList(emailSplit));
- }
- }
- if (hasParam(req, "notifyFailureFirst")) {
- execOptions.setNotifyOnFirstFailure(Boolean.parseBoolean(getParam(req, "notifyFailureFirst")));
- }
- if (hasParam(req, "notifyFailureLast")) {
- execOptions.setNotifyOnLastFailure(Boolean.parseBoolean(getParam(req, "notifyFailureLast")));
- }
-
- String concurrentOption = "skip";
- if (hasParam(req, "concurrentOption")) {
- concurrentOption = getParam(req, "concurrentOption");
- execOptions.setConcurrentOption(concurrentOption);
- if (concurrentOption.equals("pipeline")) {
- int pipelineLevel = getIntParam(req, "pipelineLevel");
- execOptions.setPipelineLevel(pipelineLevel);
- }
- else if (concurrentOption.equals("queue")) {
- // Not yet implemented
- int queueLevel = getIntParam(req, "queueLevel", 1);
- execOptions.setPipelineLevel(queueLevel);
- }
- }
- String mailCreator = DefaultMailCreator.DEFAULT_MAIL_CREATOR;
- if (hasParam(req, "mailCreator")) {
- mailCreator = getParam(req, "mailCreator");
- execOptions.setMailCreator(mailCreator);
- }
-
- Map<String, String> flowParamGroup = getParamGroup(req, "flowOverride");
- execOptions.addAllFlowParameters(flowParamGroup);
-
- if (hasParam(req, "disabled")) {
- String disabled = getParam(req, "disabled");
- if (!disabled.isEmpty()) {
- @SuppressWarnings("unchecked")
- List<Object> disabledList = (List<Object>)JSONUtils.parseJSONFromStringQuiet(disabled);
- execOptions.setDisabledJobs(disabledList);
- }
- }
- return execOptions;
- }
-
- /**
- * Checks for the existance of the parameter in the request
- *
- * @param request
- * @param param
- * @return
- */
- public static boolean hasParam(HttpServletRequest request, String param) {
- return request.getParameter(param) != null;
- }
-
- /**
- * Retrieves the param from the http servlet request. Will throw an
- * exception if not found
- *
- * @param request
- * @param name
- * @return
- * @throws ServletException
- */
- public static String getParam(HttpServletRequest request, String name) throws ServletException {
- String p = request.getParameter(name);
- if (p == null) {
- throw new ServletException("Missing required parameter '" + name + "'.");
- }
- else {
- return p;
- }
- }
-
- /**
- * Retrieves the param from the http servlet request.
- *
- * @param request
- * @param name
- * @param default
- *
- * @return
- */
- public static String getParam(HttpServletRequest request, String name, String defaultVal){
- String p = request.getParameter(name);
- if (p == null) {
- return defaultVal;
- }
- return p;
- }
-
-
- /**
- * Returns the param and parses it into an int. Will throw an exception if
- * not found, or a parse error if the type is incorrect.
- *
- * @param request
- * @param name
- * @return
- * @throws ServletException
- */
- public static int getIntParam(HttpServletRequest request, String name) throws ServletException {
- String p = getParam(request, name);
- return Integer.parseInt(p);
- }
-
- public static int getIntParam(HttpServletRequest request, String name, int defaultVal) {
- if (hasParam(request, name)) {
- try {
- return getIntParam(request, name);
- } catch (Exception e) {
- return defaultVal;
- }
- }
-
- return defaultVal;
- }
-
- public static boolean getBooleanParam(HttpServletRequest request, String name) throws ServletException {
- String p = getParam(request, name);
- return Boolean.parseBoolean(p);
- }
-
- public static boolean getBooleanParam(HttpServletRequest request, String name, boolean defaultVal) {
- if (hasParam(request, name)) {
- try {
- return getBooleanParam(request, name);
- } catch (Exception e) {
- return defaultVal;
- }
- }
-
- return defaultVal;
- }
-
- public static long getLongParam(HttpServletRequest request, String name) throws ServletException {
- String p = getParam(request, name);
- return Long.valueOf(p);
- }
-
- public static long getLongParam(HttpServletRequest request, String name, long defaultVal) {
- if (hasParam(request, name)) {
- try {
- return getLongParam(request, name);
- } catch (Exception e) {
- return defaultVal;
- }
- }
-
- return defaultVal;
- }
-
-
- public static Map<String, String> getParamGroup(HttpServletRequest request, String groupName) throws ServletException {
- @SuppressWarnings("unchecked")
- Enumeration<Object> enumerate = (Enumeration<Object>)request.getParameterNames();
- String matchString = groupName + "[";
-
- HashMap<String, String> groupParam = new HashMap<String, String>();
- while( enumerate.hasMoreElements() ) {
- String str = (String)enumerate.nextElement();
- if (str.startsWith(matchString)) {
- groupParam.put(str.substring(matchString.length(), str.length() - 1), request.getParameter(str));
- }
-
- }
- return groupParam;
- }
-
+  /**
+   * Builds an {@link ExecutionOptions} from the request parameters of a
+   * flow-execution request: failure action, email overrides and lists,
+   * first/last failure notification flags, concurrent-run option, mail
+   * creator, "flowOverride[...]" parameter group and the disabled-jobs
+   * JSON list. Parameters that are absent leave the option defaults.
+   *
+   * @param req the execution request
+   * @return the populated options object
+   * @throws ServletException if a dependent required parameter (e.g.
+   *           "pipelineLevel" when concurrentOption=pipeline) is missing
+   */
+  public static ExecutionOptions parseFlowOptions(HttpServletRequest req)
+      throws ServletException {
+    ExecutionOptions execOptions = new ExecutionOptions();
+
+    if (hasParam(req, "failureAction")) {
+      String option = getParam(req, "failureAction");
+      if (option.equals("finishCurrent")) {
+        execOptions.setFailureAction(FailureAction.FINISH_CURRENTLY_RUNNING);
+      } else if (option.equals("cancelImmediately")) {
+        execOptions.setFailureAction(FailureAction.CANCEL_ALL);
+      } else if (option.equals("finishPossible")) {
+        execOptions.setFailureAction(FailureAction.FINISH_ALL_POSSIBLE);
+      }
+    }
+
+    if (hasParam(req, "failureEmailsOverride")) {
+      boolean override = getBooleanParam(req, "failureEmailsOverride", false);
+      execOptions.setFailureEmailsOverridden(override);
+    }
+    if (hasParam(req, "successEmailsOverride")) {
+      boolean override = getBooleanParam(req, "successEmailsOverride", false);
+      execOptions.setSuccessEmailsOverridden(override);
+    }
+
+    // Email lists may be separated by commas, semicolons or whitespace.
+    if (hasParam(req, "failureEmails")) {
+      String emails = getParam(req, "failureEmails");
+      if (!emails.isEmpty()) {
+        String[] emailSplit = emails.split("\\s*,\\s*|\\s*;\\s*|\\s+");
+        execOptions.setFailureEmails(Arrays.asList(emailSplit));
+      }
+    }
+    if (hasParam(req, "successEmails")) {
+      String emails = getParam(req, "successEmails");
+      if (!emails.isEmpty()) {
+        String[] emailSplit = emails.split("\\s*,\\s*|\\s*;\\s*|\\s+");
+        execOptions.setSuccessEmails(Arrays.asList(emailSplit));
+      }
+    }
+    if (hasParam(req, "notifyFailureFirst")) {
+      execOptions.setNotifyOnFirstFailure(Boolean.parseBoolean(getParam(req,
+          "notifyFailureFirst")));
+    }
+    if (hasParam(req, "notifyFailureLast")) {
+      execOptions.setNotifyOnLastFailure(Boolean.parseBoolean(getParam(req,
+          "notifyFailureLast")));
+    }
+
+    String concurrentOption = "skip";
+    if (hasParam(req, "concurrentOption")) {
+      concurrentOption = getParam(req, "concurrentOption");
+      execOptions.setConcurrentOption(concurrentOption);
+      if (concurrentOption.equals("pipeline")) {
+        int pipelineLevel = getIntParam(req, "pipelineLevel");
+        execOptions.setPipelineLevel(pipelineLevel);
+      } else if (concurrentOption.equals("queue")) {
+        // Not yet implemented
+        // NOTE(review): stores the queue level in the pipeline-level field;
+        // presumably a placeholder while queueing is unimplemented - confirm.
+        int queueLevel = getIntParam(req, "queueLevel", 1);
+        execOptions.setPipelineLevel(queueLevel);
+      }
+    }
+    String mailCreator = DefaultMailCreator.DEFAULT_MAIL_CREATOR;
+    if (hasParam(req, "mailCreator")) {
+      mailCreator = getParam(req, "mailCreator");
+      execOptions.setMailCreator(mailCreator);
+    }
+
+    Map<String, String> flowParamGroup = getParamGroup(req, "flowOverride");
+    execOptions.addAllFlowParameters(flowParamGroup);
+
+    if (hasParam(req, "disabled")) {
+      String disabled = getParam(req, "disabled");
+      if (!disabled.isEmpty()) {
+        @SuppressWarnings("unchecked")
+        List<Object> disabledList =
+            (List<Object>) JSONUtils.parseJSONFromStringQuiet(disabled);
+        execOptions.setDisabledJobs(disabledList);
+      }
+    }
+    return execOptions;
+  }
+
+  /**
+   * Checks for the existence of the parameter in the request.
+   *
+   * @param request the servlet request
+   * @param param the parameter name to look for
+   * @return true if the parameter is present (possibly empty), false
+   *         otherwise
+   */
+  public static boolean hasParam(HttpServletRequest request, String param) {
+    return request.getParameter(param) != null;
+  }
+
+  /**
+   * Retrieves the param from the http servlet request. Will throw an
+   * exception if not found.
+   *
+   * @param request the servlet request
+   * @param name the parameter name
+   * @return the parameter value, never null
+   * @throws ServletException if the parameter is absent
+   */
+  public static String getParam(HttpServletRequest request, String name)
+      throws ServletException {
+    String p = request.getParameter(name);
+    if (p == null) {
+      throw new ServletException("Missing required parameter '" + name + "'.");
+    } else {
+      return p;
+    }
+  }
+
+  /**
+   * Retrieves the param from the http servlet request, falling back to a
+   * default when it is absent.
+   *
+   * @param request the servlet request
+   * @param name the parameter name
+   * @param defaultVal the value returned when the parameter is absent
+   *
+   * @return the parameter value, or defaultVal if not present
+   */
+  public static String getParam(HttpServletRequest request, String name,
+      String defaultVal) {
+    String p = request.getParameter(name);
+    if (p == null) {
+      return defaultVal;
+    }
+    return p;
+  }
+
+  /**
+   * Returns the param and parses it into an int. Will throw an exception if
+   * not found, or a parse error if the type is incorrect.
+   *
+   * @param request the servlet request
+   * @param name the parameter name
+   * @return the parsed int value
+   * @throws ServletException if the parameter is absent
+   * @throws NumberFormatException if the value is not a valid int
+   */
+  public static int getIntParam(HttpServletRequest request, String name)
+      throws ServletException {
+    String p = getParam(request, name);
+    return Integer.parseInt(p);
+  }
+
+  /**
+   * Returns the int param, or the default when the parameter is absent or
+   * fails to parse. Never throws.
+   */
+  public static int getIntParam(HttpServletRequest request, String name,
+      int defaultVal) {
+    if (hasParam(request, name)) {
+      try {
+        return getIntParam(request, name);
+      } catch (Exception e) {
+        // Malformed value: fall back rather than fail the request.
+        return defaultVal;
+      }
+    }
+
+    return defaultVal;
+  }
+
+  /**
+   * Returns the boolean param parsed via Boolean.parseBoolean (anything
+   * other than "true", case-insensitive, yields false).
+   *
+   * @throws ServletException if the parameter is absent
+   */
+  public static boolean getBooleanParam(HttpServletRequest request, String name)
+      throws ServletException {
+    String p = getParam(request, name);
+    return Boolean.parseBoolean(p);
+  }
+
+  /**
+   * Returns the boolean param, or the default when the parameter is absent.
+   * Never throws.
+   */
+  public static boolean getBooleanParam(HttpServletRequest request,
+      String name, boolean defaultVal) {
+    if (hasParam(request, name)) {
+      try {
+        return getBooleanParam(request, name);
+      } catch (Exception e) {
+        return defaultVal;
+      }
+    }
+
+    return defaultVal;
+  }
+
+  /**
+   * Returns the param parsed into a long.
+   *
+   * @param request the servlet request
+   * @param name the parameter name
+   * @return the parsed long value
+   * @throws ServletException if the parameter is absent
+   * @throws NumberFormatException if the value is not a valid long
+   */
+  public static long getLongParam(HttpServletRequest request, String name)
+      throws ServletException {
+    String p = getParam(request, name);
+    // parseLong returns the primitive directly; valueOf would box a Long
+    // only to immediately unbox it for the long return type.
+    return Long.parseLong(p);
+  }
+
+  /**
+   * Returns the long param, or the default when the parameter is absent or
+   * fails to parse. Never throws.
+   */
+  public static long getLongParam(HttpServletRequest request, String name,
+      long defaultVal) {
+    if (hasParam(request, name)) {
+      try {
+        return getLongParam(request, name);
+      } catch (Exception e) {
+        return defaultVal;
+      }
+    }
+
+    return defaultVal;
+  }
+
+  /**
+   * Collects all request parameters of the form groupName[key] into a map
+   * of key to value, e.g. flowOverride[foo]=bar yields {"foo": "bar"}.
+   *
+   * @param request the servlet request
+   * @param groupName the bracket-group prefix to match
+   * @return map from bracketed key to parameter value; empty if none match
+   * @throws ServletException declared for interface symmetry with the other
+   *           accessors; not thrown by this implementation
+   */
+  public static Map<String, String> getParamGroup(HttpServletRequest request,
+      String groupName) throws ServletException {
+    // NOTE(review): cast suggests a pre-generics Servlet API where
+    // getParameterNames returns a raw Enumeration - confirm API level
+    // before tightening to Enumeration<String>.
+    @SuppressWarnings("unchecked")
+    Enumeration<Object> enumerate =
+        (Enumeration<Object>) request.getParameterNames();
+    String matchString = groupName + "[";
+
+    HashMap<String, String> groupParam = new HashMap<String, String>();
+    while (enumerate.hasMoreElements()) {
+      String str = (String) enumerate.nextElement();
+      if (str.startsWith(matchString)) {
+        // Strip "groupName[" and the trailing "]" to recover the key.
+        groupParam.put(str.substring(matchString.length(), str.length() - 1),
+            request.getParameter(str));
+      }
+
+    }
+    return groupParam;
+  }
+
}
diff --git a/src/main/java/azkaban/webapp/servlet/IndexRedirectServlet.java b/src/main/java/azkaban/webapp/servlet/IndexRedirectServlet.java
index f788b40..8cf31af 100644
--- a/src/main/java/azkaban/webapp/servlet/IndexRedirectServlet.java
+++ b/src/main/java/azkaban/webapp/servlet/IndexRedirectServlet.java
@@ -28,23 +28,26 @@ import azkaban.webapp.session.Session;
* The main page
*/
public class IndexRedirectServlet extends LoginAbstractAzkabanServlet {
- private static final long serialVersionUID = -1;
- private String defaultServletPath;
-
- public IndexRedirectServlet(String defaultServletPath) {
- this.defaultServletPath = defaultServletPath;
- if (this.defaultServletPath.isEmpty() || this.defaultServletPath.equals("/")) {
- this.defaultServletPath = "/index";
- }
- }
-
- @Override
- protected void handleGet(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- resp.sendRedirect(defaultServletPath);
- }
-
- @Override
- protected void handlePost(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- resp.sendRedirect(defaultServletPath);
- }
+  private static final long serialVersionUID = -1;
+  // Target of every redirect; normalized in the constructor.
+  private String defaultServletPath;
+
+  /**
+   * @param defaultServletPath the path users are redirected to; an empty
+   *          string or "/" falls back to "/index"
+   */
+  public IndexRedirectServlet(String defaultServletPath) {
+    this.defaultServletPath = defaultServletPath;
+    if (this.defaultServletPath.isEmpty()
+        || this.defaultServletPath.equals("/")) {
+      this.defaultServletPath = "/index";
+    }
+  }
+
+  /** Redirects every GET to the configured default servlet path. */
+  @Override
+  protected void handleGet(HttpServletRequest req, HttpServletResponse resp,
+      Session session) throws ServletException, IOException {
+    resp.sendRedirect(defaultServletPath);
+  }
+
+  /** Redirects every POST to the configured default servlet path. */
+  @Override
+  protected void handlePost(HttpServletRequest req, HttpServletResponse resp,
+      Session session) throws ServletException, IOException {
+    resp.sendRedirect(defaultServletPath);
+  }
}
src/main/java/azkaban/webapp/servlet/JMXHttpServlet.java 444(+228 -216)
diff --git a/src/main/java/azkaban/webapp/servlet/JMXHttpServlet.java b/src/main/java/azkaban/webapp/servlet/JMXHttpServlet.java
index 616cc76..287e466 100644
--- a/src/main/java/azkaban/webapp/servlet/JMXHttpServlet.java
+++ b/src/main/java/azkaban/webapp/servlet/JMXHttpServlet.java
@@ -43,222 +43,234 @@ import azkaban.webapp.session.Session;
/**
* Limited set of jmx calls for when you cannot attach to the jvm
*/
-public class JMXHttpServlet extends LoginAbstractAzkabanServlet implements ConnectorParams {
- /**
+public class JMXHttpServlet extends LoginAbstractAzkabanServlet implements
+ ConnectorParams {
+ /**
*
*/
- private static final long serialVersionUID = 1L;
-
- private static final Logger logger = Logger.getLogger(JMXHttpServlet.class.getName());
-
- private UserManager userManager;
- private AzkabanWebServer server;
- private ExecutorManagerAdapter executorManager;
- private TriggerManager triggerManager;
-
- @Override
- public void init(ServletConfig config) throws ServletException {
- super.init(config);
-
- server = (AzkabanWebServer)getApplication();
- userManager = server.getUserManager();
- executorManager = server.getExecutorManager();
-
- triggerManager = server.getTriggerManager();
- }
-
- @Override
- protected void handleGet(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- if (hasParam(req, "ajax")){
- Map<String,Object> ret = new HashMap<String,Object>();
-
- if(!hasPermission(session.getUser(), Permission.Type.METRICS)) {
- ret.put("error", "User " + session.getUser().getUserId() + " has no permission.");
- this.writeJSON(resp, ret, true);
- return;
- }
- String ajax = getParam(req, "ajax");
- if (JMX_GET_ALL_EXECUTOR_ATTRIBUTES.equals(ajax)) {
- if (!hasParam(req, JMX_MBEAN) || !hasParam(req, JMX_HOSTPORT)) {
- ret.put("error", "Parameters '" + JMX_MBEAN + "' and '"+ JMX_HOSTPORT +"' must be set");
- this.writeJSON(resp, ret, true);
- return;
- }
-
- String hostPort = getParam(req, JMX_HOSTPORT);
- String mbean = getParam(req, JMX_MBEAN);
- Map<String, Object> result = executorManager.callExecutorJMX(hostPort, JMX_GET_ALL_MBEAN_ATTRIBUTES, mbean);
- ret = result;
- }
-// else
-// if (TriggerConnectorParams.JMX_GET_ALL_TRIGGER_SERVER_ATTRIBUTES.equals(ajax)) {
-// if(!hasParam(req, JMX_MBEAN) || !hasParam(req, JMX_HOSTPORT)) {
-// ret.put("error", "Parameters '" + JMX_MBEAN + "' and '"+ JMX_HOSTPORT +"' must be set");
-// this.writeJSON(resp, ret, true);
-// return;
-// }
-//// String hostPort = getParam(req, JMX_HOSTPORT);
-//// String mbean = getParam(req, JMX_MBEAN);
-// ret = triggerManager.getJMX().getAllJMXMbeans();
-// }
- else if (JMX_GET_MBEANS.equals(ajax)) {
- ret.put("mbeans", server.getMbeanNames());
- }
- else if (JMX_GET_MBEAN_INFO.equals(ajax)) {
- if (hasParam(req, JMX_MBEAN)) {
- String mbeanName = getParam(req, JMX_MBEAN);
- try {
- ObjectName name = new ObjectName(mbeanName);
- MBeanInfo info = server.getMBeanInfo(name);
- ret.put("attributes", info.getAttributes());
- ret.put("description", info.getDescription());
- } catch (Exception e) {
- logger.error(e);
- ret.put("error", "'" + mbeanName + "' is not a valid mBean name");
- }
- }
- else {
- ret.put("error", "No 'mbean' name parameter specified" );
- }
- }
- else if (JMX_GET_MBEAN_ATTRIBUTE.equals(ajax)) {
- if (!hasParam(req, JMX_MBEAN) || !hasParam(req, JMX_ATTRIBUTE)) {
- ret.put("error", "Parameters 'mbean' and 'attribute' must be set");
- }
- else {
- String mbeanName = getParam(req, JMX_MBEAN);
- String attribute = getParam(req, JMX_ATTRIBUTE);
-
- try {
- ObjectName name = new ObjectName(mbeanName);
- Object obj = server.getMBeanAttribute(name, attribute);
- ret.put("value", obj);
- } catch (Exception e) {
- logger.error(e);
- ret.put("error", "'" + mbeanName + "' is not a valid mBean name");
- }
- }
- }
- else if (JMX_GET_ALL_MBEAN_ATTRIBUTES.equals(ajax)) {
- if (!hasParam(req, JMX_MBEAN)) {
- ret.put("error", "Parameters 'mbean' must be set");
- }
- else {
- String mbeanName = getParam(req, JMX_MBEAN);
- try {
- ObjectName name = new ObjectName(mbeanName);
- MBeanInfo info = server.getMBeanInfo(name);
-
- MBeanAttributeInfo[] mbeanAttrs = info.getAttributes();
- HashMap<String, Object> attributes = new HashMap<String,Object>();
-
- for (MBeanAttributeInfo attrInfo: mbeanAttrs) {
- Object obj = server.getMBeanAttribute(name, attrInfo.getName());
- attributes.put(attrInfo.getName(), obj);
- }
-
- ret.put("attributes", attributes);
- } catch (Exception e) {
- logger.error(e);
- ret.put("error", "'" + mbeanName + "' is not a valid mBean name");
- }
- }
- }
- else {
- ret.put("commands", new String[] {
- JMX_GET_MBEANS,
- JMX_GET_MBEAN_INFO+"&"+JMX_MBEAN+"=<name>",
- JMX_GET_MBEAN_ATTRIBUTE+"&"+JMX_MBEAN+"=<name>&"+JMX_ATTRIBUTE+"=<attributename>"}
- );
- }
- this.writeJSON(resp, ret, true);
- }
- else {
- handleJMXPage(req, resp, session);
- }
- }
-
- private void handleJMXPage(HttpServletRequest req, HttpServletResponse resp, Session session) throws IOException {
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/jmxpage.vm");
-
- if(!hasPermission(session.getUser(), Permission.Type.METRICS)) {
- page.add("errorMsg", "User " + session.getUser().getUserId() + " has no permission.");
- page.render();
- return;
- }
-
- page.add("mbeans", server.getMbeanNames());
-
- Map<String, Object> executorMBeans = new HashMap<String,Object>();
-// Set<String> primaryServerHosts = executorManager.getPrimaryServerHosts();
- for (String hostPort: executorManager.getAllActiveExecutorServerHosts()) {
- try {
- Map<String, Object> mbeans = executorManager.callExecutorJMX(hostPort, JMX_GET_MBEANS, null);
-
- executorMBeans.put(hostPort, mbeans.get("mbeans"));
-// if (primaryServerHosts.contains(hostPort)) {
-// executorMBeans.put(hostPort, mbeans.get("mbeans"));
-// }
-// else {
-// executorMBeans.put(hostPort, mbeans.get("mbeans"));
-// }
- }
- catch (IOException e) {
- logger.error("Cannot contact executor " + hostPort, e);
- }
- }
-
- page.add("executorRemoteMBeans", executorMBeans);
-
- Map<String, Object> triggerserverMBeans = new HashMap<String,Object>();
-// Set<String> primaryTriggerServerHosts = triggerManager.getPrimaryServerHosts();
-// for (String hostPort: triggerManager.getAllActiveTriggerServerHosts()) {
-// try {
-// Map<String, Object> mbeans = triggerManager.callTriggerServerJMX(hostPort, TriggerConnectorParams.JMX_GET_MBEANS, null);
-//
-// if (primaryTriggerServerHosts.contains(hostPort)) {
-// triggerserverMBeans.put(hostPort, mbeans.get("mbeans"));
-// }
-// else {
-// triggerserverMBeans.put(hostPort, mbeans.get("mbeans"));
-// }
-// }
-// catch (IOException e) {
-// logger.error("Cannot contact executor " + hostPort, e);
-// }
-// }
- triggerserverMBeans.put(triggerManager.getJMX().getPrimaryServerHost(), triggerManager.getJMX().getAllJMXMbeans());
-
- page.add("triggerserverRemoteMBeans", triggerserverMBeans);
-
- page.render();
- }
-
- @Override
- protected void handlePost(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
-
- }
-
-// private boolean hasAdminRole(User user) {
-// for(String roleName: user.getRoles()) {
-// Role role = userManager.getRole(roleName);
-// Permission perm = role.getPermission();
-// if (perm.isPermissionSet(Permission.Type.ADMIN)) {
-// return true;
-// }
-// }
-//
-// return false;
-// }
-
- protected boolean hasPermission(User user, Permission.Type type) {
- for(String roleName: user.getRoles()) {
- Role role = userManager.getRole(roleName);
- if (role.getPermission().isPermissionSet(type) || role.getPermission().isPermissionSet(Permission.Type.ADMIN)) {
- return true;
- }
- }
-
- return false;
- }
+  private static final long serialVersionUID = 1L;
+
+  private static final Logger logger = Logger.getLogger(JMXHttpServlet.class
+      .getName());
+
+  // Collaborators resolved from the web application in init().
+  private UserManager userManager;
+  private AzkabanWebServer server;
+  private ExecutorManagerAdapter executorManager;
+  private TriggerManager triggerManager;
+
+  /** Resolves the server-side managers once the servlet is wired up. */
+  @Override
+  public void init(ServletConfig config) throws ServletException {
+    super.init(config);
+
+    server = (AzkabanWebServer) getApplication();
+    userManager = server.getUserManager();
+    executorManager = server.getExecutorManager();
+
+    triggerManager = server.getTriggerManager();
+  }
+
+  /**
+   * GET dispatcher. With an "ajax" parameter this serves JSON for a small
+   * set of JMX commands (list mbeans, mbean info, a single attribute, all
+   * attributes, or attributes of a remote executor); without it, the JMX
+   * page is rendered. Every path requires the METRICS permission; errors
+   * are reported in the JSON under the "error" key.
+   */
+  @Override
+  protected void handleGet(HttpServletRequest req, HttpServletResponse resp,
+      Session session) throws ServletException, IOException {
+    if (hasParam(req, "ajax")) {
+      Map<String, Object> ret = new HashMap<String, Object>();
+
+      if (!hasPermission(session.getUser(), Permission.Type.METRICS)) {
+        ret.put("error", "User " + session.getUser().getUserId()
+            + " has no permission.");
+        this.writeJSON(resp, ret, true);
+        return;
+      }
+      String ajax = getParam(req, "ajax");
+      // Remote case: proxy the all-attributes call to an executor host.
+      if (JMX_GET_ALL_EXECUTOR_ATTRIBUTES.equals(ajax)) {
+        if (!hasParam(req, JMX_MBEAN) || !hasParam(req, JMX_HOSTPORT)) {
+          ret.put("error", "Parameters '" + JMX_MBEAN + "' and '"
+              + JMX_HOSTPORT + "' must be set");
+          this.writeJSON(resp, ret, true);
+          return;
+        }
+
+        String hostPort = getParam(req, JMX_HOSTPORT);
+        String mbean = getParam(req, JMX_MBEAN);
+        Map<String, Object> result =
+            executorManager.callExecutorJMX(hostPort,
+                JMX_GET_ALL_MBEAN_ATTRIBUTES, mbean);
+        ret = result;
+      }
+      // else
+      // if
+      // (TriggerConnectorParams.JMX_GET_ALL_TRIGGER_SERVER_ATTRIBUTES.equals(ajax))
+      // {
+      // if(!hasParam(req, JMX_MBEAN) || !hasParam(req, JMX_HOSTPORT)) {
+      // ret.put("error", "Parameters '" + JMX_MBEAN + "' and '"+ JMX_HOSTPORT
+      // +"' must be set");
+      // this.writeJSON(resp, ret, true);
+      // return;
+      // }
+      // // String hostPort = getParam(req, JMX_HOSTPORT);
+      // // String mbean = getParam(req, JMX_MBEAN);
+      // ret = triggerManager.getJMX().getAllJMXMbeans();
+      // }
+      else if (JMX_GET_MBEANS.equals(ajax)) {
+        ret.put("mbeans", server.getMbeanNames());
+      } else if (JMX_GET_MBEAN_INFO.equals(ajax)) {
+        if (hasParam(req, JMX_MBEAN)) {
+          String mbeanName = getParam(req, JMX_MBEAN);
+          try {
+            ObjectName name = new ObjectName(mbeanName);
+            MBeanInfo info = server.getMBeanInfo(name);
+            ret.put("attributes", info.getAttributes());
+            ret.put("description", info.getDescription());
+          } catch (Exception e) {
+            logger.error(e);
+            ret.put("error", "'" + mbeanName + "' is not a valid mBean name");
+          }
+        } else {
+          ret.put("error", "No 'mbean' name parameter specified");
+        }
+      } else if (JMX_GET_MBEAN_ATTRIBUTE.equals(ajax)) {
+        if (!hasParam(req, JMX_MBEAN) || !hasParam(req, JMX_ATTRIBUTE)) {
+          ret.put("error", "Parameters 'mbean' and 'attribute' must be set");
+        } else {
+          String mbeanName = getParam(req, JMX_MBEAN);
+          String attribute = getParam(req, JMX_ATTRIBUTE);
+
+          try {
+            ObjectName name = new ObjectName(mbeanName);
+            Object obj = server.getMBeanAttribute(name, attribute);
+            ret.put("value", obj);
+          } catch (Exception e) {
+            logger.error(e);
+            ret.put("error", "'" + mbeanName + "' is not a valid mBean name");
+          }
+        }
+      } else if (JMX_GET_ALL_MBEAN_ATTRIBUTES.equals(ajax)) {
+        if (!hasParam(req, JMX_MBEAN)) {
+          ret.put("error", "Parameters 'mbean' must be set");
+        } else {
+          String mbeanName = getParam(req, JMX_MBEAN);
+          try {
+            ObjectName name = new ObjectName(mbeanName);
+            MBeanInfo info = server.getMBeanInfo(name);
+
+            MBeanAttributeInfo[] mbeanAttrs = info.getAttributes();
+            HashMap<String, Object> attributes = new HashMap<String, Object>();
+
+            for (MBeanAttributeInfo attrInfo : mbeanAttrs) {
+              Object obj = server.getMBeanAttribute(name, attrInfo.getName());
+              attributes.put(attrInfo.getName(), obj);
+            }
+
+            ret.put("attributes", attributes);
+          } catch (Exception e) {
+            logger.error(e);
+            ret.put("error", "'" + mbeanName + "' is not a valid mBean name");
+          }
+        }
+      } else {
+        // Unknown command: list what this endpoint supports.
+        ret.put("commands", new String[] {
+            JMX_GET_MBEANS,
+            JMX_GET_MBEAN_INFO + "&" + JMX_MBEAN + "=<name>",
+            JMX_GET_MBEAN_ATTRIBUTE + "&" + JMX_MBEAN + "=<name>&"
+                + JMX_ATTRIBUTE + "=<attributename>" });
+      }
+      this.writeJSON(resp, ret, true);
+    } else {
+      handleJMXPage(req, resp, session);
+    }
+  }
+
+  /**
+   * Renders the JMX overview page (jmxpage.vm): local mbean names, the
+   * mbeans of every active executor host (queried remotely; hosts that
+   * cannot be contacted are logged and skipped), and the trigger server's
+   * mbeans. Requires the METRICS permission.
+   */
+  private void handleJMXPage(HttpServletRequest req, HttpServletResponse resp,
+      Session session) throws IOException {
+    Page page =
+        newPage(req, resp, session,
+            "azkaban/webapp/servlet/velocity/jmxpage.vm");
+
+    if (!hasPermission(session.getUser(), Permission.Type.METRICS)) {
+      page.add("errorMsg", "User " + session.getUser().getUserId()
+          + " has no permission.");
+      page.render();
+      return;
+    }
+
+    page.add("mbeans", server.getMbeanNames());
+
+    Map<String, Object> executorMBeans = new HashMap<String, Object>();
+    // Set<String> primaryServerHosts = executorManager.getPrimaryServerHosts();
+    for (String hostPort : executorManager.getAllActiveExecutorServerHosts()) {
+      try {
+        Map<String, Object> mbeans =
+            executorManager.callExecutorJMX(hostPort, JMX_GET_MBEANS, null);
+
+        executorMBeans.put(hostPort, mbeans.get("mbeans"));
+        // if (primaryServerHosts.contains(hostPort)) {
+        // executorMBeans.put(hostPort, mbeans.get("mbeans"));
+        // }
+        // else {
+        // executorMBeans.put(hostPort, mbeans.get("mbeans"));
+        // }
+      } catch (IOException e) {
+        // Best-effort: an unreachable executor should not break the page.
+        logger.error("Cannot contact executor " + hostPort, e);
+      }
+    }
+
+    page.add("executorRemoteMBeans", executorMBeans);
+
+    Map<String, Object> triggerserverMBeans = new HashMap<String, Object>();
+    // Set<String> primaryTriggerServerHosts =
+    // triggerManager.getPrimaryServerHosts();
+    // for (String hostPort: triggerManager.getAllActiveTriggerServerHosts()) {
+    // try {
+    // Map<String, Object> mbeans =
+    // triggerManager.callTriggerServerJMX(hostPort,
+    // TriggerConnectorParams.JMX_GET_MBEANS, null);
+    //
+    // if (primaryTriggerServerHosts.contains(hostPort)) {
+    // triggerserverMBeans.put(hostPort, mbeans.get("mbeans"));
+    // }
+    // else {
+    // triggerserverMBeans.put(hostPort, mbeans.get("mbeans"));
+    // }
+    // }
+    // catch (IOException e) {
+    // logger.error("Cannot contact executor " + hostPort, e);
+    // }
+    // }
+    triggerserverMBeans.put(triggerManager.getJMX().getPrimaryServerHost(),
+        triggerManager.getJMX().getAllJMXMbeans());
+
+    page.add("triggerserverRemoteMBeans", triggerserverMBeans);
+
+    page.render();
+  }
+
+  /** POST is not supported by this servlet; intentionally a no-op. */
+  @Override
+  protected void handlePost(HttpServletRequest req, HttpServletResponse resp,
+      Session session) throws ServletException, IOException {
+
+  }
+
+ // private boolean hasAdminRole(User user) {
+ // for(String roleName: user.getRoles()) {
+ // Role role = userManager.getRole(roleName);
+ // Permission perm = role.getPermission();
+ // if (perm.isPermissionSet(Permission.Type.ADMIN)) {
+ // return true;
+ // }
+ // }
+ //
+ // return false;
+ // }
+
+  /**
+   * Returns true when any of the user's roles grants the requested
+   * permission type, or the blanket ADMIN permission.
+   */
+  protected boolean hasPermission(User user, Permission.Type type) {
+    for (String roleName : user.getRoles()) {
+      Permission perm = userManager.getRole(roleName).getPermission();
+      if (perm.isPermissionSet(type)
+          || perm.isPermissionSet(Permission.Type.ADMIN)) {
+        return true;
+      }
+    }
+
+    return false;
+  }
}
diff --git a/src/main/java/azkaban/webapp/servlet/LoginAbstractAzkabanServlet.java b/src/main/java/azkaban/webapp/servlet/LoginAbstractAzkabanServlet.java
index b48e2c9..8497020 100644
--- a/src/main/java/azkaban/webapp/servlet/LoginAbstractAzkabanServlet.java
+++ b/src/main/java/azkaban/webapp/servlet/LoginAbstractAzkabanServlet.java
@@ -48,336 +48,354 @@ import azkaban.webapp.session.Session;
* Abstract Servlet that handles auto login when the session hasn't been
* verified.
*/
-public abstract class LoginAbstractAzkabanServlet extends AbstractAzkabanServlet {
-
- private static final long serialVersionUID = 1L;
-
- private static final Logger logger = Logger.getLogger(LoginAbstractAzkabanServlet.class.getName());
- private static final String SESSION_ID_NAME = "azkaban.browser.session.id";
- private static final int DEFAULT_UPLOAD_DISK_SPOOL_SIZE = 20 * 1024 * 1024;
-
- private static HashMap<String, String> contextType = new HashMap<String,String>();
- static {
- contextType.put(".js", "application/javascript");
- contextType.put(".css", "text/css");
- contextType.put(".png", "image/png");
- contextType.put(".jpeg", "image/jpeg");
- contextType.put(".gif", "image/gif");
- contextType.put(".jpg", "image/jpeg");
- contextType.put(".eot", "application/vnd.ms-fontobject");
- contextType.put(".svg", "image/svg+xml");
- contextType.put(".ttf", "application/octet-stream");
- contextType.put(".woff", "application/x-font-woff");
- }
-
- private File webResourceDirectory = null;
-
- private MultipartParser multipartParser;
-
- @Override
- public void init(ServletConfig config) throws ServletException {
- super.init(config);
-
- multipartParser = new MultipartParser(DEFAULT_UPLOAD_DISK_SPOOL_SIZE);
- }
-
- public void setResourceDirectory(File file) {
- this.webResourceDirectory = file;
- }
-
- @Override
- protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
- // Set session id
- Session session = getSessionFromRequest(req);
- if (hasParam(req, "logout")) {
- resp.sendRedirect(req.getContextPath());
- if (session != null) {
- getApplication().getSessionCache().removeSession(session.getSessionId());
- }
- return;
- }
-
- if (session != null) {
- logger.info("Found session " + session.getUser());
- if (handleFileGet(req, resp)) {
- return;
- }
-
- handleGet(req, resp, session);
- } else {
- if (hasParam(req, "ajax")) {
- HashMap<String, String> retVal = new HashMap<String, String>();
- retVal.put("error", "session");
- this.writeJSON(resp, retVal);
- }
- else {
- handleLogin(req, resp);
- }
- }
- }
-
- private boolean handleFileGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
- if (webResourceDirectory == null) {
- return false;
- }
-
- // Check if it's a resource
- String prefix = req.getContextPath() + req.getServletPath();
- String path = req.getRequestURI().substring(prefix.length());
- int index = path.lastIndexOf('.');
- if (index == -1) {
- return false;
- }
-
- String extension = path.substring(index);
- if (contextType.containsKey(extension)) {
- File file = new File(webResourceDirectory, path);
- if (!file.exists() || !file.isFile()) {
- return false;
- }
-
- resp.setContentType(contextType.get(extension));
-
- OutputStream output = resp.getOutputStream();
- BufferedInputStream input = null;
- try {
- input = new BufferedInputStream(new FileInputStream(file));
- IOUtils.copy(input, output);
- }
- finally {
- if (input != null) {
- input.close();
- }
- }
- output.flush();
- return true;
- }
-
- return false;
- }
-
- private Session getSessionFromRequest(HttpServletRequest req) throws ServletException {
- String remoteIp = req.getRemoteAddr();
- Cookie cookie = getCookieByName(req, SESSION_ID_NAME);
- String sessionId = null;
-
- if (cookie != null) {
- sessionId = cookie.getValue();
- logger.info("Session id " + sessionId);
- }
-
- if (sessionId == null && hasParam(req, "session.id")) {
- sessionId = getParam(req, "session.id");
- }
- return getSessionFromSessionId(sessionId, remoteIp);
- }
-
- private Session getSessionFromSessionId(String sessionId, String remoteIp) {
- if (sessionId == null) {
- return null;
- }
-
- Session session = getApplication().getSessionCache().getSession(sessionId);
- // Check if the IP's are equal. If not, we invalidate the sesson.
- if (session == null || !remoteIp.equals(session.getIp())) {
- return null;
- }
-
- return session;
- }
-
- private void handleLogin(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
- handleLogin(req, resp, null);
- }
-
- private void handleLogin(HttpServletRequest req, HttpServletResponse resp, String errorMsg) throws ServletException, IOException {
- Page page = newPage(req, resp,"azkaban/webapp/servlet/velocity/login.vm");
- if (errorMsg != null) {
- page.add("errorMsg", errorMsg);
- }
-
- page.render();
- }
-
- @Override
- protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
- Session session = getSessionFromRequest(req);
-
- // Handle Multipart differently from other post messages
- if(ServletFileUpload.isMultipartContent(req)) {
- Map<String, Object> params = multipartParser.parseMultipart(req);
- if (session == null) {
- // See if the session id is properly set.
- if (params.containsKey("session.id")) {
- String sessionId = (String)params.get("session.id");
- String ip = req.getRemoteAddr();
-
- session = getSessionFromSessionId(sessionId, ip);
- if (session != null) {
- handleMultiformPost(req, resp, params, session);
- return;
- }
- }
-
- // if there's no valid session, see if it's a one time session.
- if (!params.containsKey("username") || !params.containsKey("password")) {
- writeResponse(resp, "Login error. Need username and password");
- return;
- }
-
- String username = (String)params.get("username");
- String password = (String)params.get("password");
- String ip = req.getRemoteAddr();
-
- try {
- session = createSession(username, password, ip);
- } catch (UserManagerException e) {
- writeResponse(resp, "Login error: " + e.getMessage());
- return;
- }
- }
-
- handleMultiformPost(req, resp, params, session);
- }
- else if (hasParam(req, "action") && getParam(req, "action").equals("login")) {
- HashMap<String,Object> obj = new HashMap<String,Object>();
- handleAjaxLoginAction(req, resp, obj);
- this.writeJSON(resp, obj);
- }
- else if (session == null) {
- if (hasParam(req, "username") && hasParam(req, "password")) {
- // If it's a post command with curl, we create a temporary session
- try {
- session = createSession(req);
- } catch (UserManagerException e) {
- writeResponse(resp, "Login error: " + e.getMessage());
- }
-
- handlePost(req, resp, session);
- }
- else {
- // There are no valid sessions and temporary logins, no we either pass back a message or redirect.
- if (isAjaxCall(req)) {
- String response = createJsonResponse("error", "Invalid Session. Need to re-login", "login", null);
- writeResponse(resp, response);
- }
- else {
- handleLogin(req, resp, "Enter username and password");
- }
- }
- }
- else {
- handlePost(req, resp, session);
- }
- }
-
- private Session createSession(HttpServletRequest req) throws UserManagerException, ServletException {
- String username = getParam(req, "username");
- String password = getParam(req, "password");
- String ip = req.getRemoteAddr();
-
- return createSession(username, password, ip);
- }
-
- private Session createSession(String username, String password, String ip) throws UserManagerException, ServletException {
- UserManager manager = getApplication().getUserManager();
- User user = manager.getUser(username, password);
-
- String randomUID = UUID.randomUUID().toString();
- Session session = new Session(randomUID, user, ip);
-
- return session;
- }
-
- protected boolean hasPermission(Project project, User user, Permission.Type type) {
- UserManager userManager = getApplication().getUserManager();
- if (project.hasPermission(user, type)) {
- return true;
- }
-
- for (String roleName: user.getRoles()) {
- Role role = userManager.getRole(roleName);
- if (role.getPermission().isPermissionSet(type) ||
- role.getPermission().isPermissionSet(Permission.Type.ADMIN)) {
- return true;
- }
- }
-
- return false;
- }
-
- protected void handleAjaxLoginAction(HttpServletRequest req, HttpServletResponse resp, Map<String, Object> ret) throws ServletException {
- if (hasParam(req, "username") && hasParam(req, "password")) {
- Session session = null;
- try {
- session = createSession(req);
- } catch (UserManagerException e) {
- ret.put("error", "Incorrect Login. " + e.getMessage());
- return;
- }
-
- Cookie cookie = new Cookie(SESSION_ID_NAME, session.getSessionId());
- cookie.setPath("/");
- resp.addCookie(cookie);
- getApplication().getSessionCache().addSession(session);
- ret.put("status", "success");
- ret.put("session.id", session.getSessionId());
- }
- else {
- ret.put("error", "Incorrect Login.");
- }
- }
-
- protected void writeResponse(HttpServletResponse resp, String response) throws IOException {
- Writer writer = resp.getWriter();
- writer.append(response);
- writer.flush();
- }
-
- protected boolean isAjaxCall(HttpServletRequest req) throws ServletException {
- String value = req.getHeader("X-Requested-With");
- if (value != null) {
- logger.info("has X-Requested-With " + value);
- return value.equals("XMLHttpRequest");
- }
-
- return false;
- }
-
- /**
- * The get request is handed off to the implementor after the user is logged
- * in.
- *
- * @param req
- * @param resp
- * @param session
- * @throws ServletException
- * @throws IOException
- */
- protected abstract void handleGet(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException;
-
- /**
- * The post request is handed off to the implementor after the user is
- * logged in.
- *
- * @param req
- * @param resp
- * @param session
- * @throws ServletException
- * @throws IOException
- */
- protected abstract void handlePost(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException;
-
- /**
- * The post request is handed off to the implementor after the user is
- * logged in.
- *
- * @param req
- * @param resp
- * @param session
- * @throws ServletException
- * @throws IOException
- */
- protected void handleMultiformPost(HttpServletRequest req, HttpServletResponse resp, Map<String,Object> multipart, Session session) throws ServletException, IOException {
- }
+public abstract class LoginAbstractAzkabanServlet extends
+ AbstractAzkabanServlet {
+
+ private static final long serialVersionUID = 1L;
+
+ private static final Logger logger = Logger
+ .getLogger(LoginAbstractAzkabanServlet.class.getName());
+ private static final String SESSION_ID_NAME = "azkaban.browser.session.id";
+ private static final int DEFAULT_UPLOAD_DISK_SPOOL_SIZE = 20 * 1024 * 1024;
+
+ private static HashMap<String, String> contextType =
+ new HashMap<String, String>();
+ static {
+ contextType.put(".js", "application/javascript");
+ contextType.put(".css", "text/css");
+ contextType.put(".png", "image/png");
+ contextType.put(".jpeg", "image/jpeg");
+ contextType.put(".gif", "image/gif");
+ contextType.put(".jpg", "image/jpeg");
+ contextType.put(".eot", "application/vnd.ms-fontobject");
+ contextType.put(".svg", "image/svg+xml");
+ contextType.put(".ttf", "application/octet-stream");
+ contextType.put(".woff", "application/x-font-woff");
+ }
+
+ private File webResourceDirectory = null;
+
+ private MultipartParser multipartParser;
+
+ @Override
+ public void init(ServletConfig config) throws ServletException {
+ super.init(config);
+
+ multipartParser = new MultipartParser(DEFAULT_UPLOAD_DISK_SPOOL_SIZE);
+ }
+
+ public void setResourceDirectory(File file) {
+ this.webResourceDirectory = file;
+ }
+
+ @Override
+ protected void doGet(HttpServletRequest req, HttpServletResponse resp)
+ throws ServletException, IOException {
+ // Set session id
+ Session session = getSessionFromRequest(req);
+ if (hasParam(req, "logout")) {
+ resp.sendRedirect(req.getContextPath());
+ if (session != null) {
+ getApplication().getSessionCache()
+ .removeSession(session.getSessionId());
+ }
+ return;
+ }
+
+ if (session != null) {
+ logger.info("Found session " + session.getUser());
+ if (handleFileGet(req, resp)) {
+ return;
+ }
+
+ handleGet(req, resp, session);
+ } else {
+ if (hasParam(req, "ajax")) {
+ HashMap<String, String> retVal = new HashMap<String, String>();
+ retVal.put("error", "session");
+ this.writeJSON(resp, retVal);
+ } else {
+ handleLogin(req, resp);
+ }
+ }
+ }
+
+ private boolean handleFileGet(HttpServletRequest req, HttpServletResponse resp)
+ throws IOException {
+ if (webResourceDirectory == null) {
+ return false;
+ }
+
+ // Check if it's a resource
+ String prefix = req.getContextPath() + req.getServletPath();
+ String path = req.getRequestURI().substring(prefix.length());
+ int index = path.lastIndexOf('.');
+ if (index == -1) {
+ return false;
+ }
+
+ String extension = path.substring(index);
+ if (contextType.containsKey(extension)) {
+ File file = new File(webResourceDirectory, path);
+ if (!file.exists() || !file.isFile()) {
+ return false;
+ }
+
+ resp.setContentType(contextType.get(extension));
+
+ OutputStream output = resp.getOutputStream();
+ BufferedInputStream input = null;
+ try {
+ input = new BufferedInputStream(new FileInputStream(file));
+ IOUtils.copy(input, output);
+ } finally {
+ if (input != null) {
+ input.close();
+ }
+ }
+ output.flush();
+ return true;
+ }
+
+ return false;
+ }
+
+ private Session getSessionFromRequest(HttpServletRequest req)
+ throws ServletException {
+ String remoteIp = req.getRemoteAddr();
+ Cookie cookie = getCookieByName(req, SESSION_ID_NAME);
+ String sessionId = null;
+
+ if (cookie != null) {
+ sessionId = cookie.getValue();
+ logger.info("Session id " + sessionId);
+ }
+
+ if (sessionId == null && hasParam(req, "session.id")) {
+ sessionId = getParam(req, "session.id");
+ }
+ return getSessionFromSessionId(sessionId, remoteIp);
+ }
+
+ private Session getSessionFromSessionId(String sessionId, String remoteIp) {
+ if (sessionId == null) {
+ return null;
+ }
+
+ Session session = getApplication().getSessionCache().getSession(sessionId);
+ // Check if the IPs are equal. If not, we invalidate the session.
+ if (session == null || !remoteIp.equals(session.getIp())) {
+ return null;
+ }
+
+ return session;
+ }
+
+ private void handleLogin(HttpServletRequest req, HttpServletResponse resp)
+ throws ServletException, IOException {
+ handleLogin(req, resp, null);
+ }
+
+ private void handleLogin(HttpServletRequest req, HttpServletResponse resp,
+ String errorMsg) throws ServletException, IOException {
+ Page page = newPage(req, resp, "azkaban/webapp/servlet/velocity/login.vm");
+ if (errorMsg != null) {
+ page.add("errorMsg", errorMsg);
+ }
+
+ page.render();
+ }
+
+ @Override
+ protected void doPost(HttpServletRequest req, HttpServletResponse resp)
+ throws ServletException, IOException {
+ Session session = getSessionFromRequest(req);
+
+ // Handle Multipart differently from other post messages
+ if (ServletFileUpload.isMultipartContent(req)) {
+ Map<String, Object> params = multipartParser.parseMultipart(req);
+ if (session == null) {
+ // See if the session id is properly set.
+ if (params.containsKey("session.id")) {
+ String sessionId = (String) params.get("session.id");
+ String ip = req.getRemoteAddr();
+
+ session = getSessionFromSessionId(sessionId, ip);
+ if (session != null) {
+ handleMultiformPost(req, resp, params, session);
+ return;
+ }
+ }
+
+ // if there's no valid session, see if it's a one-time session.
+ if (!params.containsKey("username") || !params.containsKey("password")) {
+ writeResponse(resp, "Login error. Need username and password");
+ return;
+ }
+
+ String username = (String) params.get("username");
+ String password = (String) params.get("password");
+ String ip = req.getRemoteAddr();
+
+ try {
+ session = createSession(username, password, ip);
+ } catch (UserManagerException e) {
+ writeResponse(resp, "Login error: " + e.getMessage());
+ return;
+ }
+ }
+
+ handleMultiformPost(req, resp, params, session);
+ } else if (hasParam(req, "action")
+ && getParam(req, "action").equals("login")) {
+ HashMap<String, Object> obj = new HashMap<String, Object>();
+ handleAjaxLoginAction(req, resp, obj);
+ this.writeJSON(resp, obj);
+ } else if (session == null) {
+ if (hasParam(req, "username") && hasParam(req, "password")) {
+ // If it's a post command with curl, we create a temporary session
+ try {
+ session = createSession(req);
+ } catch (UserManagerException e) {
+ writeResponse(resp, "Login error: " + e.getMessage());
+ }
+
+ handlePost(req, resp, session);
+ } else {
+ // There are no valid sessions or temporary logins, so we either pass
+ // back a message or redirect.
+ if (isAjaxCall(req)) {
+ String response =
+ createJsonResponse("error", "Invalid Session. Need to re-login",
+ "login", null);
+ writeResponse(resp, response);
+ } else {
+ handleLogin(req, resp, "Enter username and password");
+ }
+ }
+ } else {
+ handlePost(req, resp, session);
+ }
+ }
+
+ private Session createSession(HttpServletRequest req)
+ throws UserManagerException, ServletException {
+ String username = getParam(req, "username");
+ String password = getParam(req, "password");
+ String ip = req.getRemoteAddr();
+
+ return createSession(username, password, ip);
+ }
+
+ private Session createSession(String username, String password, String ip)
+ throws UserManagerException, ServletException {
+ UserManager manager = getApplication().getUserManager();
+ User user = manager.getUser(username, password);
+
+ String randomUID = UUID.randomUUID().toString();
+ Session session = new Session(randomUID, user, ip);
+
+ return session;
+ }
+
+ protected boolean hasPermission(Project project, User user,
+ Permission.Type type) {
+ UserManager userManager = getApplication().getUserManager();
+ if (project.hasPermission(user, type)) {
+ return true;
+ }
+
+ for (String roleName : user.getRoles()) {
+ Role role = userManager.getRole(roleName);
+ if (role.getPermission().isPermissionSet(type)
+ || role.getPermission().isPermissionSet(Permission.Type.ADMIN)) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ protected void handleAjaxLoginAction(HttpServletRequest req,
+ HttpServletResponse resp, Map<String, Object> ret)
+ throws ServletException {
+ if (hasParam(req, "username") && hasParam(req, "password")) {
+ Session session = null;
+ try {
+ session = createSession(req);
+ } catch (UserManagerException e) {
+ ret.put("error", "Incorrect Login. " + e.getMessage());
+ return;
+ }
+
+ Cookie cookie = new Cookie(SESSION_ID_NAME, session.getSessionId());
+ cookie.setPath("/");
+ resp.addCookie(cookie);
+ getApplication().getSessionCache().addSession(session);
+ ret.put("status", "success");
+ ret.put("session.id", session.getSessionId());
+ } else {
+ ret.put("error", "Incorrect Login.");
+ }
+ }
+
+ protected void writeResponse(HttpServletResponse resp, String response)
+ throws IOException {
+ Writer writer = resp.getWriter();
+ writer.append(response);
+ writer.flush();
+ }
+
+ protected boolean isAjaxCall(HttpServletRequest req) throws ServletException {
+ String value = req.getHeader("X-Requested-With");
+ if (value != null) {
+ logger.info("has X-Requested-With " + value);
+ return value.equals("XMLHttpRequest");
+ }
+
+ return false;
+ }
+
+ /**
+ * The get request is handed off to the implementor after the user is logged
+ * in.
+ *
+ * @param req
+ * @param resp
+ * @param session
+ * @throws ServletException
+ * @throws IOException
+ */
+ protected abstract void handleGet(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException,
+ IOException;
+
+ /**
+ * The post request is handed off to the implementor after the user is logged
+ * in.
+ *
+ * @param req
+ * @param resp
+ * @param session
+ * @throws ServletException
+ * @throws IOException
+ */
+ protected abstract void handlePost(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException,
+ IOException;
+
+ /**
+ * The post request is handed off to the implementor after the user is logged
+ * in.
+ *
+ * @param req
+ * @param resp
+ * @param session
+ * @throws ServletException
+ * @throws IOException
+ */
+ protected void handleMultiformPost(HttpServletRequest req,
+ HttpServletResponse resp, Map<String, Object> multipart, Session session)
+ throws ServletException, IOException {
+ }
}
diff --git a/src/main/java/azkaban/webapp/servlet/MultipartParser.java b/src/main/java/azkaban/webapp/servlet/MultipartParser.java
index 5bad2fc..3897b77 100644
--- a/src/main/java/azkaban/webapp/servlet/MultipartParser.java
+++ b/src/main/java/azkaban/webapp/servlet/MultipartParser.java
@@ -31,32 +31,32 @@ import org.apache.commons.fileupload.servlet.ServletFileUpload;
public class MultipartParser {
- private DiskFileItemFactory _uploadItemFactory;
-
- public MultipartParser(int spillToDiskSize) {
- _uploadItemFactory = new DiskFileItemFactory();
- _uploadItemFactory.setSizeThreshold(spillToDiskSize);
- }
-
- @SuppressWarnings("unchecked")
- public Map<String, Object> parseMultipart(HttpServletRequest request)
- throws IOException, ServletException {
- ServletFileUpload upload = new ServletFileUpload(_uploadItemFactory);
- List<FileItem> items = null;
- try {
- items = upload.parseRequest(request);
- } catch (FileUploadException e) {
- throw new ServletException(e);
- }
-
- Map<String, Object> params = new HashMap<String, Object>();
- for (FileItem item : items) {
- if (item.isFormField())
- params.put(item.getFieldName(), item.getString());
- else
- params.put(item.getFieldName(), item);
- }
- return params;
- }
+ private DiskFileItemFactory _uploadItemFactory;
+
+ public MultipartParser(int spillToDiskSize) {
+ _uploadItemFactory = new DiskFileItemFactory();
+ _uploadItemFactory.setSizeThreshold(spillToDiskSize);
+ }
+
+ @SuppressWarnings("unchecked")
+ public Map<String, Object> parseMultipart(HttpServletRequest request)
+ throws IOException, ServletException {
+ ServletFileUpload upload = new ServletFileUpload(_uploadItemFactory);
+ List<FileItem> items = null;
+ try {
+ items = upload.parseRequest(request);
+ } catch (FileUploadException e) {
+ throw new ServletException(e);
+ }
+
+ Map<String, Object> params = new HashMap<String, Object>();
+ for (FileItem item : items) {
+ if (item.isFormField())
+ params.put(item.getFieldName(), item.getString());
+ else
+ params.put(item.getFieldName(), item);
+ }
+ return params;
+ }
}
src/main/java/azkaban/webapp/servlet/Page.java 96(+48 -48)
diff --git a/src/main/java/azkaban/webapp/servlet/Page.java b/src/main/java/azkaban/webapp/servlet/Page.java
index 5e48880..df2ba7a 100644
--- a/src/main/java/azkaban/webapp/servlet/Page.java
+++ b/src/main/java/azkaban/webapp/servlet/Page.java
@@ -28,56 +28,56 @@ import azkaban.utils.Utils;
* A page to display
*/
public class Page {
- private static final String DEFAULT_MIME_TYPE = "text/html";
- @SuppressWarnings("unused")
- private final HttpServletRequest request;
- private final HttpServletResponse response;
- private final VelocityEngine engine;
- private final VelocityContext context;
- private final String template;
- private String mimeType = DEFAULT_MIME_TYPE;
+ private static final String DEFAULT_MIME_TYPE = "text/html";
+ @SuppressWarnings("unused")
+ private final HttpServletRequest request;
+ private final HttpServletResponse response;
+ private final VelocityEngine engine;
+ private final VelocityContext context;
+ private final String template;
+ private String mimeType = DEFAULT_MIME_TYPE;
- /**
- * Creates a page and sets up the velocity engine to render
- *
- * @param request
- * @param response
- * @param engine
- * @param template
- */
- public Page(HttpServletRequest request, HttpServletResponse response,
- VelocityEngine engine, String template) {
- this.request = Utils.nonNull(request);
- this.response = Utils.nonNull(response);
- this.engine = Utils.nonNull(engine);
- this.template = Utils.nonNull(template);
- this.context = new VelocityContext();
- this.context.put("session", request.getSession(true));
- this.context.put("context", request.getContextPath());
- }
+ /**
+ * Creates a page and sets up the velocity engine to render
+ *
+ * @param request
+ * @param response
+ * @param engine
+ * @param template
+ */
+ public Page(HttpServletRequest request, HttpServletResponse response,
+ VelocityEngine engine, String template) {
+ this.request = Utils.nonNull(request);
+ this.response = Utils.nonNull(response);
+ this.engine = Utils.nonNull(engine);
+ this.template = Utils.nonNull(template);
+ this.context = new VelocityContext();
+ this.context.put("session", request.getSession(true));
+ this.context.put("context", request.getContextPath());
+ }
- /**
- * Renders the page in UTF-8
- */
- public void render() {
- try {
- response.setHeader ("Content-type", "text/html; charset=UTF-8");
- response.setCharacterEncoding ("UTF-8");
- response.setContentType(mimeType);
- engine.mergeTemplate(template, "UTF-8", context, response.getWriter());
- } catch (Exception e) {
- throw new PageRenderException(e);
- }
- }
+ /**
+ * Renders the page in UTF-8
+ */
+ public void render() {
+ try {
+ response.setHeader("Content-type", "text/html; charset=UTF-8");
+ response.setCharacterEncoding("UTF-8");
+ response.setContentType(mimeType);
+ engine.mergeTemplate(template, "UTF-8", context, response.getWriter());
+ } catch (Exception e) {
+ throw new PageRenderException(e);
+ }
+ }
- /**
- * Adds variables to the velocity context.
- */
- public void add(String name, Object value) {
- context.put(name, value);
- }
+ /**
+ * Adds variables to the velocity context.
+ */
+ public void add(String name, Object value) {
+ context.put(name, value);
+ }
- public void setMimeType(String type) {
- mimeType = type;
- }
+ public void setMimeType(String type) {
+ mimeType = type;
+ }
}
diff --git a/src/main/java/azkaban/webapp/servlet/PageRenderException.java b/src/main/java/azkaban/webapp/servlet/PageRenderException.java
index c05c3ab..2672988 100644
--- a/src/main/java/azkaban/webapp/servlet/PageRenderException.java
+++ b/src/main/java/azkaban/webapp/servlet/PageRenderException.java
@@ -20,9 +20,9 @@ package azkaban.webapp.servlet;
* Thrown if there is an error rendering the page
*/
public class PageRenderException extends RuntimeException {
- private static final long serialVersionUID = -1;
+ private static final long serialVersionUID = -1;
- public PageRenderException(Throwable cause) {
- super(cause);
- }
+ public PageRenderException(Throwable cause) {
+ super(cause);
+ }
}
src/main/java/azkaban/webapp/servlet/ProjectManagerServlet.java 2612(+1344 -1268)
diff --git a/src/main/java/azkaban/webapp/servlet/ProjectManagerServlet.java b/src/main/java/azkaban/webapp/servlet/ProjectManagerServlet.java
index f8811fa..868b7d9 100644
--- a/src/main/java/azkaban/webapp/servlet/ProjectManagerServlet.java
+++ b/src/main/java/azkaban/webapp/servlet/ProjectManagerServlet.java
@@ -73,253 +73,244 @@ import azkaban.webapp.AzkabanWebServer;
import azkaban.webapp.session.Session;
public class ProjectManagerServlet extends LoginAbstractAzkabanServlet {
- private static final long serialVersionUID = 1;
- private static final Logger logger = Logger.getLogger(ProjectManagerServlet.class);
- private static final NodeLevelComparator NODE_LEVEL_COMPARATOR = new NodeLevelComparator();
- private static final String LOCKDOWN_CREATE_PROJECTS_KEY = "lockdown.create.projects";
-
- private ProjectManager projectManager;
- private ExecutorManagerAdapter executorManager;
- private ScheduleManager scheduleManager;
- private UserManager userManager;
-
- private boolean lockdownCreateProjects = false;
-
- private static Comparator<Flow> FLOW_ID_COMPARATOR = new Comparator<Flow>() {
- @Override
- public int compare(Flow f1, Flow f2) {
- return f1.getId().compareTo(f2.getId());
- }
- };
-
- @Override
- public void init(ServletConfig config) throws ServletException {
- super.init(config);
-
- AzkabanWebServer server = (AzkabanWebServer)getApplication();
- projectManager = server.getProjectManager();
- executorManager = server.getExecutorManager();
- scheduleManager = server.getScheduleManager();
- userManager = server.getUserManager();
- lockdownCreateProjects = server.getServerProps().getBoolean(LOCKDOWN_CREATE_PROJECTS_KEY, false);
- if (lockdownCreateProjects) {
- logger.info("Creation of projects is locked down");
- }
- }
-
- @Override
- protected void handleGet(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- if (hasParam(req, "project") ) {
- if (hasParam(req, "ajax")) {
- handleAJAXAction(req, resp, session);
- }
- else if (hasParam(req, "logs")) {
- handleProjectLogsPage(req, resp, session);
- }
- else if (hasParam(req, "permissions")) {
- handlePermissionPage(req, resp, session);
- }
- else if (hasParam(req, "prop")) {
- handlePropertyPage(req, resp, session);
- }
- else if (hasParam(req, "history")) {
- handleJobHistoryPage(req, resp, session);
- }
- else if (hasParam(req, "job")) {
- handleJobPage(req, resp, session);
- }
- else if (hasParam(req, "flow")) {
- handleFlowPage(req, resp, session);
- }
- else if (hasParam(req, "delete")) {
- handleRemoveProject(req, resp, session);
- }
- else {
- handleProjectPage(req, resp, session);
- }
- return;
- }
-
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/projectpage.vm");
- page.add("errorMsg", "No project set.");
- page.render();
- }
-
- @Override
- protected void handleMultiformPost(HttpServletRequest req, HttpServletResponse resp, Map<String, Object> params, Session session) throws ServletException, IOException {
- // Looks like a duplicate, but this is a move away from the regular multiform post + redirect
- // to a more ajax like command.
- if (params.containsKey("ajax")) {
- String action = (String)params.get("ajax");
- HashMap<String, String> ret = new HashMap<String, String>();
- if (action.equals("upload")) {
- ajaxHandleUpload(req, ret, params, session);
- }
- this.writeJSON(resp, ret);
- }
- else if (params.containsKey("action")) {
- String action = (String)params.get("action");
- if (action.equals("upload")) {
- handleUpload(req, resp, params, session);
- }
- }
- }
-
- @Override
- protected void handlePost(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- if (hasParam(req, "action")) {
- String action = getParam(req, "action");
- if (action.equals("create")) {
- handleCreate(req, resp, session);
- }
- }
- }
-
- private void handleAJAXAction(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- String projectName = getParam(req, "project");
- User user = session.getUser();
-
- HashMap<String, Object> ret = new HashMap<String, Object>();
- ret.put("project", projectName);
-
- Project project = projectManager.getProject(projectName);
- if (project == null) {
- ret.put("error", "Project " + projectName + " doesn't exist.");
- return;
- }
-
- ret.put("projectId", project.getId());
- String ajaxName = getParam(req, "ajax");
- if (ajaxName.equals("fetchProjectLogs")) {
- if (handleAjaxPermission(project, user, Type.READ, ret)) {
- ajaxFetchProjectLogEvents(project, req, resp, ret, user);
- }
- }
- else if (ajaxName.equals("fetchflowjobs")) {
- if (handleAjaxPermission(project, user, Type.READ, ret)) {
- ajaxFetchFlow(project, ret, req, resp);
- }
- }
- else if (ajaxName.equals("fetchflowdetails")) {
- if (handleAjaxPermission(project, user, Type.READ, ret)) {
- ajaxFetchFlowDetails(project, ret, req);
- }
- }
- else if (ajaxName.equals("fetchflowgraph")) {
- if (handleAjaxPermission(project, user, Type.READ, ret)) {
- ajaxFetchFlowGraph(project, ret, req);
- }
- }
- else if (ajaxName.equals("fetchflownodedata")) {
- if (handleAjaxPermission(project, user, Type.READ, ret)) {
- ajaxFetchFlowNodeData(project, ret, req);
- }
- }
- else if (ajaxName.equals("fetchprojectflows")) {
- if (handleAjaxPermission(project, user, Type.READ, ret)) {
- ajaxFetchProjectFlows(project, ret, req);
- }
- }
- else if (ajaxName.equals("changeDescription")) {
- if (handleAjaxPermission(project, user, Type.WRITE, ret)) {
- ajaxChangeDescription(project, ret, req, user);
- }
- }
- else if (ajaxName.equals("getPermissions")) {
- if (handleAjaxPermission(project, user, Type.READ, ret)) {
- ajaxGetPermissions(project, ret);
- }
- }
- else if (ajaxName.equals("changePermission")) {
- if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
- ajaxChangePermissions(project, ret, req, user);
- }
- }
- else if (ajaxName.equals("addPermission")) {
- if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
- ajaxAddPermission(project, ret, req, user);
- }
- }
- else if (ajaxName.equals("addProxyUser")) {
- if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
- ajaxAddProxyUser(project, ret, req, user);
- }
- }
- else if (ajaxName.equals("removeProxyUser")) {
- if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
- ajaxRemoveProxyUser(project, ret, req, user);
- }
- }
- else if (ajaxName.equals("fetchFlowExecutions")) {
- if (handleAjaxPermission(project, user, Type.READ, ret)) {
- ajaxFetchFlowExecutions(project, ret, req);
- }
- }
- else if (ajaxName.equals("fetchLastSuccessfulFlowExecution")) {
- if (handleAjaxPermission(project, user, Type.READ, ret)) {
- ajaxFetchLastSuccessfulFlowExecution(project, ret, req);
- }
- }
- else if (ajaxName.equals("fetchJobInfo")) {
- if (handleAjaxPermission(project, user, Type.READ, ret)) {
- ajaxFetchJobInfo(project, ret, req);
- }
- }
- else if (ajaxName.equals("setJobOverrideProperty")) {
- if (handleAjaxPermission(project, user, Type.WRITE, ret)) {
- ajaxSetJobOverrideProperty(project, ret, req);
- }
- }
- else {
- ret.put("error", "Cannot execute command " + ajaxName);
- }
-
- if (ret != null) {
- this.writeJSON(resp, ret);
- }
- }
-
- private boolean handleAjaxPermission(Project project, User user, Type type, Map<String, Object> ret) {
- if (hasPermission(project,user,type)) {
- return true;
- }
-
- ret.put("error", "Permission denied. Need " + type.toString() + " access.");
- return false;
- }
-
- private void ajaxFetchProjectLogEvents(Project project, HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> ret, User user) throws ServletException {
- if (!hasPermission(project,user,Type.READ)) {
- ret.put("error", "Permission denied. Need READ access.");
- return;
- }
-
- int num = this.getIntParam(req, "size", 1000);
- int skip = this.getIntParam(req, "skip", 0);
-
- List<ProjectLogEvent> logEvents = null;
- try {
- logEvents = projectManager.getProjectEventLogs(project, num, skip);
- } catch (ProjectManagerException e) {
- throw new ServletException(e);
- }
-
- String[] columns = new String[] {"user", "time", "type", "message"};
- ret.put("columns", columns);
-
- List<Object[]> eventData = new ArrayList<Object[]>();
- for (ProjectLogEvent events: logEvents) {
- Object[] entry = new Object[4];
- entry[0] = events.getUser();
- entry[1] = events.getTime();
- entry[2] = events.getType();
- entry[3] = events.getMessage();
-
- eventData.add(entry);
- }
-
- ret.put("logData", eventData);
- }
+ private static final long serialVersionUID = 1;
+ private static final Logger logger = Logger
+ .getLogger(ProjectManagerServlet.class);
+ private static final NodeLevelComparator NODE_LEVEL_COMPARATOR =
+ new NodeLevelComparator();
+ private static final String LOCKDOWN_CREATE_PROJECTS_KEY =
+ "lockdown.create.projects";
+
+ private ProjectManager projectManager;
+ private ExecutorManagerAdapter executorManager;
+ private ScheduleManager scheduleManager;
+ private UserManager userManager;
+
+ private boolean lockdownCreateProjects = false;
+
+ private static Comparator<Flow> FLOW_ID_COMPARATOR = new Comparator<Flow>() {
+ @Override
+ public int compare(Flow f1, Flow f2) {
+ return f1.getId().compareTo(f2.getId());
+ }
+ };
+
+ @Override
+ public void init(ServletConfig config) throws ServletException {
+ super.init(config);
+
+ AzkabanWebServer server = (AzkabanWebServer) getApplication();
+ projectManager = server.getProjectManager();
+ executorManager = server.getExecutorManager();
+ scheduleManager = server.getScheduleManager();
+ userManager = server.getUserManager();
+ lockdownCreateProjects =
+ server.getServerProps().getBoolean(LOCKDOWN_CREATE_PROJECTS_KEY, false);
+ if (lockdownCreateProjects) {
+ logger.info("Creation of projects is locked down");
+ }
+ }
+
+ @Override
+ protected void handleGet(HttpServletRequest req, HttpServletResponse resp,
+ Session session) throws ServletException, IOException {
+ if (hasParam(req, "project")) {
+ if (hasParam(req, "ajax")) {
+ handleAJAXAction(req, resp, session);
+ } else if (hasParam(req, "logs")) {
+ handleProjectLogsPage(req, resp, session);
+ } else if (hasParam(req, "permissions")) {
+ handlePermissionPage(req, resp, session);
+ } else if (hasParam(req, "prop")) {
+ handlePropertyPage(req, resp, session);
+ } else if (hasParam(req, "history")) {
+ handleJobHistoryPage(req, resp, session);
+ } else if (hasParam(req, "job")) {
+ handleJobPage(req, resp, session);
+ } else if (hasParam(req, "flow")) {
+ handleFlowPage(req, resp, session);
+ } else if (hasParam(req, "delete")) {
+ handleRemoveProject(req, resp, session);
+ } else {
+ handleProjectPage(req, resp, session);
+ }
+ return;
+ }
+
+ Page page =
+ newPage(req, resp, session,
+ "azkaban/webapp/servlet/velocity/projectpage.vm");
+ page.add("errorMsg", "No project set.");
+ page.render();
+ }
+
+ @Override
+ protected void handleMultiformPost(HttpServletRequest req,
+ HttpServletResponse resp, Map<String, Object> params, Session session)
+ throws ServletException, IOException {
+ // Looks like a duplicate, but this is a move away from the regular
+ // multiform post + redirect
+ // to a more ajax like command.
+ if (params.containsKey("ajax")) {
+ String action = (String) params.get("ajax");
+ HashMap<String, String> ret = new HashMap<String, String>();
+ if (action.equals("upload")) {
+ ajaxHandleUpload(req, ret, params, session);
+ }
+ this.writeJSON(resp, ret);
+ } else if (params.containsKey("action")) {
+ String action = (String) params.get("action");
+ if (action.equals("upload")) {
+ handleUpload(req, resp, params, session);
+ }
+ }
+ }
+
+ @Override
+ protected void handlePost(HttpServletRequest req, HttpServletResponse resp,
+ Session session) throws ServletException, IOException {
+ if (hasParam(req, "action")) {
+ String action = getParam(req, "action");
+ if (action.equals("create")) {
+ handleCreate(req, resp, session);
+ }
+ }
+ }
+
+ private void handleAJAXAction(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException,
+ IOException {
+ String projectName = getParam(req, "project");
+ User user = session.getUser();
+
+ HashMap<String, Object> ret = new HashMap<String, Object>();
+ ret.put("project", projectName);
+
+ Project project = projectManager.getProject(projectName);
+ if (project == null) {
+ ret.put("error", "Project " + projectName + " doesn't exist.");
+ return;
+ }
+
+ ret.put("projectId", project.getId());
+ String ajaxName = getParam(req, "ajax");
+ if (ajaxName.equals("fetchProjectLogs")) {
+ if (handleAjaxPermission(project, user, Type.READ, ret)) {
+ ajaxFetchProjectLogEvents(project, req, resp, ret, user);
+ }
+ } else if (ajaxName.equals("fetchflowjobs")) {
+ if (handleAjaxPermission(project, user, Type.READ, ret)) {
+ ajaxFetchFlow(project, ret, req, resp);
+ }
+ } else if (ajaxName.equals("fetchflowdetails")) {
+ if (handleAjaxPermission(project, user, Type.READ, ret)) {
+ ajaxFetchFlowDetails(project, ret, req);
+ }
+ } else if (ajaxName.equals("fetchflowgraph")) {
+ if (handleAjaxPermission(project, user, Type.READ, ret)) {
+ ajaxFetchFlowGraph(project, ret, req);
+ }
+ } else if (ajaxName.equals("fetchflownodedata")) {
+ if (handleAjaxPermission(project, user, Type.READ, ret)) {
+ ajaxFetchFlowNodeData(project, ret, req);
+ }
+ } else if (ajaxName.equals("fetchprojectflows")) {
+ if (handleAjaxPermission(project, user, Type.READ, ret)) {
+ ajaxFetchProjectFlows(project, ret, req);
+ }
+ } else if (ajaxName.equals("changeDescription")) {
+ if (handleAjaxPermission(project, user, Type.WRITE, ret)) {
+ ajaxChangeDescription(project, ret, req, user);
+ }
+ } else if (ajaxName.equals("getPermissions")) {
+ if (handleAjaxPermission(project, user, Type.READ, ret)) {
+ ajaxGetPermissions(project, ret);
+ }
+ } else if (ajaxName.equals("changePermission")) {
+ if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
+ ajaxChangePermissions(project, ret, req, user);
+ }
+ } else if (ajaxName.equals("addPermission")) {
+ if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
+ ajaxAddPermission(project, ret, req, user);
+ }
+ } else if (ajaxName.equals("addProxyUser")) {
+ if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
+ ajaxAddProxyUser(project, ret, req, user);
+ }
+ } else if (ajaxName.equals("removeProxyUser")) {
+ if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
+ ajaxRemoveProxyUser(project, ret, req, user);
+ }
+ } else if (ajaxName.equals("fetchFlowExecutions")) {
+ if (handleAjaxPermission(project, user, Type.READ, ret)) {
+ ajaxFetchFlowExecutions(project, ret, req);
+ }
+ } else if (ajaxName.equals("fetchLastSuccessfulFlowExecution")) {
+ if (handleAjaxPermission(project, user, Type.READ, ret)) {
+ ajaxFetchLastSuccessfulFlowExecution(project, ret, req);
+ }
+ } else if (ajaxName.equals("fetchJobInfo")) {
+ if (handleAjaxPermission(project, user, Type.READ, ret)) {
+ ajaxFetchJobInfo(project, ret, req);
+ }
+ } else if (ajaxName.equals("setJobOverrideProperty")) {
+ if (handleAjaxPermission(project, user, Type.WRITE, ret)) {
+ ajaxSetJobOverrideProperty(project, ret, req);
+ }
+ } else {
+ ret.put("error", "Cannot execute command " + ajaxName);
+ }
+
+ if (ret != null) {
+ this.writeJSON(resp, ret);
+ }
+ }
+
+ private boolean handleAjaxPermission(Project project, User user, Type type,
+ Map<String, Object> ret) {
+ if (hasPermission(project, user, type)) {
+ return true;
+ }
+
+ ret.put("error", "Permission denied. Need " + type.toString() + " access.");
+ return false;
+ }
+
+ private void ajaxFetchProjectLogEvents(Project project,
+ HttpServletRequest req, HttpServletResponse resp,
+ HashMap<String, Object> ret, User user) throws ServletException {
+ if (!hasPermission(project, user, Type.READ)) {
+ ret.put("error", "Permission denied. Need READ access.");
+ return;
+ }
+
+ int num = this.getIntParam(req, "size", 1000);
+ int skip = this.getIntParam(req, "skip", 0);
+
+ List<ProjectLogEvent> logEvents = null;
+ try {
+ logEvents = projectManager.getProjectEventLogs(project, num, skip);
+ } catch (ProjectManagerException e) {
+ throw new ServletException(e);
+ }
+
+ String[] columns = new String[] { "user", "time", "type", "message" };
+ ret.put("columns", columns);
+
+ List<Object[]> eventData = new ArrayList<Object[]>();
+ for (ProjectLogEvent events : logEvents) {
+ Object[] entry = new Object[4];
+ entry[0] = events.getUser();
+ entry[1] = events.getTime();
+ entry[2] = events.getType();
+ entry[3] = events.getMessage();
+
+ eventData.add(entry);
+ }
+
+ ret.put("logData", eventData);
+ }
private List<String> getFlowJobTypes(Flow flow) {
Set<String> jobTypeSet = new HashSet<String>();
@@ -330,14 +321,14 @@ public class ProjectManagerServlet extends LoginAbstractAzkabanServlet {
jobTypes.addAll(jobTypeSet);
return jobTypes;
}
-
- private void ajaxFetchFlowDetails(Project project,
- HashMap<String, Object> ret, HttpServletRequest req)
+
+ private void ajaxFetchFlowDetails(Project project,
+ HashMap<String, Object> ret, HttpServletRequest req)
throws ServletException {
- String flowName = getParam(req, "flow");
+ String flowName = getParam(req, "flow");
- Flow flow = null;
- try {
+ Flow flow = null;
+ try {
flow = project.getFlow(flowName);
if (flow == null) {
ret.put("error", "Flow " + flowName + " not found.");
@@ -345,10 +336,9 @@ public class ProjectManagerServlet extends LoginAbstractAzkabanServlet {
}
ret.put("jobTypes", getFlowJobTypes(flow));
- }
- catch (AccessControlException e) {
- ret.put("error", e.getMessage());
- }
+ } catch (AccessControlException e) {
+ ret.put("error", e.getMessage());
+ }
}
private void ajaxFetchLastSuccessfulFlowExecution(Project project,
@@ -357,706 +347,774 @@ public class ProjectManagerServlet extends LoginAbstractAzkabanServlet {
String flowId = getParam(req, "flow");
List<ExecutableFlow> exFlows = null;
try {
- exFlows = executorManager.getExecutableFlows(
- project.getId(), flowId, 0, 1, Status.SUCCEEDED);
- }
- catch (ExecutorManagerException e) {
- ret.put("error", "Error retrieving executable flows");
- return;
- }
-
- if (exFlows.size() == 0) {
- ret.put("success", "false");
- ret.put("message", "This flow has no successful run.");
- return;
- }
-
- ret.put("success", "true");
- ret.put("message", "");
- ret.put("execId", exFlows.get(0).getExecutionId());
+ exFlows =
+ executorManager.getExecutableFlows(project.getId(), flowId, 0, 1,
+ Status.SUCCEEDED);
+ } catch (ExecutorManagerException e) {
+ ret.put("error", "Error retrieving executable flows");
+ return;
+ }
+
+ if (exFlows.size() == 0) {
+ ret.put("success", "false");
+ ret.put("message", "This flow has no successful run.");
+ return;
+ }
+
+ ret.put("success", "true");
+ ret.put("message", "");
+ ret.put("execId", exFlows.get(0).getExecutionId());
+ }
+
+ private void ajaxFetchFlowExecutions(Project project,
+ HashMap<String, Object> ret, HttpServletRequest req)
+ throws ServletException {
+ String flowId = getParam(req, "flow");
+ int from = Integer.valueOf(getParam(req, "start"));
+ int length = Integer.valueOf(getParam(req, "length"));
+
+ ArrayList<ExecutableFlow> exFlows = new ArrayList<ExecutableFlow>();
+ int total = 0;
+ try {
+ total =
+ executorManager.getExecutableFlows(project.getId(), flowId, from,
+ length, exFlows);
+ } catch (ExecutorManagerException e) {
+ ret.put("error", "Error retrieving executable flows");
+ }
+
+ ret.put("flow", flowId);
+ ret.put("total", total);
+ ret.put("from", from);
+ ret.put("length", length);
+
+ ArrayList<Object> history = new ArrayList<Object>();
+ for (ExecutableFlow flow : exFlows) {
+ HashMap<String, Object> flowInfo = new HashMap<String, Object>();
+ flowInfo.put("execId", flow.getExecutionId());
+ flowInfo.put("flowId", flow.getFlowId());
+ flowInfo.put("projectId", flow.getProjectId());
+ flowInfo.put("status", flow.getStatus().toString());
+ flowInfo.put("submitTime", flow.getSubmitTime());
+ flowInfo.put("startTime", flow.getStartTime());
+ flowInfo.put("endTime", flow.getEndTime());
+ flowInfo.put("submitUser", flow.getSubmitUser());
+
+ history.add(flowInfo);
+ }
+
+ ret.put("executions", history);
+ }
+
+ private void handleRemoveProject(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException,
+ IOException {
+ User user = session.getUser();
+ String projectName = getParam(req, "project");
+
+ Project project = projectManager.getProject(projectName);
+ if (project == null) {
+ this.setErrorMessageInCookie(resp, "Project " + projectName
+ + " doesn't exist.");
+ resp.sendRedirect(req.getContextPath());
+ return;
+ }
+
+ if (!hasPermission(project, user, Type.ADMIN)) {
+ this.setErrorMessageInCookie(resp,
+ "Cannot delete. User '" + user.getUserId() + "' is not an ADMIN.");
+ resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
+ return;
+ }
+
+ // Check if scheduled
+ Schedule sflow = null;
+ try {
+ for (Schedule flow : scheduleManager.getSchedules()) {
+
+ if (flow.getProjectId() == project.getId()) {
+ sflow = flow;
+ break;
+ }
+ }
+ } catch (ScheduleManagerException e) {
+ // TODO Auto-generated catch block
+ throw new ServletException(e);
+ }
+
+ if (sflow != null) {
+ this.setErrorMessageInCookie(resp, "Cannot delete. Please unschedule "
+ + sflow.getScheduleName() + ".");
+
+ resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
+ return;
+ }
+
+ // Check if executing
+ ExecutableFlow exflow = null;
+ for (ExecutableFlow flow : executorManager.getRunningFlows()) {
+ if (flow.getProjectId() == project.getId()) {
+ exflow = flow;
+ break;
+ }
+ }
+ if (exflow != null) {
+ this.setErrorMessageInCookie(resp, "Cannot delete. Executable flow "
+ + exflow.getExecutionId() + " is still running.");
+ resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
+ return;
+ }
+
+ // project.info("Project removing by '" + user.getUserId() + "'");
+ try {
+ projectManager.removeProject(project, user);
+ } catch (ProjectManagerException e) {
+ this.setErrorMessageInCookie(resp, e.getMessage());
+ resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
+ return;
+ }
+
+ this.setSuccessMessageInCookie(resp, "Project '" + projectName
+ + "' was successfully deleted.");
+ resp.sendRedirect(req.getContextPath());
+ }
+
+ private void ajaxChangeDescription(Project project,
+ HashMap<String, Object> ret, HttpServletRequest req, User user)
+ throws ServletException {
+ String description = getParam(req, "description");
+ project.setDescription(description);
+
+ try {
+ projectManager.updateProjectDescription(project, description, user);
+ } catch (ProjectManagerException e) {
+ ret.put("error", e.getMessage());
+ }
+ }
+
+ private void ajaxFetchJobInfo(Project project, HashMap<String, Object> ret,
+ HttpServletRequest req) throws ServletException {
+ String flowName = getParam(req, "flowName");
+ String jobName = getParam(req, "jobName");
+
+ Flow flow = project.getFlow(flowName);
+ if (flow == null) {
+ ret.put("error",
+ "Flow " + flowName + " not found in project " + project.getName());
+ return;
+ }
+
+ Node node = flow.getNode(jobName);
+ if (node == null) {
+ ret.put("error", "Job " + jobName + " not found in flow " + flowName);
+ return;
+ }
+
+ Props prop;
+ try {
+ prop = projectManager.getProperties(project, node.getJobSource());
+ } catch (ProjectManagerException e) {
+ ret.put("error", "Failed to retrieve job properties!");
+ return;
+ }
+
+ Props overrideProp;
+ try {
+ overrideProp = projectManager.getJobOverrideProperty(project, jobName);
+ } catch (ProjectManagerException e) {
+ ret.put("error", "Failed to retrieve job override properties!");
+ return;
+ }
+
+ ret.put("jobName", node.getId());
+ ret.put("jobType", prop.get("type"));
+
+ if (overrideProp == null) {
+ overrideProp = new Props(prop);
+ }
+
+ Map<String, String> generalParams = new HashMap<String, String>();
+ Map<String, String> overrideParams = new HashMap<String, String>();
+ for (String ps : prop.getKeySet()) {
+ generalParams.put(ps, prop.getString(ps));
+ }
+ for (String ops : overrideProp.getKeySet()) {
+ // generalParams.put(ops, overrideProp.getString(ops));
+ overrideParams.put(ops, overrideProp.getString(ops));
+ }
+ ret.put("generalParams", generalParams);
+ ret.put("overrideParams", overrideParams);
+ }
+
+ private void ajaxSetJobOverrideProperty(Project project,
+ HashMap<String, Object> ret, HttpServletRequest req)
+ throws ServletException {
+ String flowName = getParam(req, "flowName");
+ String jobName = getParam(req, "jobName");
+
+ Flow flow = project.getFlow(flowName);
+ if (flow == null) {
+ ret.put("error",
+ "Flow " + flowName + " not found in project " + project.getName());
+ return;
+ }
+
+ Node node = flow.getNode(jobName);
+ if (node == null) {
+ ret.put("error", "Job " + jobName + " not found in flow " + flowName);
+ return;
+ }
+
+ Map<String, String> jobParamGroup = this.getParamGroup(req, "jobOverride");
+ @SuppressWarnings("unchecked")
+ Props overrideParams = new Props(null, jobParamGroup);
+ try {
+ projectManager.setJobOverrideProperty(project, overrideParams, jobName);
+ } catch (ProjectManagerException e) {
+ ret.put("error", "Failed to upload job override property");
+ }
+
+ }
+
+ private void ajaxFetchProjectFlows(Project project,
+ HashMap<String, Object> ret, HttpServletRequest req)
+ throws ServletException {
+ ArrayList<Map<String, Object>> flowList =
+ new ArrayList<Map<String, Object>>();
+ for (Flow flow : project.getFlows()) {
+ HashMap<String, Object> flowObj = new HashMap<String, Object>();
+ flowObj.put("flowId", flow.getId());
+ flowList.add(flowObj);
+ }
+
+ ret.put("flows", flowList);
+ }
+
+ private void ajaxFetchFlowGraph(Project project, HashMap<String, Object> ret,
+ HttpServletRequest req) throws ServletException {
+ String flowId = getParam(req, "flow");
+
+ fillFlowInfo(project, flowId, ret);
+ }
+
+ private void fillFlowInfo(Project project, String flowId,
+ HashMap<String, Object> ret) {
+ Flow flow = project.getFlow(flowId);
+
+ ArrayList<Map<String, Object>> nodeList =
+ new ArrayList<Map<String, Object>>();
+ for (Node node : flow.getNodes()) {
+ HashMap<String, Object> nodeObj = new HashMap<String, Object>();
+ nodeObj.put("id", node.getId());
+ nodeObj.put("type", node.getType());
+ if (node.getEmbeddedFlowId() != null) {
+ nodeObj.put("flowId", node.getEmbeddedFlowId());
+ // HashMap<String, Object> embeddedNodeObj = new HashMap<String,
+ // Object>();
+ fillFlowInfo(project, node.getEmbeddedFlowId(), nodeObj);
+ // nodeObj.put("flowData", embeddedNodeObj);
+ }
+
+ nodeList.add(nodeObj);
+ Set<Edge> inEdges = flow.getInEdges(node.getId());
+ if (inEdges != null && !inEdges.isEmpty()) {
+ ArrayList<String> inEdgesList = new ArrayList<String>();
+ for (Edge edge : inEdges) {
+ inEdgesList.add(edge.getSourceId());
+ }
+ Collections.sort(inEdgesList);
+ nodeObj.put("in", inEdgesList);
+ }
+ }
+
+ Collections.sort(nodeList, new Comparator<Map<String, Object>>() {
+ @Override
+ public int compare(Map<String, Object> o1, Map<String, Object> o2) {
+ String id = (String) o1.get("id");
+ return id.compareTo((String) o2.get("id"));
+ }
+ });
+
+ ret.put("flow", flowId);
+ ret.put("nodes", nodeList);
+ }
+
+ private void ajaxFetchFlowNodeData(Project project,
+ HashMap<String, Object> ret, HttpServletRequest req)
+ throws ServletException {
+ String flowId = getParam(req, "flow");
+ Flow flow = project.getFlow(flowId);
+
+ String nodeId = getParam(req, "node");
+ Node node = flow.getNode(nodeId);
+
+ if (node == null) {
+ ret.put("error", "Job " + nodeId + " doesn't exist.");
+ return;
+ }
+
+ ret.put("id", nodeId);
+ ret.put("flow", flowId);
+ ret.put("type", node.getType());
+
+ Props props;
+ try {
+ props = projectManager.getProperties(project, node.getJobSource());
+ } catch (ProjectManagerException e) {
+ ret.put("error", "Failed to upload job override property for " + nodeId);
+ return;
+ }
+
+ if (props == null) {
+ ret.put("error", "Properties for " + nodeId + " isn't found.");
+ return;
+ }
+
+ Map<String, String> properties = PropsUtils.toStringMap(props, true);
+ ret.put("props", properties);
+
+ if (node.getType().equals("flow")) {
+ if (node.getEmbeddedFlowId() != null) {
+ fillFlowInfo(project, node.getEmbeddedFlowId(), ret);
+ }
+ }
+ }
+
+ private void ajaxFetchFlow(Project project, HashMap<String, Object> ret,
+ HttpServletRequest req, HttpServletResponse resp) throws ServletException {
+ String flowId = getParam(req, "flow");
+ Flow flow = project.getFlow(flowId);
+
+ ArrayList<Node> flowNodes = new ArrayList<Node>(flow.getNodes());
+ Collections.sort(flowNodes, NODE_LEVEL_COMPARATOR);
+
+ ArrayList<Object> nodeList = new ArrayList<Object>();
+ for (Node node : flowNodes) {
+ HashMap<String, Object> nodeObj = new HashMap<String, Object>();
+ nodeObj.put("id", node.getId());
+
+ ArrayList<String> dependencies = new ArrayList<String>();
+ Collection<Edge> collection = flow.getInEdges(node.getId());
+ if (collection != null) {
+ for (Edge edge : collection) {
+ dependencies.add(edge.getSourceId());
+ }
+ }
+
+ ArrayList<String> dependents = new ArrayList<String>();
+ collection = flow.getOutEdges(node.getId());
+ if (collection != null) {
+ for (Edge edge : collection) {
+ dependents.add(edge.getTargetId());
+ }
+ }
+
+ nodeObj.put("dependencies", dependencies);
+ nodeObj.put("dependents", dependents);
+ nodeObj.put("level", node.getLevel());
+ nodeList.add(nodeObj);
+ }
+
+ ret.put("flowId", flowId);
+ ret.put("nodes", nodeList);
+ }
+
+ private void ajaxAddProxyUser(Project project, HashMap<String, Object> ret,
+ HttpServletRequest req, User user) throws ServletException {
+ String name = getParam(req, "name");
+
+ logger.info("Adding proxy user " + name + " by " + user.getUserId());
+ if (userManager.validateProxyUser(name, user)) {
+ try {
+ projectManager.addProjectProxyUser(project, name, user);
+ } catch (ProjectManagerException e) {
+ ret.put("error", e.getMessage());
+ }
+ } else {
+ ret.put("error", "User " + user.getUserId()
+ + " has no permission to add " + name + " as proxy user.");
+ return;
+ }
+ }
+
+ private void ajaxRemoveProxyUser(Project project,
+ HashMap<String, Object> ret, HttpServletRequest req, User user)
+ throws ServletException {
+ String name = getParam(req, "name");
+
+ logger.info("Removing proxy user " + name + " by " + user.getUserId());
+
+ try {
+ projectManager.removeProjectProxyUser(project, name, user);
+ } catch (ProjectManagerException e) {
+ ret.put("error", e.getMessage());
+ }
+ }
+
+ private void ajaxAddPermission(Project project, HashMap<String, Object> ret,
+ HttpServletRequest req, User user) throws ServletException {
+ String name = getParam(req, "name");
+ boolean group = Boolean.parseBoolean(getParam(req, "group"));
+
+ if (group) {
+ if (project.getGroupPermission(name) != null) {
+ ret.put("error", "Group permission already exists.");
+ return;
+ }
+ if (!userManager.validateGroup(name)) {
+ ret.put("error", "Group is invalid.");
+ return;
+ }
+ } else {
+ if (project.getUserPermission(name) != null) {
+ ret.put("error", "User permission already exists.");
+ return;
+ }
+ if (!userManager.validateUser(name)) {
+ ret.put("error", "User is invalid.");
+ return;
+ }
+ }
+
+ boolean admin = Boolean.parseBoolean(getParam(req, "permissions[admin]"));
+ boolean read = Boolean.parseBoolean(getParam(req, "permissions[read]"));
+ boolean write = Boolean.parseBoolean(getParam(req, "permissions[write]"));
+ boolean execute =
+ Boolean.parseBoolean(getParam(req, "permissions[execute]"));
+ boolean schedule =
+ Boolean.parseBoolean(getParam(req, "permissions[schedule]"));
+
+ Permission perm = new Permission();
+ if (admin) {
+ perm.setPermission(Type.ADMIN, true);
+ } else {
+ perm.setPermission(Type.READ, read);
+ perm.setPermission(Type.WRITE, write);
+ perm.setPermission(Type.EXECUTE, execute);
+ perm.setPermission(Type.SCHEDULE, schedule);
+ }
+
+ try {
+ projectManager.updateProjectPermission(project, name, perm, group, user);
+ } catch (ProjectManagerException e) {
+ ret.put("error", e.getMessage());
+ }
}
- private void ajaxFetchFlowExecutions(Project project,
- HashMap<String, Object> ret, HttpServletRequest req)
+ private void ajaxChangePermissions(Project project,
+ HashMap<String, Object> ret, HttpServletRequest req, User user)
throws ServletException {
- String flowId = getParam(req, "flow");
- int from = Integer.valueOf(getParam(req, "start"));
- int length = Integer.valueOf(getParam(req, "length"));
-
- ArrayList<ExecutableFlow> exFlows = new ArrayList<ExecutableFlow>();
- int total = 0;
- try {
- total = executorManager.getExecutableFlows(
- project.getId(), flowId, from, length, exFlows);
- }
- catch (ExecutorManagerException e) {
- ret.put("error", "Error retrieving executable flows");
- }
-
- ret.put("flow", flowId);
- ret.put("total", total);
- ret.put("from", from);
- ret.put("length", length);
-
- ArrayList<Object> history = new ArrayList<Object>();
- for (ExecutableFlow flow: exFlows) {
- HashMap<String, Object> flowInfo = new HashMap<String, Object>();
- flowInfo.put("execId", flow.getExecutionId());
- flowInfo.put("flowId", flow.getFlowId());
- flowInfo.put("projectId", flow.getProjectId());
- flowInfo.put("status", flow.getStatus().toString());
- flowInfo.put("submitTime", flow.getSubmitTime());
- flowInfo.put("startTime", flow.getStartTime());
- flowInfo.put("endTime", flow.getEndTime());
- flowInfo.put("submitUser", flow.getSubmitUser());
-
- history.add(flowInfo);
- }
-
- ret.put("executions", history);
- }
-
- private void handleRemoveProject(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- User user = session.getUser();
- String projectName = getParam(req, "project");
-
- Project project = projectManager.getProject(projectName);
- if (project == null) {
- this.setErrorMessageInCookie(resp, "Project " + projectName + " doesn't exist.");
- resp.sendRedirect(req.getContextPath());
- return;
- }
-
- if (!hasPermission(project,user,Type.ADMIN)) {
- this.setErrorMessageInCookie(resp, "Cannot delete. User '" + user.getUserId() + "' is not an ADMIN.");
- resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
- return;
- }
-
- // Check if scheduled
- Schedule sflow = null;
- try {
- for (Schedule flow: scheduleManager.getSchedules()) {
-
- if (flow.getProjectId() == project.getId()) {
- sflow = flow;
- break;
- }
- }
- }
- catch (ScheduleManagerException e) {
- // TODO Auto-generated catch block
- throw new ServletException(e);
- }
-
- if (sflow != null) {
- this.setErrorMessageInCookie(resp, "Cannot delete. Please unschedule " + sflow.getScheduleName() + ".");
-
- resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
- return;
- }
-
- // Check if executing
- ExecutableFlow exflow = null;
- for (ExecutableFlow flow: executorManager.getRunningFlows()) {
- if (flow.getProjectId() == project.getId()) {
- exflow = flow;
- break;
- }
- }
- if (exflow != null) {
- this.setErrorMessageInCookie(resp, "Cannot delete. Executable flow " + exflow.getExecutionId() + " is still running.");
- resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
- return;
- }
-
- //project.info("Project removing by '" + user.getUserId() + "'");
- try {
- projectManager.removeProject(project, user);
- }
- catch (ProjectManagerException e) {
- this.setErrorMessageInCookie(resp, e.getMessage());
- resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
- return;
- }
-
- this.setSuccessMessageInCookie(resp, "Project '" + projectName + "' was successfully deleted.");
- resp.sendRedirect(req.getContextPath());
- }
-
- private void ajaxChangeDescription(Project project, HashMap<String, Object> ret, HttpServletRequest req, User user) throws ServletException {
- String description = getParam(req, "description");
- project.setDescription(description);
-
- try {
- projectManager.updateProjectDescription(project, description, user);
- } catch (ProjectManagerException e) {
- ret.put("error", e.getMessage());
- }
- }
-
- private void ajaxFetchJobInfo(Project project, HashMap<String, Object> ret, HttpServletRequest req) throws ServletException {
- String flowName = getParam(req, "flowName");
- String jobName = getParam(req, "jobName");
-
- Flow flow = project.getFlow(flowName);
- if(flow == null) {
- ret.put("error", "Flow " + flowName + " not found in project " + project.getName());
- return;
- }
-
- Node node = flow.getNode(jobName);
- if(node == null) {
- ret.put("error", "Job " + jobName + " not found in flow " + flowName);
- return;
- }
-
- Props prop;
- try {
- prop = projectManager.getProperties(project, node.getJobSource());
- } catch (ProjectManagerException e) {
- ret.put("error", "Failed to retrieve job properties!");
- return;
- }
-
- Props overrideProp;
- try {
- overrideProp = projectManager.getJobOverrideProperty(project, jobName);
- } catch (ProjectManagerException e) {
- ret.put("error", "Failed to retrieve job override properties!");
- return;
- }
-
- ret.put("jobName", node.getId());
- ret.put("jobType", prop.get("type"));
-
- if(overrideProp == null) {
- overrideProp = new Props(prop);
- }
-
- Map<String, String> generalParams = new HashMap<String, String>();
- Map<String, String> overrideParams = new HashMap<String, String>();
- for(String ps : prop.getKeySet()) {
- generalParams.put(ps, prop.getString(ps));
- }
- for(String ops : overrideProp.getKeySet()) {
-// generalParams.put(ops, overrideProp.getString(ops));
- overrideParams.put(ops, overrideProp.getString(ops));
- }
- ret.put("generalParams", generalParams);
- ret.put("overrideParams", overrideParams);
- }
-
- private void ajaxSetJobOverrideProperty(Project project, HashMap<String, Object> ret, HttpServletRequest req) throws ServletException {
- String flowName = getParam(req, "flowName");
- String jobName = getParam(req, "jobName");
-
- Flow flow = project.getFlow(flowName);
- if(flow == null) {
- ret.put("error", "Flow " + flowName + " not found in project " + project.getName());
- return;
- }
-
- Node node = flow.getNode(jobName);
- if(node == null) {
- ret.put("error", "Job " + jobName + " not found in flow " + flowName);
- return;
- }
-
- Map<String, String> jobParamGroup = this.getParamGroup(req, "jobOverride");
- @SuppressWarnings("unchecked")
- Props overrideParams = new Props(null, jobParamGroup);
- try {
- projectManager.setJobOverrideProperty(project, overrideParams, jobName);
- } catch (ProjectManagerException e) {
- ret.put("error", "Failed to upload job override property");
- }
-
- }
-
- private void ajaxFetchProjectFlows(Project project, HashMap<String, Object> ret, HttpServletRequest req) throws ServletException {
- ArrayList<Map<String,Object>> flowList = new ArrayList<Map<String,Object>>();
- for (Flow flow: project.getFlows()) {
- HashMap<String, Object> flowObj = new HashMap<String, Object>();
- flowObj.put("flowId", flow.getId());
- flowList.add(flowObj);
- }
-
- ret.put("flows", flowList);
- }
-
- private void ajaxFetchFlowGraph(Project project, HashMap<String, Object> ret, HttpServletRequest req) throws ServletException {
- String flowId = getParam(req, "flow");
-
- fillFlowInfo(project, flowId, ret);
- }
-
- private void fillFlowInfo(Project project, String flowId, HashMap<String, Object> ret) {
- Flow flow = project.getFlow(flowId);
-
- ArrayList<Map<String, Object>> nodeList = new ArrayList<Map<String, Object>>();
- for (Node node: flow.getNodes()) {
- HashMap<String, Object> nodeObj = new HashMap<String, Object>();
- nodeObj.put("id", node.getId());
- nodeObj.put("type", node.getType());
- if (node.getEmbeddedFlowId() != null) {
- nodeObj.put("flowId", node.getEmbeddedFlowId());
- //HashMap<String, Object> embeddedNodeObj = new HashMap<String, Object>();
- fillFlowInfo(project, node.getEmbeddedFlowId(), nodeObj);
- //nodeObj.put("flowData", embeddedNodeObj);
- }
-
- nodeList.add(nodeObj);
- Set<Edge> inEdges = flow.getInEdges(node.getId());
- if (inEdges != null && !inEdges.isEmpty()) {
- ArrayList<String> inEdgesList = new ArrayList<String>();
- for (Edge edge: inEdges) {
- inEdgesList.add(edge.getSourceId());
- }
- Collections.sort(inEdgesList);
- nodeObj.put("in", inEdgesList);
- }
- }
-
- Collections.sort(nodeList, new Comparator<Map<String, Object>>() {
- @Override
- public int compare(Map<String, Object> o1, Map<String, Object> o2) {
- String id = (String)o1.get("id");
- return id.compareTo((String)o2.get("id"));
- }
- });
-
- ret.put("flow", flowId);
- ret.put("nodes", nodeList);
- }
-
- private void ajaxFetchFlowNodeData(Project project, HashMap<String, Object> ret, HttpServletRequest req) throws ServletException {
- String flowId = getParam(req, "flow");
- Flow flow = project.getFlow(flowId);
-
- String nodeId = getParam(req, "node");
- Node node = flow.getNode(nodeId);
-
- if (node == null) {
- ret.put("error", "Job " + nodeId + " doesn't exist.");
- return;
- }
-
- ret.put("id", nodeId);
- ret.put("flow", flowId);
- ret.put("type", node.getType());
-
- Props props;
- try {
- props = projectManager.getProperties(project, node.getJobSource());
- } catch (ProjectManagerException e) {
- ret.put("error", "Failed to upload job override property for " + nodeId);
- return;
- }
-
- if (props == null) {
- ret.put("error", "Properties for " + nodeId + " isn't found.");
- return;
- }
-
- Map<String,String> properties = PropsUtils.toStringMap(props, true);
- ret.put("props", properties);
-
- if (node.getType().equals("flow")) {
- if (node.getEmbeddedFlowId() != null) {
- fillFlowInfo(project, node.getEmbeddedFlowId(), ret);
- }
- }
- }
-
- private void ajaxFetchFlow(Project project, HashMap<String, Object> ret, HttpServletRequest req, HttpServletResponse resp) throws ServletException {
- String flowId = getParam(req, "flow");
- Flow flow = project.getFlow(flowId);
-
- ArrayList<Node> flowNodes = new ArrayList<Node>(flow.getNodes());
- Collections.sort(flowNodes, NODE_LEVEL_COMPARATOR);
-
- ArrayList<Object> nodeList = new ArrayList<Object>();
- for (Node node: flowNodes) {
- HashMap<String, Object> nodeObj = new HashMap<String, Object>();
- nodeObj.put("id", node.getId());
-
- ArrayList<String> dependencies = new ArrayList<String>();
- Collection<Edge> collection = flow.getInEdges(node.getId());
- if (collection != null) {
- for (Edge edge: collection) {
- dependencies.add(edge.getSourceId());
- }
- }
-
- ArrayList<String> dependents = new ArrayList<String>();
- collection = flow.getOutEdges(node.getId());
- if (collection != null) {
- for (Edge edge: collection) {
- dependents.add(edge.getTargetId());
- }
- }
-
- nodeObj.put("dependencies", dependencies);
- nodeObj.put("dependents", dependents);
- nodeObj.put("level", node.getLevel());
- nodeList.add(nodeObj);
- }
-
- ret.put("flowId", flowId);
- ret.put("nodes", nodeList);
- }
-
-
- private void ajaxAddProxyUser(Project project, HashMap<String, Object> ret, HttpServletRequest req, User user) throws ServletException {
- String name = getParam(req, "name");
-
- logger.info("Adding proxy user " + name + " by " + user.getUserId());
- if(userManager.validateProxyUser(name, user)) {
- try {
- projectManager.addProjectProxyUser(project, name, user);
- } catch (ProjectManagerException e) {
- ret.put("error", e.getMessage());
- }
- }
- else {
- ret.put("error", "User " + user.getUserId() + " has no permission to add " + name + " as proxy user.");
- return;
- }
- }
-
- private void ajaxRemoveProxyUser(Project project, HashMap<String, Object> ret, HttpServletRequest req, User user) throws ServletException {
- String name = getParam(req, "name");
-
- logger.info("Removing proxy user " + name + " by " + user.getUserId());
-
- try {
- projectManager.removeProjectProxyUser(project, name, user);
- } catch (ProjectManagerException e) {
- ret.put("error", e.getMessage());
- }
- }
-
- private void ajaxAddPermission(Project project, HashMap<String, Object> ret, HttpServletRequest req, User user) throws ServletException {
- String name = getParam(req, "name");
- boolean group = Boolean.parseBoolean(getParam(req, "group"));
-
- if (group) {
- if (project.getGroupPermission(name) != null) {
- ret.put("error", "Group permission already exists.");
- return;
- }
- if (!userManager.validateGroup(name)) {
- ret.put("error", "Group is invalid.");
- return;
- }
- }
- else {
- if (project.getUserPermission(name) != null) {
- ret.put("error", "User permission already exists.");
- return;
- }
- if (!userManager.validateUser(name)) {
- ret.put("error", "User is invalid.");
- return;
- }
- }
-
- boolean admin = Boolean.parseBoolean(getParam(req, "permissions[admin]"));
- boolean read = Boolean.parseBoolean(getParam(req, "permissions[read]"));
- boolean write = Boolean.parseBoolean(getParam(req, "permissions[write]"));
- boolean execute = Boolean.parseBoolean(getParam(req, "permissions[execute]"));
- boolean schedule = Boolean.parseBoolean(getParam(req, "permissions[schedule]"));
-
- Permission perm = new Permission();
- if (admin) {
- perm.setPermission(Type.ADMIN, true);
- }
- else {
- perm.setPermission(Type.READ, read);
- perm.setPermission(Type.WRITE, write);
- perm.setPermission(Type.EXECUTE, execute);
- perm.setPermission(Type.SCHEDULE, schedule);
- }
-
- try {
- projectManager.updateProjectPermission(project, name, perm, group, user);
- } catch (ProjectManagerException e) {
- ret.put("error", e.getMessage());
- }
- }
-
-
- private void ajaxChangePermissions(Project project, HashMap<String, Object> ret, HttpServletRequest req, User user) throws ServletException {
- boolean admin = Boolean.parseBoolean(getParam(req, "permissions[admin]"));
- boolean read = Boolean.parseBoolean(getParam(req, "permissions[read]"));
- boolean write = Boolean.parseBoolean(getParam(req, "permissions[write]"));
- boolean execute = Boolean.parseBoolean(getParam(req, "permissions[execute]"));
- boolean schedule = Boolean.parseBoolean(getParam(req, "permissions[schedule]"));
-
- boolean group = Boolean.parseBoolean(getParam(req, "group"));
-
- String name = getParam(req, "name");
- Permission perm;
- if (group) {
- perm = project.getGroupPermission(name);
- }
- else {
- perm = project.getUserPermission(name);
- }
-
- if (perm == null) {
- ret.put("error", "Permissions for " + name + " cannot be found.");
- return;
- }
-
- if (admin || read || write || execute || schedule) {
- if (admin) {
- perm.setPermission(Type.ADMIN, true);
- perm.setPermission(Type.READ, false);
- perm.setPermission(Type.WRITE, false);
- perm.setPermission(Type.EXECUTE, false);
- perm.setPermission(Type.SCHEDULE, false);
- }
- else {
- perm.setPermission(Type.ADMIN, false);
- perm.setPermission(Type.READ, read);
- perm.setPermission(Type.WRITE, write);
- perm.setPermission(Type.EXECUTE, execute);
- perm.setPermission(Type.SCHEDULE, schedule);
- }
-
- try {
- projectManager.updateProjectPermission(project, name, perm, group, user);
- } catch (ProjectManagerException e) {
- ret.put("error", e.getMessage());
- }
- }
- else {
- try {
- projectManager.removeProjectPermission(project, name, group, user);
- } catch (ProjectManagerException e) {
- ret.put("error", e.getMessage());
- }
- }
- }
-
- private void ajaxGetPermissions(Project project, HashMap<String, Object> ret) {
- ArrayList<HashMap<String, Object>> permissions = new ArrayList<HashMap<String, Object>>();
- for(Pair<String, Permission> perm: project.getUserPermissions()) {
- HashMap<String, Object> permObj = new HashMap<String, Object>();
- String userId = perm.getFirst();
- permObj.put("username", userId);
- permObj.put("permission", perm.getSecond().toStringArray());
-
- permissions.add(permObj);
- }
-
- ret.put("permissions", permissions);
- }
-
- private void handleProjectLogsPage(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/projectlogpage.vm");
- String projectName = getParam(req, "project");
-
- User user = session.getUser();
- Project project = null;
- try {
- project = projectManager.getProject(projectName);
- if (project == null) {
- page.add("errorMsg", "Project " + projectName + " doesn't exist.");
- }
- else {
- if (!hasPermission(project,user,Type.READ)) {
- throw new AccessControlException( "No permission to view project " + projectName + ".");
- }
-
- page.add("project", project);
- page.add("admins", Utils.flattenToString(project.getUsersWithPermission(Type.ADMIN), ","));
- Permission perm = this.getPermissionObject(project, user, Type.ADMIN);
- page.add("userpermission", perm);
-
- boolean adminPerm = perm.isPermissionSet(Type.ADMIN);
- if (adminPerm) {
- page.add("admin", true);
- }
- // Set this so we can display execute buttons only to those who have access.
- if (perm.isPermissionSet(Type.EXECUTE) || adminPerm) {
- page.add("exec", true);
- }
- else {
- page.add("exec", false);
- }
- }
- }
- catch (AccessControlException e) {
- page.add("errorMsg", e.getMessage());
- }
-
- //page.add("projectManager", projectManager);
- //int bytesSkip = 0;
- int numBytes = 1024;
-
- // Really sucks if we do a lot of these because it'll eat up memory fast.
- // But it's expected that this won't be a heavily used thing. If it is,
- // then we'll revisit it to make it more stream friendly.
- StringBuffer buffer = new StringBuffer(numBytes);
- page.add("log", buffer.toString());
-
- page.render();
- }
-
- private void handleJobHistoryPage(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/jobhistorypage.vm");
- String projectName = getParam(req, "project");
- User user = session.getUser();
-
- Project project = projectManager.getProject(projectName);
- if (project == null) {
- page.add("errorMsg", "Project " + projectName + " doesn't exist.");
- page.render();
- return;
- }
- if (!hasPermission(project, user, Type.READ)) {
- page.add("errorMsg", "No permission to view project " + projectName + ".");
- page.render();
- return;
- }
-
- String jobId = getParam(req, "job");
- int pageNum = getIntParam(req, "page", 1);
- int pageSize = getIntParam(req, "size", 25);
-
- page.add("projectId", project.getId());
- page.add("projectName", project.getName());
- page.add("jobid", jobId);
- page.add("page", pageNum);
-
- int skipPage = (pageNum - 1)*pageSize;
-
- int numResults = 0;
- try {
- numResults = executorManager.getNumberOfJobExecutions(project, jobId);
- int maxPage = (numResults / pageSize) + 1;
- List<ExecutableJobInfo> jobInfo = executorManager.getExecutableJobs(project, jobId, skipPage, pageSize);
-
- if (jobInfo == null || jobInfo.isEmpty()) {
- jobInfo = null;
- }
- page.add("history", jobInfo);
-
- page.add("previous", new PageSelection("Previous", pageSize, true, false, Math.max(pageNum - 1, 1)));
-
- page.add("next", new PageSelection("Next", pageSize, false, false, Math.min(pageNum + 1, maxPage)));
-
- if (jobInfo != null) {
- ArrayList<Object> dataSeries = new ArrayList<Object>();
- for (ExecutableJobInfo info: jobInfo) {
- Map<String,Object> map = info.toObject();
- dataSeries.add(map);
- }
- page.add("dataSeries", JSONUtils.toJSON(dataSeries));
- }
- else {
- page.add("dataSeries", "[]");
- }
- } catch (ExecutorManagerException e) {
- page.add("errorMsg", e.getMessage());
- }
-
- // Now for the 5 other values.
- int pageStartValue = 1;
- if (pageNum > 3) {
- pageStartValue = pageNum - 2;
- }
- int maxPage = (numResults / pageSize) + 1;
-
- page.add("page1", new PageSelection(String.valueOf(pageStartValue), pageSize, pageStartValue > maxPage, pageStartValue == pageNum, Math.min(pageStartValue, maxPage)));
- pageStartValue++;
- page.add("page2", new PageSelection(String.valueOf(pageStartValue), pageSize, pageStartValue > maxPage, pageStartValue == pageNum, Math.min(pageStartValue, maxPage)));
- pageStartValue++;
- page.add("page3", new PageSelection(String.valueOf(pageStartValue), pageSize, pageStartValue > maxPage, pageStartValue == pageNum, Math.min(pageStartValue, maxPage)));
- pageStartValue++;
- page.add("page4", new PageSelection(String.valueOf(pageStartValue), pageSize, pageStartValue > maxPage, pageStartValue == pageNum, Math.min(pageStartValue, maxPage)));
- pageStartValue++;
- page.add("page5", new PageSelection(String.valueOf(pageStartValue), pageSize, pageStartValue > maxPage, pageStartValue == pageNum, Math.min(pageStartValue, maxPage)));
-
- page.render();
- }
-
- private void handlePermissionPage(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException {
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/permissionspage.vm");
- String projectName = getParam(req, "project");
- User user = session.getUser();
-
- Project project = null;
- try {
- project = projectManager.getProject(projectName);
- if (project == null) {
- page.add("errorMsg", "Project " + projectName + " not found.");
- }
- else {
- if (!hasPermission(project, user, Type.READ)) {
- throw new AccessControlException( "No permission to view project " + projectName + ".");
- }
-
- page.add("project", project);
- page.add("username", user.getUserId());
- page.add("admins", Utils.flattenToString(project.getUsersWithPermission(Type.ADMIN), ","));
- Permission perm = this.getPermissionObject(project, user, Type.ADMIN);
- page.add("userpermission", perm);
-
- if (perm.isPermissionSet(Type.ADMIN)) {
- page.add("admin", true);
- }
-
- List<Pair<String, Permission>> userPermission = project.getUserPermissions();
- if (userPermission != null && !userPermission.isEmpty()) {
- page.add("permissions", userPermission);
- }
-
- List<Pair<String, Permission>> groupPermission = project.getGroupPermissions();
- if (groupPermission != null && !groupPermission.isEmpty()) {
- page.add("groupPermissions", groupPermission);
- }
-
- Set<String> proxyUsers = project.getProxyUsers();
- if (proxyUsers != null && !proxyUsers.isEmpty()) {
- page.add("proxyUsers", proxyUsers);
- }
-
- if(hasPermission(project, user, Type.ADMIN)) {
- page.add("isAdmin", true);
- }
- }
- }
- catch(AccessControlException e) {
- page.add("errorMsg", e.getMessage());
- }
-
- page.render();
- }
-
- private void handleJobPage(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException {
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/jobpage.vm");
- String projectName = getParam(req, "project");
- String flowName = getParam(req, "flow");
- String jobName = getParam(req, "job");
-
- User user = session.getUser();
- Project project = null;
- Flow flow = null;
- try {
- project = projectManager.getProject(projectName);
- if (project == null) {
- page.add("errorMsg", "Project " + projectName + " not found.");
+ boolean admin = Boolean.parseBoolean(getParam(req, "permissions[admin]"));
+ boolean read = Boolean.parseBoolean(getParam(req, "permissions[read]"));
+ boolean write = Boolean.parseBoolean(getParam(req, "permissions[write]"));
+ boolean execute =
+ Boolean.parseBoolean(getParam(req, "permissions[execute]"));
+ boolean schedule =
+ Boolean.parseBoolean(getParam(req, "permissions[schedule]"));
+
+ boolean group = Boolean.parseBoolean(getParam(req, "group"));
+
+ String name = getParam(req, "name");
+ Permission perm;
+ if (group) {
+ perm = project.getGroupPermission(name);
+ } else {
+ perm = project.getUserPermission(name);
+ }
+
+ if (perm == null) {
+ ret.put("error", "Permissions for " + name + " cannot be found.");
+ return;
+ }
+
+ if (admin || read || write || execute || schedule) {
+ if (admin) {
+ perm.setPermission(Type.ADMIN, true);
+ perm.setPermission(Type.READ, false);
+ perm.setPermission(Type.WRITE, false);
+ perm.setPermission(Type.EXECUTE, false);
+ perm.setPermission(Type.SCHEDULE, false);
+ } else {
+ perm.setPermission(Type.ADMIN, false);
+ perm.setPermission(Type.READ, read);
+ perm.setPermission(Type.WRITE, write);
+ perm.setPermission(Type.EXECUTE, execute);
+ perm.setPermission(Type.SCHEDULE, schedule);
+ }
+
+ try {
+ projectManager
+ .updateProjectPermission(project, name, perm, group, user);
+ } catch (ProjectManagerException e) {
+ ret.put("error", e.getMessage());
+ }
+ } else {
+ try {
+ projectManager.removeProjectPermission(project, name, group, user);
+ } catch (ProjectManagerException e) {
+ ret.put("error", e.getMessage());
+ }
+ }
+ }
+
+ private void ajaxGetPermissions(Project project, HashMap<String, Object> ret) {
+ ArrayList<HashMap<String, Object>> permissions =
+ new ArrayList<HashMap<String, Object>>();
+ for (Pair<String, Permission> perm : project.getUserPermissions()) {
+ HashMap<String, Object> permObj = new HashMap<String, Object>();
+ String userId = perm.getFirst();
+ permObj.put("username", userId);
+ permObj.put("permission", perm.getSecond().toStringArray());
+
+ permissions.add(permObj);
+ }
+
+ ret.put("permissions", permissions);
+ }
+
+ private void handleProjectLogsPage(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException,
+ IOException {
+ Page page =
+ newPage(req, resp, session,
+ "azkaban/webapp/servlet/velocity/projectlogpage.vm");
+ String projectName = getParam(req, "project");
+
+ User user = session.getUser();
+ Project project = null;
+ try {
+ project = projectManager.getProject(projectName);
+ if (project == null) {
+ page.add("errorMsg", "Project " + projectName + " doesn't exist.");
+ } else {
+ if (!hasPermission(project, user, Type.READ)) {
+ throw new AccessControlException("No permission to view project "
+ + projectName + ".");
+ }
+
+ page.add("project", project);
+ page.add("admins", Utils.flattenToString(
+ project.getUsersWithPermission(Type.ADMIN), ","));
+ Permission perm = this.getPermissionObject(project, user, Type.ADMIN);
+ page.add("userpermission", perm);
+
+ boolean adminPerm = perm.isPermissionSet(Type.ADMIN);
+ if (adminPerm) {
+ page.add("admin", true);
+ }
+ // Set this so we can display execute buttons only to those who have
+ // access.
+ if (perm.isPermissionSet(Type.EXECUTE) || adminPerm) {
+ page.add("exec", true);
+ } else {
+ page.add("exec", false);
+ }
+ }
+ } catch (AccessControlException e) {
+ page.add("errorMsg", e.getMessage());
+ }
+
+ // page.add("projectManager", projectManager);
+ // int bytesSkip = 0;
+ int numBytes = 1024;
+
+ // Really sucks if we do a lot of these because it'll eat up memory fast.
+ // But it's expected that this won't be a heavily used thing. If it is,
+ // then we'll revisit it to make it more stream friendly.
+ StringBuffer buffer = new StringBuffer(numBytes);
+ page.add("log", buffer.toString());
+
+ page.render();
+ }
+
+ private void handleJobHistoryPage(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException,
+ IOException {
+ Page page =
+ newPage(req, resp, session,
+ "azkaban/webapp/servlet/velocity/jobhistorypage.vm");
+ String projectName = getParam(req, "project");
+ User user = session.getUser();
+
+ Project project = projectManager.getProject(projectName);
+ if (project == null) {
+ page.add("errorMsg", "Project " + projectName + " doesn't exist.");
+ page.render();
+ return;
+ }
+ if (!hasPermission(project, user, Type.READ)) {
+ page.add("errorMsg", "No permission to view project " + projectName + ".");
+ page.render();
+ return;
+ }
+
+ String jobId = getParam(req, "job");
+ int pageNum = getIntParam(req, "page", 1);
+ int pageSize = getIntParam(req, "size", 25);
+
+ page.add("projectId", project.getId());
+ page.add("projectName", project.getName());
+ page.add("jobid", jobId);
+ page.add("page", pageNum);
+
+ int skipPage = (pageNum - 1) * pageSize;
+
+ int numResults = 0;
+ try {
+ numResults = executorManager.getNumberOfJobExecutions(project, jobId);
+ int maxPage = (numResults / pageSize) + 1;
+ List<ExecutableJobInfo> jobInfo =
+ executorManager.getExecutableJobs(project, jobId, skipPage, pageSize);
+
+ if (jobInfo == null || jobInfo.isEmpty()) {
+ jobInfo = null;
+ }
+ page.add("history", jobInfo);
+
+ page.add("previous", new PageSelection("Previous", pageSize, true, false,
+ Math.max(pageNum - 1, 1)));
+
+ page.add(
+ "next",
+ new PageSelection("Next", pageSize, false, false, Math.min(
+ pageNum + 1, maxPage)));
+
+ if (jobInfo != null) {
+ ArrayList<Object> dataSeries = new ArrayList<Object>();
+ for (ExecutableJobInfo info : jobInfo) {
+ Map<String, Object> map = info.toObject();
+ dataSeries.add(map);
+ }
+ page.add("dataSeries", JSONUtils.toJSON(dataSeries));
+ } else {
+ page.add("dataSeries", "[]");
+ }
+ } catch (ExecutorManagerException e) {
+ page.add("errorMsg", e.getMessage());
+ }
+
+ // Now for the 5 other values.
+ int pageStartValue = 1;
+ if (pageNum > 3) {
+ pageStartValue = pageNum - 2;
+ }
+ int maxPage = (numResults / pageSize) + 1;
+
+ page.add(
+ "page1",
+ new PageSelection(String.valueOf(pageStartValue), pageSize,
+ pageStartValue > maxPage, pageStartValue == pageNum, Math.min(
+ pageStartValue, maxPage)));
+ pageStartValue++;
+ page.add(
+ "page2",
+ new PageSelection(String.valueOf(pageStartValue), pageSize,
+ pageStartValue > maxPage, pageStartValue == pageNum, Math.min(
+ pageStartValue, maxPage)));
+ pageStartValue++;
+ page.add(
+ "page3",
+ new PageSelection(String.valueOf(pageStartValue), pageSize,
+ pageStartValue > maxPage, pageStartValue == pageNum, Math.min(
+ pageStartValue, maxPage)));
+ pageStartValue++;
+ page.add(
+ "page4",
+ new PageSelection(String.valueOf(pageStartValue), pageSize,
+ pageStartValue > maxPage, pageStartValue == pageNum, Math.min(
+ pageStartValue, maxPage)));
+ pageStartValue++;
+ page.add(
+ "page5",
+ new PageSelection(String.valueOf(pageStartValue), pageSize,
+ pageStartValue > maxPage, pageStartValue == pageNum, Math.min(
+ pageStartValue, maxPage)));
+
+ page.render();
+ }
+
+ private void handlePermissionPage(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException {
+ Page page =
+ newPage(req, resp, session,
+ "azkaban/webapp/servlet/velocity/permissionspage.vm");
+ String projectName = getParam(req, "project");
+ User user = session.getUser();
+
+ Project project = null;
+ try {
+ project = projectManager.getProject(projectName);
+ if (project == null) {
+ page.add("errorMsg", "Project " + projectName + " not found.");
+ } else {
+ if (!hasPermission(project, user, Type.READ)) {
+ throw new AccessControlException("No permission to view project "
+ + projectName + ".");
+ }
+
+ page.add("project", project);
+ page.add("username", user.getUserId());
+ page.add("admins", Utils.flattenToString(
+ project.getUsersWithPermission(Type.ADMIN), ","));
+ Permission perm = this.getPermissionObject(project, user, Type.ADMIN);
+ page.add("userpermission", perm);
+
+ if (perm.isPermissionSet(Type.ADMIN)) {
+ page.add("admin", true);
+ }
+
+ List<Pair<String, Permission>> userPermission =
+ project.getUserPermissions();
+ if (userPermission != null && !userPermission.isEmpty()) {
+ page.add("permissions", userPermission);
+ }
+
+ List<Pair<String, Permission>> groupPermission =
+ project.getGroupPermissions();
+ if (groupPermission != null && !groupPermission.isEmpty()) {
+ page.add("groupPermissions", groupPermission);
+ }
+
+ Set<String> proxyUsers = project.getProxyUsers();
+ if (proxyUsers != null && !proxyUsers.isEmpty()) {
+ page.add("proxyUsers", proxyUsers);
+ }
+
+ if (hasPermission(project, user, Type.ADMIN)) {
+ page.add("isAdmin", true);
+ }
+ }
+ } catch (AccessControlException e) {
+ page.add("errorMsg", e.getMessage());
+ }
+
+ page.render();
+ }
+
+ private void handleJobPage(HttpServletRequest req, HttpServletResponse resp,
+ Session session) throws ServletException {
+ Page page =
+ newPage(req, resp, session,
+ "azkaban/webapp/servlet/velocity/jobpage.vm");
+ String projectName = getParam(req, "project");
+ String flowName = getParam(req, "flow");
+ String jobName = getParam(req, "job");
+
+ User user = session.getUser();
+ Project project = null;
+ Flow flow = null;
+ try {
+ project = projectManager.getProject(projectName);
+ if (project == null) {
+ page.add("errorMsg", "Project " + projectName + " not found.");
page.render();
return;
- }
+ }
if (!hasPermission(project, user, Type.READ)) {
- throw new AccessControlException( "No permission to view project " + projectName + ".");
+ throw new AccessControlException("No permission to view project "
+ + projectName + ".");
}
-
+
page.add("project", project);
flow = project.getFlow(flowName);
if (flow == null) {
@@ -1074,7 +1132,8 @@ public class ProjectManagerServlet extends LoginAbstractAzkabanServlet {
}
Props prop = projectManager.getProperties(project, node.getJobSource());
- Props overrideProp = projectManager.getJobOverrideProperty(project, jobName);
+ Props overrideProp =
+ projectManager.getJobOverrideProperty(project, jobName);
if (overrideProp == null) {
overrideProp = new Props();
}
@@ -1084,85 +1143,88 @@ public class ProjectManagerServlet extends LoginAbstractAzkabanServlet {
}
page.add("jobid", node.getId());
page.add("jobtype", node.getType());
-
+
ArrayList<String> dependencies = new ArrayList<String>();
Set<Edge> inEdges = flow.getInEdges(node.getId());
if (inEdges != null) {
- for (Edge dependency: inEdges) {
+ for (Edge dependency : inEdges) {
dependencies.add(dependency.getSourceId());
}
}
if (!dependencies.isEmpty()) {
page.add("dependencies", dependencies);
}
-
+
ArrayList<String> dependents = new ArrayList<String>();
Set<Edge> outEdges = flow.getOutEdges(node.getId());
if (outEdges != null) {
- for (Edge dependent: outEdges) {
+ for (Edge dependent : outEdges) {
dependents.add(dependent.getTargetId());
}
}
if (!dependents.isEmpty()) {
page.add("dependents", dependents);
}
-
+
// Resolve property dependencies
- ArrayList<String> source = new ArrayList<String>();
+ ArrayList<String> source = new ArrayList<String>();
String nodeSource = node.getPropsSource();
if (nodeSource != null) {
source.add(nodeSource);
FlowProps parent = flow.getFlowProps(nodeSource);
while (parent.getInheritedSource() != null) {
source.add(parent.getInheritedSource());
- parent = flow.getFlowProps(parent.getInheritedSource());
+ parent = flow.getFlowProps(parent.getInheritedSource());
}
}
if (!source.isEmpty()) {
page.add("properties", source);
}
-
- ArrayList<Pair<String,String>> parameters = new ArrayList<Pair<String, String>>();
+
+ ArrayList<Pair<String, String>> parameters =
+ new ArrayList<Pair<String, String>>();
// Parameter
for (String key : comboProp.getKeySet()) {
String value = comboProp.get(key);
- parameters.add(new Pair<String,String>(key, value));
+ parameters.add(new Pair<String, String>(key, value));
}
-
+
page.add("parameters", parameters);
- }
- catch (AccessControlException e) {
- page.add("errorMsg", e.getMessage());
- }
- catch (ProjectManagerException e) {
- page.add("errorMsg", e.getMessage());
- }
- page.render();
- }
-
- private void handlePropertyPage(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException {
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/propertypage.vm");
- String projectName = getParam(req, "project");
- String flowName = getParam(req, "flow");
- String jobName = getParam(req, "job");
- String propSource = getParam(req, "prop");
-
- User user = session.getUser();
- Project project = null;
- Flow flow = null;
- try {
- project = projectManager.getProject(projectName);
- if (project == null) {
- page.add("errorMsg", "Project " + projectName + " not found.");
+ } catch (AccessControlException e) {
+ page.add("errorMsg", e.getMessage());
+ } catch (ProjectManagerException e) {
+ page.add("errorMsg", e.getMessage());
+ }
+ page.render();
+ }
+
+ private void handlePropertyPage(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException {
+ Page page =
+ newPage(req, resp, session,
+ "azkaban/webapp/servlet/velocity/propertypage.vm");
+ String projectName = getParam(req, "project");
+ String flowName = getParam(req, "flow");
+ String jobName = getParam(req, "job");
+ String propSource = getParam(req, "prop");
+
+ User user = session.getUser();
+ Project project = null;
+ Flow flow = null;
+ try {
+ project = projectManager.getProject(projectName);
+ if (project == null) {
+ page.add("errorMsg", "Project " + projectName + " not found.");
page.render();
return;
- }
+ }
- if (!hasPermission(project,user,Type.READ)) {
- throw new AccessControlException( "No permission to view project " + projectName + ".");
+ if (!hasPermission(project, user, Type.READ)) {
+ throw new AccessControlException("No permission to view project "
+ + projectName + ".");
}
page.add("project", project);
-
+
flow = project.getFlow(flowName);
if (flow == null) {
page.add("errorMsg", "Flow " + flowName + " not found.");
@@ -1181,312 +1243,326 @@ public class ProjectManagerServlet extends LoginAbstractAzkabanServlet {
Props prop = projectManager.getProperties(project, propSource);
page.add("property", propSource);
page.add("jobid", node.getId());
-
+
// Resolve property dependencies
- ArrayList<String> inheritProps = new ArrayList<String>();
+ ArrayList<String> inheritProps = new ArrayList<String>();
FlowProps parent = flow.getFlowProps(propSource);
while (parent.getInheritedSource() != null) {
inheritProps.add(parent.getInheritedSource());
- parent = flow.getFlowProps(parent.getInheritedSource());
+ parent = flow.getFlowProps(parent.getInheritedSource());
}
if (!inheritProps.isEmpty()) {
page.add("inheritedproperties", inheritProps);
}
-
- ArrayList<String> dependingProps = new ArrayList<String>();
- FlowProps child = flow.getFlowProps(flow.getNode(jobName).getPropsSource());
+
+ ArrayList<String> dependingProps = new ArrayList<String>();
+ FlowProps child =
+ flow.getFlowProps(flow.getNode(jobName).getPropsSource());
while (!child.getSource().equals(propSource)) {
dependingProps.add(child.getSource());
- child = flow.getFlowProps(child.getInheritedSource());
+ child = flow.getFlowProps(child.getInheritedSource());
}
if (!dependingProps.isEmpty()) {
page.add("dependingproperties", dependingProps);
}
-
- ArrayList<Pair<String,String>> parameters = new ArrayList<Pair<String, String>>();
+
+ ArrayList<Pair<String, String>> parameters =
+ new ArrayList<Pair<String, String>>();
// Parameter
for (String key : prop.getKeySet()) {
String value = prop.get(key);
- parameters.add(new Pair<String,String>(key, value));
+ parameters.add(new Pair<String, String>(key, value));
}
-
+
page.add("parameters", parameters);
- }
- catch (AccessControlException e) {
- page.add("errorMsg", e.getMessage());
- } catch (ProjectManagerException e) {
- page.add("errorMsg", e.getMessage());
- }
-
- page.render();
- }
-
- private void handleFlowPage(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException {
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/flowpage.vm");
- String projectName = getParam(req, "project");
- String flowName = getParam(req, "flow");
-
- User user = session.getUser();
- Project project = null;
- Flow flow = null;
- try {
- project = projectManager.getProject(projectName);
- if (project == null) {
- page.add("errorMsg", "Project " + projectName + " not found.");
+ } catch (AccessControlException e) {
+ page.add("errorMsg", e.getMessage());
+ } catch (ProjectManagerException e) {
+ page.add("errorMsg", e.getMessage());
+ }
+
+ page.render();
+ }
+
+ private void handleFlowPage(HttpServletRequest req, HttpServletResponse resp,
+ Session session) throws ServletException {
+ Page page =
+ newPage(req, resp, session,
+ "azkaban/webapp/servlet/velocity/flowpage.vm");
+ String projectName = getParam(req, "project");
+ String flowName = getParam(req, "flow");
+
+ User user = session.getUser();
+ Project project = null;
+ Flow flow = null;
+ try {
+ project = projectManager.getProject(projectName);
+ if (project == null) {
+ page.add("errorMsg", "Project " + projectName + " not found.");
page.render();
return;
- }
+ }
if (!hasPermission(project, user, Type.READ)) {
- throw new AccessControlException( "No permission Project " + projectName + ".");
+        throw new AccessControlException("No permission to view project "
+            + projectName + ".");
}
-
+
page.add("project", project);
flow = project.getFlow(flowName);
if (flow == null) {
page.add("errorMsg", "Flow " + flowName + " not found.");
}
-
+
page.add("flowid", flow.getId());
- }
- catch (AccessControlException e) {
- page.add("errorMsg", e.getMessage());
- }
-
- page.render();
- }
-
- private void handleProjectPage(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException {
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/projectpage.vm");
- String projectName = getParam(req, "project");
-
- User user = session.getUser();
- Project project = null;
- try {
- project = projectManager.getProject(projectName);
- if (project == null) {
- page.add("errorMsg", "Project " + projectName + " not found.");
- }
- else {
- if (!hasPermission(project,user,Type.READ)) {
- throw new AccessControlException( "No permission to view project " + projectName + ".");
- }
-
- page.add("project", project);
- page.add("admins", Utils.flattenToString(project.getUsersWithPermission(Type.ADMIN), ","));
- Permission perm = this.getPermissionObject(project, user, Type.ADMIN);
- page.add("userpermission", perm);
-
- boolean adminPerm = perm.isPermissionSet(Type.ADMIN);
- if (adminPerm) {
- page.add("admin", true);
- }
- // Set this so we can display execute buttons only to those who have access.
- if (perm.isPermissionSet(Type.EXECUTE) || adminPerm) {
- page.add("exec", true);
- }
- else {
- page.add("exec", false);
- }
-
- List<Flow> flows = project.getFlows();
- if (!flows.isEmpty()) {
- Collections.sort(flows, FLOW_ID_COMPARATOR);
- page.add("flows", flows);
- }
- }
- }
- catch (AccessControlException e) {
- page.add("errorMsg", e.getMessage());
- }
- page.render();
- }
-
- private void handleCreate(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException {
- String projectName = hasParam(req, "name") ? getParam(req, "name") : null;
- String projectDescription = hasParam(req, "description") ? getParam(req, "description") : null;
- logger.info("Create project " + projectName);
-
- User user = session.getUser();
-
- String status = null;
- String action = null;
- String message = null;
- HashMap<String, Object> params = null;
-
- if (lockdownCreateProjects && !hasPermissionToCreateProject(user)) {
- message = "User " + user.getUserId() + " doesn't have permission to create projects.";
- logger.info(message);
- status = "error";
- }
- else {
- try {
- projectManager.createProject(projectName, projectDescription, user);
- status = "success";
- action = "redirect";
- String redirect = "manager?project=" + projectName;
- params = new HashMap<String, Object>();
- params.put("path", redirect);
- } catch (ProjectManagerException e) {
- message = e.getMessage();
- status = "error";
- }
- }
- String response = createJsonResponse(status, message, action, params);
- try {
- Writer write = resp.getWriter();
- write.append(response);
- write.flush();
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
-
- private void ajaxHandleUpload(HttpServletRequest req, Map<String, String> ret, Map<String, Object> multipart, Session session) throws ServletException, IOException {
- User user = session.getUser();
- String projectName = (String) multipart.get("project");
- Project project = projectManager.getProject(projectName);
-
- if (projectName == null || projectName.isEmpty()) {
- ret.put("error", "No project name found.");
- }
- else if (project == null) {
- ret.put("error", "Installation Failed. Project '" + projectName + "' doesn't exist.");
- }
- else if (!hasPermission(project,user,Type.WRITE)) {
- ret.put("error", "Installation Failed. User '" + user.getUserId() + "' does not have write access.");
- }
- else {
- ret.put("projectId", String.valueOf(project.getId()));
-
- FileItem item = (FileItem) multipart.get("file");
- String name = item.getName();
- String type = null;
-
- final String contentType = item.getContentType();
- if (contentType != null
- && (contentType.startsWith("application/zip")
- || contentType.startsWith("application/x-zip-compressed")
- || contentType.startsWith("application/octet-stream"))) {
- type = "zip";
- }
- else {
- item.delete();
- ret.put("error", "File type " + contentType + " unrecognized.");
-
- return;
- }
-
- File tempDir = Utils.createTempDir();
- OutputStream out = null;
- try {
- logger.info("Uploading file " + name);
- File archiveFile = new File(tempDir, name);
- out = new BufferedOutputStream(new FileOutputStream(archiveFile));
- IOUtils.copy(item.getInputStream(), out);
- out.close();
-
- projectManager.uploadProject(project, archiveFile, type, user);
- } catch (Exception e) {
- logger.info("Installation Failed.", e);
- String error = e.getMessage();
- if (error.length() > 512) {
- error = error.substring(0, 512) + "\nToo many errors to display.\n";
- }
- ret.put("error", "Installation Failed.\n" + error);
- }
- finally {
- if (tempDir.exists()) {
- FileUtils.deleteDirectory(tempDir);
- }
- if (out != null) {
- out.close();
- }
- }
-
- ret.put("version", String.valueOf(project.getVersion()));
- }
- }
-
- private void handleUpload(HttpServletRequest req, HttpServletResponse resp, Map<String, Object> multipart, Session session) throws ServletException, IOException {
- HashMap<String,String> ret = new HashMap<String,String>();
- String projectName = (String) multipart.get("project");
- ajaxHandleUpload(req, ret, multipart, session);
-
- if (ret.containsKey("error")) {
- setErrorMessageInCookie(resp, ret.get("error"));
- }
-
- resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
- }
-
- private static class NodeLevelComparator implements Comparator<Node> {
- @Override
- public int compare(Node node1, Node node2) {
- return node1.getLevel() - node2.getLevel();
- }
- }
-
- public class PageSelection {
- private String page;
- private int size;
- private boolean disabled;
- private boolean selected;
- private int nextPage;
-
- public PageSelection(String pageName, int size, boolean disabled, boolean selected, int nextPage) {
- this.page = pageName;
- this.size = size;
- this.disabled = disabled;
- this.setSelected(selected);
- this.nextPage = nextPage;
- }
-
- public String getPage() {
- return page;
- }
-
- public int getSize() {
- return size;
- }
-
- public boolean getDisabled() {
- return disabled;
- }
-
- public boolean isSelected() {
- return selected;
- }
-
- public int getNextPage() {
- return nextPage;
- }
-
- public void setSelected(boolean selected) {
- this.selected = selected;
- }
- }
-
- private Permission getPermissionObject(Project project, User user, Permission.Type type) {
- Permission perm = project.getCollectivePermission(user);
-
- for(String roleName: user.getRoles()) {
- Role role = userManager.getRole(roleName);
- perm.addPermissions(role.getPermission());
- }
-
- return perm;
- }
-
- private boolean hasPermissionToCreateProject(User user) {
- for(String roleName: user.getRoles()) {
- Role role = userManager.getRole(roleName);
- Permission perm = role.getPermission();
- if (perm.isPermissionSet(Permission.Type.ADMIN) || perm.isPermissionSet(Permission.Type.CREATEPROJECTS)) {
- return true;
- }
- }
-
- return false;
- }
+ } catch (AccessControlException e) {
+ page.add("errorMsg", e.getMessage());
+ }
+
+ page.render();
+ }
+
+ private void handleProjectPage(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException {
+ Page page =
+ newPage(req, resp, session,
+ "azkaban/webapp/servlet/velocity/projectpage.vm");
+ String projectName = getParam(req, "project");
+
+ User user = session.getUser();
+ Project project = null;
+ try {
+ project = projectManager.getProject(projectName);
+ if (project == null) {
+ page.add("errorMsg", "Project " + projectName + " not found.");
+ } else {
+ if (!hasPermission(project, user, Type.READ)) {
+ throw new AccessControlException("No permission to view project "
+ + projectName + ".");
+ }
+
+ page.add("project", project);
+ page.add("admins", Utils.flattenToString(
+ project.getUsersWithPermission(Type.ADMIN), ","));
+ Permission perm = this.getPermissionObject(project, user, Type.ADMIN);
+ page.add("userpermission", perm);
+
+ boolean adminPerm = perm.isPermissionSet(Type.ADMIN);
+ if (adminPerm) {
+ page.add("admin", true);
+ }
+ // Set this so we can display execute buttons only to those who have
+ // access.
+ if (perm.isPermissionSet(Type.EXECUTE) || adminPerm) {
+ page.add("exec", true);
+ } else {
+ page.add("exec", false);
+ }
+
+ List<Flow> flows = project.getFlows();
+ if (!flows.isEmpty()) {
+ Collections.sort(flows, FLOW_ID_COMPARATOR);
+ page.add("flows", flows);
+ }
+ }
+ } catch (AccessControlException e) {
+ page.add("errorMsg", e.getMessage());
+ }
+ page.render();
+ }
+
+ private void handleCreate(HttpServletRequest req, HttpServletResponse resp,
+ Session session) throws ServletException {
+ String projectName = hasParam(req, "name") ? getParam(req, "name") : null;
+ String projectDescription =
+ hasParam(req, "description") ? getParam(req, "description") : null;
+ logger.info("Create project " + projectName);
+
+ User user = session.getUser();
+
+ String status = null;
+ String action = null;
+ String message = null;
+ HashMap<String, Object> params = null;
+
+ if (lockdownCreateProjects && !hasPermissionToCreateProject(user)) {
+ message =
+ "User " + user.getUserId()
+ + " doesn't have permission to create projects.";
+ logger.info(message);
+ status = "error";
+ } else {
+ try {
+ projectManager.createProject(projectName, projectDescription, user);
+ status = "success";
+ action = "redirect";
+ String redirect = "manager?project=" + projectName;
+ params = new HashMap<String, Object>();
+ params.put("path", redirect);
+ } catch (ProjectManagerException e) {
+ message = e.getMessage();
+ status = "error";
+ }
+ }
+ String response = createJsonResponse(status, message, action, params);
+ try {
+ Writer write = resp.getWriter();
+ write.append(response);
+ write.flush();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ private void ajaxHandleUpload(HttpServletRequest req,
+ Map<String, String> ret, Map<String, Object> multipart, Session session)
+ throws ServletException, IOException {
+ User user = session.getUser();
+ String projectName = (String) multipart.get("project");
+ Project project = projectManager.getProject(projectName);
+
+ if (projectName == null || projectName.isEmpty()) {
+ ret.put("error", "No project name found.");
+ } else if (project == null) {
+ ret.put("error", "Installation Failed. Project '" + projectName
+ + "' doesn't exist.");
+ } else if (!hasPermission(project, user, Type.WRITE)) {
+ ret.put("error", "Installation Failed. User '" + user.getUserId()
+ + "' does not have write access.");
+ } else {
+ ret.put("projectId", String.valueOf(project.getId()));
+
+ FileItem item = (FileItem) multipart.get("file");
+ String name = item.getName();
+ String type = null;
+
+ final String contentType = item.getContentType();
+ if (contentType != null
+ && (contentType.startsWith("application/zip")
+ || contentType.startsWith("application/x-zip-compressed") || contentType
+ .startsWith("application/octet-stream"))) {
+ type = "zip";
+ } else {
+ item.delete();
+ ret.put("error", "File type " + contentType + " unrecognized.");
+
+ return;
+ }
+
+ File tempDir = Utils.createTempDir();
+ OutputStream out = null;
+ try {
+ logger.info("Uploading file " + name);
+ File archiveFile = new File(tempDir, name);
+ out = new BufferedOutputStream(new FileOutputStream(archiveFile));
+ IOUtils.copy(item.getInputStream(), out);
+ out.close();
+
+ projectManager.uploadProject(project, archiveFile, type, user);
+ } catch (Exception e) {
+ logger.info("Installation Failed.", e);
+ String error = e.getMessage();
+ if (error.length() > 512) {
+ error = error.substring(0, 512) + "\nToo many errors to display.\n";
+ }
+ ret.put("error", "Installation Failed.\n" + error);
+ } finally {
+ if (tempDir.exists()) {
+ FileUtils.deleteDirectory(tempDir);
+ }
+ if (out != null) {
+ out.close();
+ }
+ }
+
+ ret.put("version", String.valueOf(project.getVersion()));
+ }
+ }
+
+ private void handleUpload(HttpServletRequest req, HttpServletResponse resp,
+ Map<String, Object> multipart, Session session) throws ServletException,
+ IOException {
+ HashMap<String, String> ret = new HashMap<String, String>();
+ String projectName = (String) multipart.get("project");
+ ajaxHandleUpload(req, ret, multipart, session);
+
+ if (ret.containsKey("error")) {
+ setErrorMessageInCookie(resp, ret.get("error"));
+ }
+
+ resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
+ }
+
+ private static class NodeLevelComparator implements Comparator<Node> {
+ @Override
+ public int compare(Node node1, Node node2) {
+ return node1.getLevel() - node2.getLevel();
+ }
+ }
+
+ public class PageSelection {
+ private String page;
+ private int size;
+ private boolean disabled;
+ private boolean selected;
+ private int nextPage;
+
+ public PageSelection(String pageName, int size, boolean disabled,
+ boolean selected, int nextPage) {
+ this.page = pageName;
+ this.size = size;
+ this.disabled = disabled;
+ this.setSelected(selected);
+ this.nextPage = nextPage;
+ }
+
+ public String getPage() {
+ return page;
+ }
+
+ public int getSize() {
+ return size;
+ }
+
+ public boolean getDisabled() {
+ return disabled;
+ }
+
+ public boolean isSelected() {
+ return selected;
+ }
+
+ public int getNextPage() {
+ return nextPage;
+ }
+
+ public void setSelected(boolean selected) {
+ this.selected = selected;
+ }
+ }
+
+ private Permission getPermissionObject(Project project, User user,
+ Permission.Type type) {
+ Permission perm = project.getCollectivePermission(user);
+
+ for (String roleName : user.getRoles()) {
+ Role role = userManager.getRole(roleName);
+ perm.addPermissions(role.getPermission());
+ }
+
+ return perm;
+ }
+
+ private boolean hasPermissionToCreateProject(User user) {
+ for (String roleName : user.getRoles()) {
+ Role role = userManager.getRole(roleName);
+ Permission perm = role.getPermission();
+ if (perm.isPermissionSet(Permission.Type.ADMIN)
+ || perm.isPermissionSet(Permission.Type.CREATEPROJECTS)) {
+ return true;
+ }
+ }
+
+ return false;
+ }
}
src/main/java/azkaban/webapp/servlet/ProjectServlet.java 211(+104 -107)
diff --git a/src/main/java/azkaban/webapp/servlet/ProjectServlet.java b/src/main/java/azkaban/webapp/servlet/ProjectServlet.java
index ba263d6..2f9e4dd 100644
--- a/src/main/java/azkaban/webapp/servlet/ProjectServlet.java
+++ b/src/main/java/azkaban/webapp/servlet/ProjectServlet.java
@@ -39,111 +39,108 @@ import azkaban.webapp.session.Session;
* The main page
*/
public class ProjectServlet extends LoginAbstractAzkabanServlet {
- private static final Logger logger =
- Logger.getLogger(ProjectServlet.class.getName());
- private static final String LOCKDOWN_CREATE_PROJECTS_KEY =
- "lockdown.create.projects";
- private static final long serialVersionUID = -1;
-
- private UserManager userManager;
-
- private boolean lockdownCreateProjects = false;
-
- @Override
- public void init(ServletConfig config) throws ServletException {
- super.init(config);
- AzkabanWebServer server = (AzkabanWebServer)getApplication();
-
- userManager = server.getUserManager();
- lockdownCreateProjects = server.getServerProps().getBoolean(
- LOCKDOWN_CREATE_PROJECTS_KEY, false);
- if (lockdownCreateProjects) {
- logger.info("Creation of projects is locked down");
- }
- }
-
- @Override
- protected void handleGet(HttpServletRequest req, HttpServletResponse resp,
- Session session) throws ServletException, IOException {
- if (hasParam(req, "doaction")) {
- if (getParam(req, "doaction").equals("search")) {
- String searchTerm = getParam(req, "searchterm");
- if (!searchTerm.equals("") && !searchTerm.equals(".*")) {
- handleFilter(req, resp, session, searchTerm);
- return;
- }
- }
- }
-
- User user = session.getUser();
-
- ProjectManager manager =
- ((AzkabanWebServer)getApplication()).getProjectManager();
- Page page = newPage(
- req, resp, session, "azkaban/webapp/servlet/velocity/index.vm");
-
- if (lockdownCreateProjects && !hasPermissionToCreateProject(user)) {
- page.add("hideCreateProject", true);
- }
-
- if (hasParam(req, "all")) {
- List<Project> projects = manager.getProjects();
- page.add("viewProjects", "all");
- page.add("projects", projects);
- }
- else if (hasParam(req, "group")) {
- List<Project> projects = manager.getGroupProjects(user);
- page.add("viewProjects", "group");
- page.add("projects", projects);
- }
- else {
- List<Project> projects = manager.getUserProjects(user);
- page.add("viewProjects", "personal");
- page.add("projects", projects);
- }
-
- page.render();
- }
-
- private void handleFilter(HttpServletRequest req, HttpServletResponse resp,
- Session session, String searchTerm) {
- User user = session.getUser();
- ProjectManager manager =
- ((AzkabanWebServer)getApplication()).getProjectManager();
- Page page = newPage(
- req, resp, session, "azkaban/webapp/servlet/velocity/index.vm");
- if (hasParam(req, "all")) {
- //do nothing special if one asks for 'ALL' projects
- List<Project> projects = manager.getProjectsByRegex(searchTerm);
- page.add("allProjects", "");
- page.add("projects", projects);
- page.add("search_term", searchTerm);
- }
- else {
- List<Project> projects = manager.getUserProjectsByRegex(user, searchTerm);
- page.add("projects", projects);
- page.add("search_term", searchTerm);
- }
-
- page.render();
- }
-
- @Override
- protected void handlePost(HttpServletRequest req, HttpServletResponse resp,
- Session session) throws ServletException, IOException {
- // TODO Auto-generated method stub
- }
-
- private boolean hasPermissionToCreateProject(User user) {
- for (String roleName: user.getRoles()) {
- Role role = userManager.getRole(roleName);
- Permission perm = role.getPermission();
- if (perm.isPermissionSet(Permission.Type.ADMIN) ||
- perm.isPermissionSet(Permission.Type.CREATEPROJECTS)) {
- return true;
- }
- }
-
- return false;
- }
+ private static final Logger logger = Logger.getLogger(ProjectServlet.class
+ .getName());
+ private static final String LOCKDOWN_CREATE_PROJECTS_KEY =
+ "lockdown.create.projects";
+ private static final long serialVersionUID = -1;
+
+ private UserManager userManager;
+
+ private boolean lockdownCreateProjects = false;
+
+ @Override
+ public void init(ServletConfig config) throws ServletException {
+ super.init(config);
+ AzkabanWebServer server = (AzkabanWebServer) getApplication();
+
+ userManager = server.getUserManager();
+ lockdownCreateProjects =
+ server.getServerProps().getBoolean(LOCKDOWN_CREATE_PROJECTS_KEY, false);
+ if (lockdownCreateProjects) {
+ logger.info("Creation of projects is locked down");
+ }
+ }
+
+ @Override
+ protected void handleGet(HttpServletRequest req, HttpServletResponse resp,
+ Session session) throws ServletException, IOException {
+ if (hasParam(req, "doaction")) {
+ if (getParam(req, "doaction").equals("search")) {
+ String searchTerm = getParam(req, "searchterm");
+ if (!searchTerm.equals("") && !searchTerm.equals(".*")) {
+ handleFilter(req, resp, session, searchTerm);
+ return;
+ }
+ }
+ }
+
+ User user = session.getUser();
+
+ ProjectManager manager =
+ ((AzkabanWebServer) getApplication()).getProjectManager();
+ Page page =
+ newPage(req, resp, session, "azkaban/webapp/servlet/velocity/index.vm");
+
+ if (lockdownCreateProjects && !hasPermissionToCreateProject(user)) {
+ page.add("hideCreateProject", true);
+ }
+
+ if (hasParam(req, "all")) {
+ List<Project> projects = manager.getProjects();
+ page.add("viewProjects", "all");
+ page.add("projects", projects);
+ } else if (hasParam(req, "group")) {
+ List<Project> projects = manager.getGroupProjects(user);
+ page.add("viewProjects", "group");
+ page.add("projects", projects);
+ } else {
+ List<Project> projects = manager.getUserProjects(user);
+ page.add("viewProjects", "personal");
+ page.add("projects", projects);
+ }
+
+ page.render();
+ }
+
+ private void handleFilter(HttpServletRequest req, HttpServletResponse resp,
+ Session session, String searchTerm) {
+ User user = session.getUser();
+ ProjectManager manager =
+ ((AzkabanWebServer) getApplication()).getProjectManager();
+ Page page =
+ newPage(req, resp, session, "azkaban/webapp/servlet/velocity/index.vm");
+ if (hasParam(req, "all")) {
+ // do nothing special if one asks for 'ALL' projects
+ List<Project> projects = manager.getProjectsByRegex(searchTerm);
+ page.add("allProjects", "");
+ page.add("projects", projects);
+ page.add("search_term", searchTerm);
+ } else {
+ List<Project> projects = manager.getUserProjectsByRegex(user, searchTerm);
+ page.add("projects", projects);
+ page.add("search_term", searchTerm);
+ }
+
+ page.render();
+ }
+
+ @Override
+ protected void handlePost(HttpServletRequest req, HttpServletResponse resp,
+ Session session) throws ServletException, IOException {
+ // TODO Auto-generated method stub
+ }
+
+ private boolean hasPermissionToCreateProject(User user) {
+ for (String roleName : user.getRoles()) {
+ Role role = userManager.getRole(roleName);
+ Permission perm = role.getPermission();
+ if (perm.isPermissionSet(Permission.Type.ADMIN)
+ || perm.isPermissionSet(Permission.Type.CREATEPROJECTS)) {
+ return true;
+ }
+ }
+
+ return false;
+ }
}
src/main/java/azkaban/webapp/servlet/ScheduleServlet.java 1265(+653 -612)
diff --git a/src/main/java/azkaban/webapp/servlet/ScheduleServlet.java b/src/main/java/azkaban/webapp/servlet/ScheduleServlet.java
index 96f9f9b..3d50446 100644
--- a/src/main/java/azkaban/webapp/servlet/ScheduleServlet.java
+++ b/src/main/java/azkaban/webapp/servlet/ScheduleServlet.java
@@ -68,621 +68,662 @@ import azkaban.webapp.AzkabanWebServer;
import azkaban.webapp.session.Session;
public class ScheduleServlet extends LoginAbstractAzkabanServlet {
- private static final long serialVersionUID = 1L;
- private static final Logger logger = Logger.getLogger(ScheduleServlet.class);
- private ProjectManager projectManager;
- private ScheduleManager scheduleManager;
-
- @Override
- public void init(ServletConfig config) throws ServletException {
- super.init(config);
- AzkabanWebServer server = (AzkabanWebServer)getApplication();
- projectManager = server.getProjectManager();
- scheduleManager = server.getScheduleManager();
- }
-
- @Override
- protected void handleGet(HttpServletRequest req, HttpServletResponse resp,
- Session session) throws ServletException, IOException {
- if (hasParam(req, "ajax")) {
- handleAJAXAction(req, resp, session);
- }
- else if (hasParam(req, "calendar")) {
- handleGetScheduleCalendar(req, resp, session);
- }
- else {
- handleGetAllSchedules(req, resp, session);
- }
- }
-
- private void handleAJAXAction(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- HashMap<String, Object> ret = new HashMap<String, Object>();
- String ajaxName = getParam(req, "ajax");
-
- if (ajaxName.equals("slaInfo")) {
- ajaxSlaInfo(req, ret, session.getUser());
- }
- else if (ajaxName.equals("setSla")) {
- ajaxSetSla(req, ret, session.getUser());
- } else if(ajaxName.equals("loadFlow")) {
- ajaxLoadFlows(req, ret, session.getUser());
- }
- else if (ajaxName.equals("loadHistory")) {
- ajaxLoadHistory(req, resp, session.getUser());
- ret = null;
- }
- else if (ajaxName.equals("scheduleFlow")) {
- ajaxScheduleFlow(req, ret, session.getUser());
- }
- else if (ajaxName.equals("fetchSchedule")) {
+ private static final long serialVersionUID = 1L;
+ private static final Logger logger = Logger.getLogger(ScheduleServlet.class);
+ private ProjectManager projectManager;
+ private ScheduleManager scheduleManager;
+
+ @Override
+ public void init(ServletConfig config) throws ServletException {
+ super.init(config);
+ AzkabanWebServer server = (AzkabanWebServer) getApplication();
+ projectManager = server.getProjectManager();
+ scheduleManager = server.getScheduleManager();
+ }
+
+ @Override
+ protected void handleGet(HttpServletRequest req, HttpServletResponse resp,
+ Session session) throws ServletException, IOException {
+ if (hasParam(req, "ajax")) {
+ handleAJAXAction(req, resp, session);
+ } else if (hasParam(req, "calendar")) {
+ handleGetScheduleCalendar(req, resp, session);
+ } else {
+ handleGetAllSchedules(req, resp, session);
+ }
+ }
+
+ private void handleAJAXAction(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException,
+ IOException {
+ HashMap<String, Object> ret = new HashMap<String, Object>();
+ String ajaxName = getParam(req, "ajax");
+
+ if (ajaxName.equals("slaInfo")) {
+ ajaxSlaInfo(req, ret, session.getUser());
+ } else if (ajaxName.equals("setSla")) {
+ ajaxSetSla(req, ret, session.getUser());
+ } else if (ajaxName.equals("loadFlow")) {
+ ajaxLoadFlows(req, ret, session.getUser());
+ } else if (ajaxName.equals("loadHistory")) {
+ ajaxLoadHistory(req, resp, session.getUser());
+ ret = null;
+ } else if (ajaxName.equals("scheduleFlow")) {
+ ajaxScheduleFlow(req, ret, session.getUser());
+ } else if (ajaxName.equals("fetchSchedule")) {
ajaxFetchSchedule(req, ret, session.getUser());
}
- if (ret != null) {
- this.writeJSON(resp, ret);
- }
- }
-
- private void ajaxSetSla(HttpServletRequest req, HashMap<String, Object> ret, User user) {
- try {
- int scheduleId = getIntParam(req, "scheduleId");
- Schedule sched = scheduleManager.getSchedule(scheduleId);
-
- Project project = projectManager.getProject(sched.getProjectId());
- if(!hasPermission(project, user, Permission.Type.SCHEDULE)) {
- ret.put("error", "User " + user + " does not have permission to set SLA for this flow.");
- return;
- }
-
- String emailStr = getParam(req, "slaEmails");
- String[] emailSplit = emailStr.split("\\s*,\\s*|\\s*;\\s*|\\s+");
- List<String> slaEmails = Arrays.asList(emailSplit);
-
- Map<String, String> settings = getParamGroup(req, "settings");
-
- List<SlaOption> slaOptions = new ArrayList<SlaOption>();
- for(String set : settings.keySet()) {
- SlaOption sla;
- try {
- sla = parseSlaSetting(settings.get(set));
- }
- catch (Exception e) {
- throw new ServletException(e);
- }
- if(sla != null) {
- sla.getInfo().put(SlaOption.INFO_FLOW_NAME, sched.getFlowName());
- sla.getInfo().put(SlaOption.INFO_EMAIL_LIST, slaEmails);
- slaOptions.add(sla);
- }
- }
-
- sched.setSlaOptions(slaOptions);
- scheduleManager.insertSchedule(sched);
-
- if(slaOptions != null) {
- projectManager.postProjectEvent(project, EventType.SLA, user.getUserId(), "SLA for flow " + sched.getFlowName() + " has been added/changed.");
- }
-
- } catch (ServletException e) {
- ret.put("error", e.getMessage());
- } catch (ScheduleManagerException e) {
- ret.put("error", e.getMessage());
- }
-
- }
-
- private SlaOption parseSlaSetting(String set) throws ScheduleManagerException {
- // "" + Duration + EmailAction + KillAction
- logger.info("Tryint to set sla with the following set: " + set);
-
- String slaType;
- List<String> slaActions = new ArrayList<String>();
- Map<String, Object> slaInfo = new HashMap<String, Object>();
- String[] parts = set.split(",", -1);
- String id = parts[0];
- String rule = parts[1];
- String duration = parts[2];
- String emailAction = parts[3];
- String killAction = parts[4];
- if(emailAction.equals("true") || killAction.equals("true")) {
- //String type = id.equals("") ? SlaOption.RULE_FLOW_RUNTIME_SLA : SlaOption.RULE_JOB_RUNTIME_SLA ;
- if(emailAction.equals("true")) {
- slaActions.add(SlaOption.ACTION_ALERT);
- slaInfo.put(SlaOption.ALERT_TYPE, "email");
- }
- if(killAction.equals("true")) {
- slaActions.add(SlaOption.ACTION_CANCEL_FLOW);
- }
- if(id.equals("")) {
- if(rule.equals("SUCCESS")) {
- slaType = SlaOption.TYPE_FLOW_SUCCEED;
- }
- else {
- slaType = SlaOption.TYPE_FLOW_FINISH;
- }
- } else {
- slaInfo.put(SlaOption.INFO_JOB_NAME, id);
- if(rule.equals("SUCCESS")) {
- slaType = SlaOption.TYPE_JOB_SUCCEED;
- } else {
- slaType = SlaOption.TYPE_JOB_FINISH;
- }
- }
-
- ReadablePeriod dur;
- try {
- dur = parseDuration(duration);
- }
- catch (Exception e) {
- throw new ScheduleManagerException("Unable to parse duration for a SLA that needs to take actions!", e);
- }
-
- slaInfo.put(SlaOption.INFO_DURATION, Utils.createPeriodString(dur));
- SlaOption r = new SlaOption(slaType, slaActions, slaInfo);
- logger.info("Parsing sla as id:" + id + " type:" + slaType + " rule:" + rule + " Duration:" + duration + " actions:" + slaActions);
- return r;
- }
- return null;
- }
-
- private ReadablePeriod parseDuration(String duration) {
- int hour = Integer.parseInt(duration.split(":")[0]);
- int min = Integer.parseInt(duration.split(":")[1]);
- return Minutes.minutes(min+hour*60).toPeriod();
- }
-
- private void ajaxFetchSchedule(HttpServletRequest req,
+ if (ret != null) {
+ this.writeJSON(resp, ret);
+ }
+ }
+
+ private void ajaxSetSla(HttpServletRequest req, HashMap<String, Object> ret,
+ User user) {
+ try {
+ int scheduleId = getIntParam(req, "scheduleId");
+ Schedule sched = scheduleManager.getSchedule(scheduleId);
+
+ Project project = projectManager.getProject(sched.getProjectId());
+ if (!hasPermission(project, user, Permission.Type.SCHEDULE)) {
+ ret.put("error", "User " + user
+ + " does not have permission to set SLA for this flow.");
+ return;
+ }
+
+ String emailStr = getParam(req, "slaEmails");
+ String[] emailSplit = emailStr.split("\\s*,\\s*|\\s*;\\s*|\\s+");
+ List<String> slaEmails = Arrays.asList(emailSplit);
+
+ Map<String, String> settings = getParamGroup(req, "settings");
+
+ List<SlaOption> slaOptions = new ArrayList<SlaOption>();
+ for (String set : settings.keySet()) {
+ SlaOption sla;
+ try {
+ sla = parseSlaSetting(settings.get(set));
+ } catch (Exception e) {
+ throw new ServletException(e);
+ }
+ if (sla != null) {
+ sla.getInfo().put(SlaOption.INFO_FLOW_NAME, sched.getFlowName());
+ sla.getInfo().put(SlaOption.INFO_EMAIL_LIST, slaEmails);
+ slaOptions.add(sla);
+ }
+ }
+
+ sched.setSlaOptions(slaOptions);
+ scheduleManager.insertSchedule(sched);
+
+ if (slaOptions != null) {
+ projectManager.postProjectEvent(project, EventType.SLA,
+ user.getUserId(), "SLA for flow " + sched.getFlowName()
+ + " has been added/changed.");
+ }
+
+ } catch (ServletException e) {
+ ret.put("error", e.getMessage());
+ } catch (ScheduleManagerException e) {
+ ret.put("error", e.getMessage());
+ }
+
+ }
+
+ private SlaOption parseSlaSetting(String set) throws ScheduleManagerException {
+ // "" + Duration + EmailAction + KillAction
+ logger.info("Trying to set sla with the following set: " + set);
+
+ String slaType;
+ List<String> slaActions = new ArrayList<String>();
+ Map<String, Object> slaInfo = new HashMap<String, Object>();
+ String[] parts = set.split(",", -1);
+ String id = parts[0];
+ String rule = parts[1];
+ String duration = parts[2];
+ String emailAction = parts[3];
+ String killAction = parts[4];
+ if (emailAction.equals("true") || killAction.equals("true")) {
+ // String type = id.equals("") ? SlaOption.RULE_FLOW_RUNTIME_SLA :
+ // SlaOption.RULE_JOB_RUNTIME_SLA ;
+ if (emailAction.equals("true")) {
+ slaActions.add(SlaOption.ACTION_ALERT);
+ slaInfo.put(SlaOption.ALERT_TYPE, "email");
+ }
+ if (killAction.equals("true")) {
+ slaActions.add(SlaOption.ACTION_CANCEL_FLOW);
+ }
+ if (id.equals("")) {
+ if (rule.equals("SUCCESS")) {
+ slaType = SlaOption.TYPE_FLOW_SUCCEED;
+ } else {
+ slaType = SlaOption.TYPE_FLOW_FINISH;
+ }
+ } else {
+ slaInfo.put(SlaOption.INFO_JOB_NAME, id);
+ if (rule.equals("SUCCESS")) {
+ slaType = SlaOption.TYPE_JOB_SUCCEED;
+ } else {
+ slaType = SlaOption.TYPE_JOB_FINISH;
+ }
+ }
+
+ ReadablePeriod dur;
+ try {
+ dur = parseDuration(duration);
+ } catch (Exception e) {
+ throw new ScheduleManagerException(
+ "Unable to parse duration for a SLA that needs to take actions!", e);
+ }
+
+ slaInfo.put(SlaOption.INFO_DURATION, Utils.createPeriodString(dur));
+ SlaOption r = new SlaOption(slaType, slaActions, slaInfo);
+ logger.info("Parsing sla as id:" + id + " type:" + slaType + " rule:"
+ + rule + " Duration:" + duration + " actions:" + slaActions);
+ return r;
+ }
+ return null;
+ }
+
+ private ReadablePeriod parseDuration(String duration) {
+ int hour = Integer.parseInt(duration.split(":")[0]);
+ int min = Integer.parseInt(duration.split(":")[1]);
+ return Minutes.minutes(min + hour * 60).toPeriod();
+ }
+
+ private void ajaxFetchSchedule(HttpServletRequest req,
HashMap<String, Object> ret, User user) throws ServletException {
-
- int projectId = getIntParam(req, "projectId");
- String flowId = getParam(req, "flowId");
- try {
- Schedule schedule = scheduleManager.getSchedule(
- projectId, flowId);
-
- if (schedule != null) {
- Map<String, String> jsonObj = new HashMap<String, String>();
- jsonObj.put("scheduleId", Integer.toString(schedule.getScheduleId()));
- jsonObj.put("submitUser", schedule.getSubmitUser());
- jsonObj.put("firstSchedTime",
- utils.formatDateTime(schedule.getFirstSchedTime()));
- jsonObj.put("nextExecTime",
- utils.formatDateTime(schedule.getNextExecTime()));
- jsonObj.put("period", utils.formatPeriod(schedule.getPeriod()));
- ret.put("schedule", jsonObj);
- }
- }
- catch (ScheduleManagerException e) {
+
+ int projectId = getIntParam(req, "projectId");
+ String flowId = getParam(req, "flowId");
+ try {
+ Schedule schedule = scheduleManager.getSchedule(projectId, flowId);
+
+ if (schedule != null) {
+ Map<String, String> jsonObj = new HashMap<String, String>();
+ jsonObj.put("scheduleId", Integer.toString(schedule.getScheduleId()));
+ jsonObj.put("submitUser", schedule.getSubmitUser());
+ jsonObj.put("firstSchedTime",
+ utils.formatDateTime(schedule.getFirstSchedTime()));
+ jsonObj.put("nextExecTime",
+ utils.formatDateTime(schedule.getNextExecTime()));
+ jsonObj.put("period", utils.formatPeriod(schedule.getPeriod()));
+ ret.put("schedule", jsonObj);
+ }
+ } catch (ScheduleManagerException e) {
ret.put("error", e);
- }
- }
-
- private void ajaxSlaInfo(HttpServletRequest req, HashMap<String, Object> ret, User user) {
- int scheduleId;
- try {
- scheduleId = getIntParam(req, "scheduleId");
- Schedule sched = scheduleManager.getSchedule(scheduleId);
- Project project = getProjectAjaxByPermission(ret, sched.getProjectId(), user, Type.READ);
- if (project == null) {
- ret.put("error", "Error loading project. Project " + sched.getProjectId() + " doesn't exist");
- return;
- }
-
- Flow flow = project.getFlow(sched.getFlowName());
- if (flow == null) {
- ret.put("error", "Error loading flow. Flow " + sched.getFlowName() + " doesn't exist in " + sched.getProjectId());
- return;
- }
-
- List<SlaOption> slaOptions = sched.getSlaOptions();
- ExecutionOptions flowOptions = sched.getExecutionOptions();
-
- if(slaOptions != null && slaOptions.size() > 0) {
- ret.put("slaEmails", slaOptions.get(0).getInfo().get(SlaOption.INFO_EMAIL_LIST));
-
- List<Object> setObj = new ArrayList<Object>();
- for(SlaOption sla: slaOptions) {
- setObj.add(sla.toWebObject());
- }
- ret.put("settings", setObj);
- }
- else if (flowOptions != null) {
- if(flowOptions.getFailureEmails() != null) {
- List<String> emails = flowOptions.getFailureEmails();
- if(emails.size() > 0) {
- ret.put("slaEmails", emails);
- }
- }
- }
- else {
- if(flow.getFailureEmails() != null) {
- List<String> emails = flow.getFailureEmails();
- if(emails.size() > 0) {
- ret.put("slaEmails", emails);
- }
- }
- }
-
- List<String> allJobs = new ArrayList<String>();
- for(Node n : flow.getNodes()) {
- allJobs.add(n.getId());
- }
-
- ret.put("allJobNames", allJobs);
- } catch (ServletException e) {
- ret.put("error", e);
- } catch (ScheduleManagerException e) {
- ret.put("error", e);
- }
- }
-
- protected Project getProjectAjaxByPermission(Map<String, Object> ret, int projectId, User user, Permission.Type type) {
- Project project = projectManager.getProject(projectId);
-
- if (project == null) {
- ret.put("error", "Project '" + project + "' not found.");
- }
- else if (!hasPermission(project, user, type)) {
- ret.put("error", "User '" + user.getUserId() + "' doesn't have " + type.name() + " permissions on " + project.getName());
- }
- else {
- return project;
- }
-
- return null;
- }
-
- private void handleGetAllSchedules(HttpServletRequest req, HttpServletResponse resp,
- Session session) throws ServletException, IOException{
-
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/scheduledflowpage.vm");
-
- List<Schedule> schedules;
- try {
- schedules = scheduleManager.getSchedules();
- } catch (ScheduleManagerException e) {
- // TODO Auto-generated catch block
- throw new ServletException(e);
- }
- page.add("schedules", schedules);
-//
-// List<SLA> slas = slaManager.getSLAs();
-// page.add("slas", slas);
-
- page.render();
- }
-
- private void handleGetScheduleCalendar(HttpServletRequest req, HttpServletResponse resp,
- Session session) throws ServletException, IOException{
-
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/scheduledflowcalendarpage.vm");
-
- List<Schedule> schedules;
- try {
- schedules = scheduleManager.getSchedules();
- } catch (ScheduleManagerException e) {
- // TODO Auto-generated catch block
- throw new ServletException(e);
- }
- page.add("schedules", schedules);
-//
-// List<SLA> slas = slaManager.getSLAs();
-// page.add("slas", slas);
-
- page.render();
- }
-
- @Override
- protected void handlePost(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- if (hasParam(req, "ajax")) {
- handleAJAXAction(req, resp, session);
- }
- else {
- HashMap<String, Object> ret = new HashMap<String, Object>();
- if (hasParam(req, "action")) {
- String action = getParam(req, "action");
- if (action.equals("scheduleFlow")) {
- ajaxScheduleFlow(req, ret, session.getUser());
- }
- else if (action.equals("removeSched")){
- ajaxRemoveSched(req, ret, session.getUser());
- }
- }
-
- if(ret.get("status") == ("success"))
- setSuccessMessageInCookie(resp, (String) ret.get("message"));
- else
- setErrorMessageInCookie(resp, (String) ret.get("message"));
-
- this.writeJSON(resp, ret);
- }
- }
-
- private void ajaxLoadFlows(HttpServletRequest req, HashMap<String, Object> ret, User user) throws ServletException {
- List<Schedule> schedules;
- try {
- schedules = scheduleManager.getSchedules();
- } catch (ScheduleManagerException e) {
- // TODO Auto-generated catch block
- throw new ServletException(e);
- }
- // See if anything is scheduled
- if (schedules.size() <= 0)
- return;
-
- List<HashMap<String, Object>> output = new ArrayList<HashMap<String, Object>>();
- ret.put("items", output);
-
- for (Schedule schedule : schedules) {
- try {
- writeScheduleData(output, schedule);
- } catch (ScheduleManagerException e) {
- // TODO Auto-generated catch block
- throw new ServletException(e);
- }
- }
- }
-
- private void writeScheduleData(List<HashMap<String, Object>> output, Schedule schedule) throws ScheduleManagerException {
- Map<String, Object> stats = ScheduleStatisticManager.getStatistics(schedule.getScheduleId(), (AzkabanWebServer) getApplication());
- HashMap<String, Object> data = new HashMap<String, Object>();
- data.put("scheduleid", schedule.getScheduleId());
- data.put("flowname", schedule.getFlowName());
- data.put("projectname", schedule.getProjectName());
- data.put("time", schedule.getFirstSchedTime());
-
- DateTime time = DateTime.now();
- long period = 0;
- if (schedule.getPeriod() != null) {
- period = time.plus(schedule.getPeriod()).getMillis() - time.getMillis();
- }
- data.put("period", period);
- int length = 3600 * 1000;
- if (stats.get("average") != null && stats.get("average") instanceof Integer) {
- length = (int) (Integer) stats.get("average");
- if (length == 0) {
- length = 3600 * 1000;
- }
- }
- data.put("length", length);
- data.put("history", false);
- data.put("stats", stats);
- output.add(data);
- }
-
- private void ajaxLoadHistory(HttpServletRequest req, HttpServletResponse resp, User user) throws ServletException, IOException {
- resp.setContentType(JSON_MIME_TYPE);
- long today = DateTime.now().withTime(0, 0, 0, 0).getMillis();
- long startTime = getLongParam(req, "startTime");
- DateTime start = new DateTime(startTime);
- // Ensure start time is 12:00 AM
- startTime = start.withTime(0, 0, 0, 0).getMillis();
- boolean useCache = false;
- if (startTime < today) {
- useCache = true;
- }
- long endTime = startTime + 24 * 3600 * 1000;
- // long endTime = getLongParam(req, "endTime");
- int loadAll = getIntParam(req, "loadAll");
-
- // Cache file
- String cacheDir = getApplication().getServerProps().getString("cache.directory", "cache");
- File cacheDirFile = new File(cacheDir, "schedule-history");
- File cache = new File(cacheDirFile, startTime + ".cache");
- cache.getParentFile().mkdirs();
-
- if (useCache) {
- // Determine if cache exists
- boolean cacheExists = false;
- synchronized (this) {
- cacheExists = cache.exists() && cache.isFile();
- }
- if (cacheExists) {
- // Send the cache instead
- InputStream cacheInput = new BufferedInputStream(new FileInputStream(cache));
- try {
- IOUtils.copy(cacheInput, resp.getOutputStream());
- // System.out.println("Using cache copy for " + start);
- return;
- } finally {
- IOUtils.closeQuietly(cacheInput);
- }
- }
- }
-
- // Load data if not cached
- List<ExecutableFlow> history = null;
- try {
- AzkabanWebServer server = (AzkabanWebServer) getApplication();
- ExecutorManagerAdapter executorManager = server.getExecutorManager();
- history = executorManager.getExecutableFlows(null, null, null, 0, startTime, endTime, -1, -1);
- } catch (ExecutorManagerException e) {
- logger.error(e);
- }
-
- HashMap<String, Object> ret = new HashMap<String, Object>();
- List<HashMap<String, Object>> output = new ArrayList<HashMap<String, Object>>();
- ret.put("items", output);
- for (ExecutableFlow historyItem : history) {
- // Check if it is an scheduled execution
- if (historyItem.getScheduleId() >= 0 || loadAll != 0) {
- writeHistoryData(output, historyItem);
- }
- }
-
- // Make sure we're ready to cache it, otherwise output and return
- synchronized (this) {
- if (!useCache || cache.exists()) {
- JSONUtils.toJSON(ret, resp.getOutputStream(), false);
- return;
- }
- }
-
- //Create cache file
- File cacheTemp = new File(cacheDirFile, startTime + ".tmp");
- cacheTemp.createNewFile();
- OutputStream cacheOutput = new BufferedOutputStream(new FileOutputStream(cacheTemp));
- try {
- OutputStream outputStream = new SplitterOutputStream(cacheOutput, resp.getOutputStream());
- // Write to both the cache file and web output
- JSONUtils.toJSON(ret, outputStream, false);
- } finally {
- IOUtils.closeQuietly(cacheOutput);
- }
- //Move cache file
- synchronized (this) {
- cacheTemp.renameTo(cache);
- }
- }
-
- private void writeHistoryData(List<HashMap<String, Object>> output, ExecutableFlow history) {
- HashMap<String, Object> data = new HashMap<String, Object>();
-
- data.put("scheduleid", history.getScheduleId());
- Project project = projectManager.getProject(history.getProjectId());
- data.put("flowname", history.getFlowId());
- data.put("projectname", project.getName());
- data.put("time", history.getStartTime());
- data.put("period", "0");
- long endTime = history.getEndTime();
- if(endTime == -1){
- endTime = System.currentTimeMillis();
- }
- data.put("length", endTime - history.getStartTime());
- data.put("history", true);
- data.put("status", history.getStatus().getNumVal());
-
- output.add(data);
- }
-
- private void ajaxRemoveSched(HttpServletRequest req, Map<String, Object> ret, User user) throws ServletException{
- int scheduleId = getIntParam(req, "scheduleId");
- Schedule sched;
- try {
- sched = scheduleManager.getSchedule(scheduleId);
- } catch (ScheduleManagerException e) {
- // TODO Auto-generated catch block
- throw new ServletException(e);
- }
- if(sched == null) {
- ret.put("message", "Schedule with ID " + scheduleId + " does not exist");
- ret.put("status", "error");
- return;
- }
-
- Project project = projectManager.getProject(sched.getProjectId());
-
- if (project == null) {
- ret.put("message", "Project " + sched.getProjectId() + " does not exist");
- ret.put("status", "error");
- return;
- }
-
- if(!hasPermission(project, user, Type.SCHEDULE)) {
- ret.put("status", "error");
- ret.put("message", "Permission denied. Cannot remove schedule with id " + scheduleId);
- return;
- }
-
- scheduleManager.removeSchedule(sched);
- logger.info("User '" + user.getUserId() + " has removed schedule " + sched.getScheduleName());
- projectManager.postProjectEvent(project, EventType.SCHEDULE, user.getUserId(), "Schedule " + sched.toString() + " has been removed.");
-
- ret.put("status", "success");
- ret.put("message", "flow " + sched.getFlowName() + " removed from Schedules.");
- return;
- }
-
- private void ajaxScheduleFlow(HttpServletRequest req, HashMap<String, Object> ret, User user) throws ServletException {
- String projectName = getParam(req, "projectName");
- String flowName = getParam(req, "flow");
- int projectId = getIntParam(req, "projectId");
-
- Project project = projectManager.getProject(projectId);
-
- if (project == null) {
- ret.put("message", "Project " + projectName + " does not exist");
- ret.put("status", "error");
- return;
- }
-
- if (!hasPermission(project, user, Type.SCHEDULE)) {
- ret.put("status", "error");
- ret.put("message", "Permission denied. Cannot execute " + flowName);
- return;
- }
-
- Flow flow = project.getFlow(flowName);
- if (flow == null) {
- ret.put("status", "error");
- ret.put("message", "Flow " + flowName + " cannot be found in project " + project);
- return;
- }
-
- String scheduleTime = getParam(req, "scheduleTime");
- String scheduleDate = getParam(req, "scheduleDate");
- DateTime firstSchedTime;
- try {
- firstSchedTime = parseDateTime(scheduleDate, scheduleTime);
- }
- catch (Exception e) {
- ret.put("error", "Invalid date and/or time '" + scheduleDate + " " + scheduleTime);
- return;
- }
-
- ReadablePeriod thePeriod = null;
- try {
- if(hasParam(req, "is_recurring") && getParam(req, "is_recurring").equals("on")) {
- thePeriod = Schedule.parsePeriodString(getParam(req, "period"));
- }
- }
- catch(Exception e){
- ret.put("error", e.getMessage());
- }
-
- // Schedule sched = scheduleManager.getSchedule(projectId, flowName);
- ExecutionOptions flowOptions = null;
- try {
- flowOptions = HttpRequestUtils.parseFlowOptions(req);
- }
- catch (Exception e) {
- ret.put("error", e.getMessage());
- }
-
- List<SlaOption> slaOptions = null;
-
- Schedule schedule = scheduleManager.scheduleFlow(-1, projectId, projectName, flowName, "ready", firstSchedTime.getMillis(), firstSchedTime.getZone(), thePeriod, DateTime.now().getMillis(), firstSchedTime.getMillis(), firstSchedTime.getMillis(), user.getUserId(), flowOptions, slaOptions);
- logger.info("User '" + user.getUserId() + "' has scheduled " + "[" + projectName + flowName + " (" + projectId +")" + "].");
- projectManager.postProjectEvent(project, EventType.SCHEDULE, user.getUserId(), "Schedule " + schedule.toString() + " has been added.");
-
- ret.put("status", "success");
- ret.put("message", projectName + "." + flowName + " scheduled.");
- }
-
- private DateTime parseDateTime(String scheduleDate, String scheduleTime) {
- // scheduleTime: 12,00,pm,PDT
- String[] parts = scheduleTime.split(",", -1);
- int hour = Integer.parseInt(parts[0]);
- int minutes = Integer.parseInt(parts[1]);
- boolean isPm = parts[2].equalsIgnoreCase("pm");
-
- DateTimeZone timezone = parts[3].equals("UTC") ? DateTimeZone.UTC : DateTimeZone.getDefault();
-
- // scheduleDate: 02/10/2013
- DateTime day = null;
- if(scheduleDate == null || scheduleDate.trim().length() == 0) {
- day = new LocalDateTime().toDateTime();
- } else {
- day = DateTimeFormat.forPattern("MM/dd/yyyy").withZone(timezone).parseDateTime(scheduleDate);
- }
-
- hour %= 12;
-
- if(isPm)
- hour += 12;
-
- DateTime firstSchedTime = day.withHourOfDay(hour).withMinuteOfHour(minutes).withSecondOfMinute(0);
-
- return firstSchedTime;
- }
+ }
+ }
+
+ private void ajaxSlaInfo(HttpServletRequest req, HashMap<String, Object> ret,
+ User user) {
+ int scheduleId;
+ try {
+ scheduleId = getIntParam(req, "scheduleId");
+ Schedule sched = scheduleManager.getSchedule(scheduleId);
+ Project project =
+ getProjectAjaxByPermission(ret, sched.getProjectId(), user, Type.READ);
+ if (project == null) {
+ ret.put("error",
+ "Error loading project. Project " + sched.getProjectId()
+ + " doesn't exist");
+ return;
+ }
+
+ Flow flow = project.getFlow(sched.getFlowName());
+ if (flow == null) {
+ ret.put("error", "Error loading flow. Flow " + sched.getFlowName()
+ + " doesn't exist in " + sched.getProjectId());
+ return;
+ }
+
+ List<SlaOption> slaOptions = sched.getSlaOptions();
+ ExecutionOptions flowOptions = sched.getExecutionOptions();
+
+ if (slaOptions != null && slaOptions.size() > 0) {
+ ret.put("slaEmails",
+ slaOptions.get(0).getInfo().get(SlaOption.INFO_EMAIL_LIST));
+
+ List<Object> setObj = new ArrayList<Object>();
+ for (SlaOption sla : slaOptions) {
+ setObj.add(sla.toWebObject());
+ }
+ ret.put("settings", setObj);
+ } else if (flowOptions != null) {
+ if (flowOptions.getFailureEmails() != null) {
+ List<String> emails = flowOptions.getFailureEmails();
+ if (emails.size() > 0) {
+ ret.put("slaEmails", emails);
+ }
+ }
+ } else {
+ if (flow.getFailureEmails() != null) {
+ List<String> emails = flow.getFailureEmails();
+ if (emails.size() > 0) {
+ ret.put("slaEmails", emails);
+ }
+ }
+ }
+
+ List<String> allJobs = new ArrayList<String>();
+ for (Node n : flow.getNodes()) {
+ allJobs.add(n.getId());
+ }
+
+ ret.put("allJobNames", allJobs);
+ } catch (ServletException e) {
+ ret.put("error", e);
+ } catch (ScheduleManagerException e) {
+ ret.put("error", e);
+ }
+ }
+
+ protected Project getProjectAjaxByPermission(Map<String, Object> ret,
+ int projectId, User user, Permission.Type type) {
+ Project project = projectManager.getProject(projectId);
+
+ if (project == null) {
+ ret.put("error", "Project '" + project + "' not found.");
+ } else if (!hasPermission(project, user, type)) {
+ ret.put("error",
+ "User '" + user.getUserId() + "' doesn't have " + type.name()
+ + " permissions on " + project.getName());
+ } else {
+ return project;
+ }
+
+ return null;
+ }
+
+ private void handleGetAllSchedules(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException,
+ IOException {
+
+ Page page =
+ newPage(req, resp, session,
+ "azkaban/webapp/servlet/velocity/scheduledflowpage.vm");
+
+ List<Schedule> schedules;
+ try {
+ schedules = scheduleManager.getSchedules();
+ } catch (ScheduleManagerException e) {
+ // TODO Auto-generated catch block
+ throw new ServletException(e);
+ }
+ page.add("schedules", schedules);
+ //
+ // List<SLA> slas = slaManager.getSLAs();
+ // page.add("slas", slas);
+
+ page.render();
+ }
+
+ private void handleGetScheduleCalendar(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException,
+ IOException {
+
+ Page page =
+ newPage(req, resp, session,
+ "azkaban/webapp/servlet/velocity/scheduledflowcalendarpage.vm");
+
+ List<Schedule> schedules;
+ try {
+ schedules = scheduleManager.getSchedules();
+ } catch (ScheduleManagerException e) {
+ // TODO Auto-generated catch block
+ throw new ServletException(e);
+ }
+ page.add("schedules", schedules);
+ //
+ // List<SLA> slas = slaManager.getSLAs();
+ // page.add("slas", slas);
+
+ page.render();
+ }
+
+ @Override
+ protected void handlePost(HttpServletRequest req, HttpServletResponse resp,
+ Session session) throws ServletException, IOException {
+ if (hasParam(req, "ajax")) {
+ handleAJAXAction(req, resp, session);
+ } else {
+ HashMap<String, Object> ret = new HashMap<String, Object>();
+ if (hasParam(req, "action")) {
+ String action = getParam(req, "action");
+ if (action.equals("scheduleFlow")) {
+ ajaxScheduleFlow(req, ret, session.getUser());
+ } else if (action.equals("removeSched")) {
+ ajaxRemoveSched(req, ret, session.getUser());
+ }
+ }
+
+ if (ret.get("status") == ("success"))
+ setSuccessMessageInCookie(resp, (String) ret.get("message"));
+ else
+ setErrorMessageInCookie(resp, (String) ret.get("message"));
+
+ this.writeJSON(resp, ret);
+ }
+ }
+
+ private void ajaxLoadFlows(HttpServletRequest req,
+ HashMap<String, Object> ret, User user) throws ServletException {
+ List<Schedule> schedules;
+ try {
+ schedules = scheduleManager.getSchedules();
+ } catch (ScheduleManagerException e) {
+ // TODO Auto-generated catch block
+ throw new ServletException(e);
+ }
+ // See if anything is scheduled
+ if (schedules.size() <= 0)
+ return;
+
+ List<HashMap<String, Object>> output =
+ new ArrayList<HashMap<String, Object>>();
+ ret.put("items", output);
+
+ for (Schedule schedule : schedules) {
+ try {
+ writeScheduleData(output, schedule);
+ } catch (ScheduleManagerException e) {
+ // TODO Auto-generated catch block
+ throw new ServletException(e);
+ }
+ }
+ }
+
+ private void writeScheduleData(List<HashMap<String, Object>> output,
+ Schedule schedule) throws ScheduleManagerException {
+ Map<String, Object> stats =
+ ScheduleStatisticManager.getStatistics(schedule.getScheduleId(),
+ (AzkabanWebServer) getApplication());
+ HashMap<String, Object> data = new HashMap<String, Object>();
+ data.put("scheduleid", schedule.getScheduleId());
+ data.put("flowname", schedule.getFlowName());
+ data.put("projectname", schedule.getProjectName());
+ data.put("time", schedule.getFirstSchedTime());
+
+ DateTime time = DateTime.now();
+ long period = 0;
+ if (schedule.getPeriod() != null) {
+ period = time.plus(schedule.getPeriod()).getMillis() - time.getMillis();
+ }
+ data.put("period", period);
+ int length = 3600 * 1000;
+ if (stats.get("average") != null && stats.get("average") instanceof Integer) {
+ length = (int) (Integer) stats.get("average");
+ if (length == 0) {
+ length = 3600 * 1000;
+ }
+ }
+ data.put("length", length);
+ data.put("history", false);
+ data.put("stats", stats);
+ output.add(data);
+ }
+
+ private void ajaxLoadHistory(HttpServletRequest req,
+ HttpServletResponse resp, User user) throws ServletException, IOException {
+ resp.setContentType(JSON_MIME_TYPE);
+ long today = DateTime.now().withTime(0, 0, 0, 0).getMillis();
+ long startTime = getLongParam(req, "startTime");
+ DateTime start = new DateTime(startTime);
+ // Ensure start time is 12:00 AM
+ startTime = start.withTime(0, 0, 0, 0).getMillis();
+ boolean useCache = false;
+ if (startTime < today) {
+ useCache = true;
+ }
+ long endTime = startTime + 24 * 3600 * 1000;
+ // long endTime = getLongParam(req, "endTime");
+ int loadAll = getIntParam(req, "loadAll");
+
+ // Cache file
+ String cacheDir =
+ getApplication().getServerProps().getString("cache.directory", "cache");
+ File cacheDirFile = new File(cacheDir, "schedule-history");
+ File cache = new File(cacheDirFile, startTime + ".cache");
+ cache.getParentFile().mkdirs();
+
+ if (useCache) {
+ // Determine if cache exists
+ boolean cacheExists = false;
+ synchronized (this) {
+ cacheExists = cache.exists() && cache.isFile();
+ }
+ if (cacheExists) {
+ // Send the cache instead
+ InputStream cacheInput =
+ new BufferedInputStream(new FileInputStream(cache));
+ try {
+ IOUtils.copy(cacheInput, resp.getOutputStream());
+ // System.out.println("Using cache copy for " + start);
+ return;
+ } finally {
+ IOUtils.closeQuietly(cacheInput);
+ }
+ }
+ }
+
+ // Load data if not cached
+ List<ExecutableFlow> history = null;
+ try {
+ AzkabanWebServer server = (AzkabanWebServer) getApplication();
+ ExecutorManagerAdapter executorManager = server.getExecutorManager();
+ history =
+ executorManager.getExecutableFlows(null, null, null, 0, startTime,
+ endTime, -1, -1);
+ } catch (ExecutorManagerException e) {
+ logger.error(e);
+ }
+
+ HashMap<String, Object> ret = new HashMap<String, Object>();
+ List<HashMap<String, Object>> output =
+ new ArrayList<HashMap<String, Object>>();
+ ret.put("items", output);
+ for (ExecutableFlow historyItem : history) {
+ // Check if it is a scheduled execution
+ if (historyItem.getScheduleId() >= 0 || loadAll != 0) {
+ writeHistoryData(output, historyItem);
+ }
+ }
+
+ // Make sure we're ready to cache it, otherwise output and return
+ synchronized (this) {
+ if (!useCache || cache.exists()) {
+ JSONUtils.toJSON(ret, resp.getOutputStream(), false);
+ return;
+ }
+ }
+
+ // Create cache file
+ File cacheTemp = new File(cacheDirFile, startTime + ".tmp");
+ cacheTemp.createNewFile();
+ OutputStream cacheOutput =
+ new BufferedOutputStream(new FileOutputStream(cacheTemp));
+ try {
+ OutputStream outputStream =
+ new SplitterOutputStream(cacheOutput, resp.getOutputStream());
+ // Write to both the cache file and web output
+ JSONUtils.toJSON(ret, outputStream, false);
+ } finally {
+ IOUtils.closeQuietly(cacheOutput);
+ }
+ // Move cache file
+ synchronized (this) {
+ cacheTemp.renameTo(cache);
+ }
+ }
+
+ private void writeHistoryData(List<HashMap<String, Object>> output,
+ ExecutableFlow history) {
+ HashMap<String, Object> data = new HashMap<String, Object>();
+
+ data.put("scheduleid", history.getScheduleId());
+ Project project = projectManager.getProject(history.getProjectId());
+ data.put("flowname", history.getFlowId());
+ data.put("projectname", project.getName());
+ data.put("time", history.getStartTime());
+ data.put("period", "0");
+ long endTime = history.getEndTime();
+ if (endTime == -1) {
+ endTime = System.currentTimeMillis();
+ }
+ data.put("length", endTime - history.getStartTime());
+ data.put("history", true);
+ data.put("status", history.getStatus().getNumVal());
+
+ output.add(data);
+ }
+
+ private void ajaxRemoveSched(HttpServletRequest req, Map<String, Object> ret,
+ User user) throws ServletException {
+ int scheduleId = getIntParam(req, "scheduleId");
+ Schedule sched;
+ try {
+ sched = scheduleManager.getSchedule(scheduleId);
+ } catch (ScheduleManagerException e) {
+ // Propagate scheduler lookup failures to the servlet layer
+ throw new ServletException(e);
+ }
+ if (sched == null) {
+ ret.put("message", "Schedule with ID " + scheduleId + " does not exist");
+ ret.put("status", "error");
+ return;
+ }
+
+ Project project = projectManager.getProject(sched.getProjectId());
+
+ if (project == null) {
+ ret.put("message", "Project " + sched.getProjectId() + " does not exist");
+ ret.put("status", "error");
+ return;
+ }
+
+ if (!hasPermission(project, user, Type.SCHEDULE)) {
+ ret.put("status", "error");
+ ret.put("message", "Permission denied. Cannot remove schedule with id "
+ + scheduleId);
+ return;
+ }
+
+ scheduleManager.removeSchedule(sched);
+ logger.info("User '" + user.getUserId() + "' has removed schedule "
+ + sched.getScheduleName());
+ projectManager
+ .postProjectEvent(project, EventType.SCHEDULE, user.getUserId(),
+ "Schedule " + sched.toString() + " has been removed.");
+
+ ret.put("status", "success");
+ ret.put("message", "flow " + sched.getFlowName()
+ + " removed from Schedules.");
+ return;
+ }
+
+ private void ajaxScheduleFlow(HttpServletRequest req,
+ HashMap<String, Object> ret, User user) throws ServletException {
+ String projectName = getParam(req, "projectName");
+ String flowName = getParam(req, "flow");
+ int projectId = getIntParam(req, "projectId");
+
+ Project project = projectManager.getProject(projectId);
+
+ if (project == null) {
+ ret.put("message", "Project " + projectName + " does not exist");
+ ret.put("status", "error");
+ return;
+ }
+
+ if (!hasPermission(project, user, Type.SCHEDULE)) {
+ ret.put("status", "error");
+ ret.put("message", "Permission denied. Cannot execute " + flowName);
+ return;
+ }
+
+ Flow flow = project.getFlow(flowName);
+ if (flow == null) {
+ ret.put("status", "error");
+ ret.put("message", "Flow " + flowName + " cannot be found in project "
+ + project.getName());
+ return;
+ }
+
+ String scheduleTime = getParam(req, "scheduleTime");
+ String scheduleDate = getParam(req, "scheduleDate");
+ DateTime firstSchedTime;
+ try {
+ firstSchedTime = parseDateTime(scheduleDate, scheduleTime);
+ } catch (Exception e) {
+ ret.put("error", "Invalid date and/or time '" + scheduleDate + " "
+ + scheduleTime + "'");
+ return;
+ }
+
+ ReadablePeriod thePeriod = null;
+ try {
+ if (hasParam(req, "is_recurring")
+ && getParam(req, "is_recurring").equals("on")) {
+ thePeriod = Schedule.parsePeriodString(getParam(req, "period"));
+ }
+ } catch (Exception e) {
+ ret.put("error", e.getMessage());
+ }
+
+ // Schedule sched = scheduleManager.getSchedule(projectId, flowName);
+ ExecutionOptions flowOptions = null;
+ try {
+ flowOptions = HttpRequestUtils.parseFlowOptions(req);
+ } catch (Exception e) {
+ ret.put("error", e.getMessage());
+ }
+
+ List<SlaOption> slaOptions = null;
+
+ Schedule schedule =
+ scheduleManager.scheduleFlow(-1, projectId, projectName, flowName,
+ "ready", firstSchedTime.getMillis(), firstSchedTime.getZone(),
+ thePeriod, DateTime.now().getMillis(), firstSchedTime.getMillis(),
+ firstSchedTime.getMillis(), user.getUserId(), flowOptions,
+ slaOptions);
+ logger.info("User '" + user.getUserId() + "' has scheduled " + "["
+ + projectName + "." + flowName + " (" + projectId + ")" + "].");
+ projectManager.postProjectEvent(project, EventType.SCHEDULE,
+ user.getUserId(), "Schedule " + schedule.toString()
+ + " has been added.");
+
+ ret.put("status", "success");
+ ret.put("message", projectName + "." + flowName + " scheduled.");
+ }
+
+ private DateTime parseDateTime(String scheduleDate, String scheduleTime) {
+ // scheduleTime: 12,00,pm,PDT
+ String[] parts = scheduleTime.split(",", -1);
+ int hour = Integer.parseInt(parts[0]);
+ int minutes = Integer.parseInt(parts[1]);
+ boolean isPm = parts[2].equalsIgnoreCase("pm");
+
+ DateTimeZone timezone =
+ parts[3].equals("UTC") ? DateTimeZone.UTC : DateTimeZone.getDefault();
+
+ // scheduleDate: 02/10/2013
+ DateTime day = null;
+ if (scheduleDate == null || scheduleDate.trim().length() == 0) {
+ day = new LocalDateTime().toDateTime();
+ } else {
+ day =
+ DateTimeFormat.forPattern("MM/dd/yyyy").withZone(timezone)
+ .parseDateTime(scheduleDate);
+ }
+
+ hour %= 12;
+
+ if (isPm)
+ hour += 12;
+
+ DateTime firstSchedTime =
+ day.withHourOfDay(hour).withMinuteOfHour(minutes).withSecondOfMinute(0);
+
+ return firstSchedTime;
+ }
}
diff --git a/src/main/java/azkaban/webapp/servlet/TriggerManagerServlet.java b/src/main/java/azkaban/webapp/servlet/TriggerManagerServlet.java
index 44f0f1a..3a050b2 100644
--- a/src/main/java/azkaban/webapp/servlet/TriggerManagerServlet.java
+++ b/src/main/java/azkaban/webapp/servlet/TriggerManagerServlet.java
@@ -35,88 +35,99 @@ import azkaban.webapp.AzkabanWebServer;
import azkaban.webapp.session.Session;
public class TriggerManagerServlet extends LoginAbstractAzkabanServlet {
- private static final long serialVersionUID = 1L;
- private static final Logger logger = Logger.getLogger(TriggerManagerServlet.class);
- private TriggerManager triggerManager;
-
- @Override
- public void init(ServletConfig config) throws ServletException {
- super.init(config);
- AzkabanWebServer server = (AzkabanWebServer)getApplication();
- triggerManager = server.getTriggerManager();
- }
-
- @Override
- protected void handleGet(HttpServletRequest req, HttpServletResponse resp,
- Session session) throws ServletException, IOException {
- if (hasParam(req, "ajax")) {
- handleAJAXAction(req, resp, session);
- } else {
- handleGetAllSchedules(req, resp, session);
- }
- }
-
- private void handleAJAXAction(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- HashMap<String, Object> ret = new HashMap<String, Object>();
- String ajaxName = getParam(req, "ajax");
-
- try {
- if (ajaxName.equals("expireTrigger")) {
- ajaxExpireTrigger(req, ret, session.getUser());
- }
- } catch (Exception e) {
- ret.put("error", e.getMessage());
- }
-
- if (ret != null) {
- this.writeJSON(resp, ret);
- }
- }
-
- private void handleGetAllSchedules(HttpServletRequest req, HttpServletResponse resp,
- Session session) throws ServletException, IOException{
-
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/triggerspage.vm");
-
- List<Trigger> triggers = triggerManager.getTriggers();
- page.add("triggers", triggers);
-//
-// List<SLA> slas = slaManager.getSLAs();
-// page.add("slas", slas);
-
- page.render();
- }
-
- @Override
- protected void handlePost(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- if (hasParam(req, "ajax")) {
- handleAJAXAction(req, resp, session);
- }
- }
-
- private void ajaxExpireTrigger(HttpServletRequest req, Map<String, Object> ret, User user) throws ServletException, TriggerManagerException{
- int triggerId = getIntParam(req, "triggerId");
- Trigger t = triggerManager.getTrigger(triggerId);
- if(t == null) {
- ret.put("message", "Trigger with ID " + triggerId + " does not exist");
- ret.put("status", "error");
- return;
- }
-
-// if(!hasPermission(project, user, Type.SCHEDULE)) {
-// ret.put("status", "error");
-// ret.put("message", "Permission denied. Cannot remove trigger with id " + triggerId);
-// return;
-// }
-
- triggerManager.expireTrigger(triggerId);
- logger.info("User '" + user.getUserId() + " has removed trigger " + t.getDescription());
-// projectManager.postProjectEvent(project, EventType.SCHEDULE, user.getUserId(), "Schedule " + sched.toString() + " has been removed.");
-
- ret.put("status", "success");
- ret.put("message", "trigger " + triggerId + " removed from Schedules.");
- return;
- }
+ private static final long serialVersionUID = 1L;
+ private static final Logger logger = Logger
+ .getLogger(TriggerManagerServlet.class);
+ private TriggerManager triggerManager;
-}
+ @Override
+ public void init(ServletConfig config) throws ServletException {
+ super.init(config);
+ AzkabanWebServer server = (AzkabanWebServer) getApplication();
+ triggerManager = server.getTriggerManager();
+ }
+
+ @Override
+ protected void handleGet(HttpServletRequest req, HttpServletResponse resp,
+ Session session) throws ServletException, IOException {
+ if (hasParam(req, "ajax")) {
+ handleAJAXAction(req, resp, session);
+ } else {
+ handleGetAllSchedules(req, resp, session);
+ }
+ }
+
+ private void handleAJAXAction(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException,
+ IOException {
+ HashMap<String, Object> ret = new HashMap<String, Object>();
+ String ajaxName = getParam(req, "ajax");
+
+ try {
+ if (ajaxName.equals("expireTrigger")) {
+ ajaxExpireTrigger(req, ret, session.getUser());
+ }
+ } catch (Exception e) {
+ ret.put("error", e.getMessage());
+ }
+
+ if (ret != null) {
+ this.writeJSON(resp, ret);
+ }
+ }
+
+ private void handleGetAllSchedules(HttpServletRequest req,
+ HttpServletResponse resp, Session session) throws ServletException,
+ IOException {
+
+ Page page =
+ newPage(req, resp, session,
+ "azkaban/webapp/servlet/velocity/triggerspage.vm");
+
+ List<Trigger> triggers = triggerManager.getTriggers();
+ page.add("triggers", triggers);
+ //
+ // List<SLA> slas = slaManager.getSLAs();
+ // page.add("slas", slas);
+ page.render();
+ }
+
+ @Override
+ protected void handlePost(HttpServletRequest req, HttpServletResponse resp,
+ Session session) throws ServletException, IOException {
+ if (hasParam(req, "ajax")) {
+ handleAJAXAction(req, resp, session);
+ }
+ }
+
+ private void ajaxExpireTrigger(HttpServletRequest req,
+ Map<String, Object> ret, User user) throws ServletException,
+ TriggerManagerException {
+ int triggerId = getIntParam(req, "triggerId");
+ Trigger t = triggerManager.getTrigger(triggerId);
+ if (t == null) {
+ ret.put("message", "Trigger with ID " + triggerId + " does not exist");
+ ret.put("status", "error");
+ return;
+ }
+
+ // if(!hasPermission(project, user, Type.SCHEDULE)) {
+ // ret.put("status", "error");
+ // ret.put("message", "Permission denied. Cannot remove trigger with id " +
+ // triggerId);
+ // return;
+ // }
+
+ triggerManager.expireTrigger(triggerId);
+ logger.info("User '" + user.getUserId() + "' has removed trigger "
+ + t.getDescription());
+ // projectManager.postProjectEvent(project, EventType.SCHEDULE,
+ // user.getUserId(), "Schedule " + sched.toString() + " has been removed.");
+
+ ret.put("status", "success");
+ ret.put("message", "trigger " + triggerId + " removed from Schedules.");
+ return;
+ }
+
+}
diff --git a/src/main/java/azkaban/webapp/servlet/VelocityUtils.java b/src/main/java/azkaban/webapp/servlet/VelocityUtils.java
index 01b411a..2ae7031 100644
--- a/src/main/java/azkaban/webapp/servlet/VelocityUtils.java
+++ b/src/main/java/azkaban/webapp/servlet/VelocityUtils.java
@@ -21,21 +21,21 @@ import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
public class VelocityUtils {
- public String formatDate(long timestamp) {
- return formatDate(timestamp, "yyyy-MM-dd HH:mm:ss");
- }
+ public String formatDate(long timestamp) {
+ return formatDate(timestamp, "yyyy-MM-dd HH:mm:ss");
+ }
- public String formatDate(DateTime date) {
- return formatDate(date, "yyyy-MM-dd HH:mm:ss");
- }
+ public String formatDate(DateTime date) {
+ return formatDate(date, "yyyy-MM-dd HH:mm:ss");
+ }
- public String formatDate(long timestamp, String format) {
- DateTimeFormatter f = DateTimeFormat.forPattern(format);
- return f.print(timestamp);
- }
+ public String formatDate(long timestamp, String format) {
+ DateTimeFormatter f = DateTimeFormat.forPattern(format);
+ return f.print(timestamp);
+ }
- public String formatDate(DateTime date, String format) {
- DateTimeFormatter f = DateTimeFormat.forPattern(format);
- return f.print(date);
- }
+ public String formatDate(DateTime date, String format) {
+ DateTimeFormatter f = DateTimeFormat.forPattern(format);
+ return f.print(date);
+ }
}
diff --git a/src/main/java/azkaban/webapp/session/Session.java b/src/main/java/azkaban/webapp/session/Session.java
index be29b57..08020ba 100644
--- a/src/main/java/azkaban/webapp/session/Session.java
+++ b/src/main/java/azkaban/webapp/session/Session.java
@@ -24,50 +24,50 @@ import azkaban.user.User;
* Container for the session, mapping session id to user in map
*/
public class Session {
- private final User user;
- private final String sessionId;
- private final String ip;
- private Map<String, Object> sessionData = new HashMap<String, Object>();
-
- /**
- * Constructor for the session
- *
- * @param sessionId
- * @param user
- */
- public Session(String sessionId, User user, String ip) {
- this.user = user;
- this.sessionId = sessionId;
- this.ip = ip;
- }
+ private final User user;
+ private final String sessionId;
+ private final String ip;
+ private Map<String, Object> sessionData = new HashMap<String, Object>();
- /**
- * Returns the User object
- *
- * @return
- */
- public User getUser() {
- return user;
- }
+ /**
+ * Constructor for the session
+ *
+ * @param sessionId
+ * @param user
+ */
+ public Session(String sessionId, User user, String ip) {
+ this.user = user;
+ this.sessionId = sessionId;
+ this.ip = ip;
+ }
- /**
- * Returns the sessionId
- *
- * @return
- */
- public String getSessionId() {
- return sessionId;
- }
+ /**
+ * Returns the User object
+ *
+ * @return
+ */
+ public User getUser() {
+ return user;
+ }
- public String getIp() {
- return ip;
- }
-
- public void setSessionData(String key, Object value) {
- sessionData.put(key, value);
- }
-
- public Object getSessionData(String key) {
- return sessionData.get(key);
- }
+ /**
+ * Returns the sessionId
+ *
+ * @return
+ */
+ public String getSessionId() {
+ return sessionId;
+ }
+
+ public String getIp() {
+ return ip;
+ }
+
+ public void setSessionData(String key, Object value) {
+ sessionData.put(key, value);
+ }
+
+ public Object getSessionData(String key) {
+ return sessionData.get(key);
+ }
}
diff --git a/src/main/java/azkaban/webapp/session/SessionCache.java b/src/main/java/azkaban/webapp/session/SessionCache.java
index 7591dbf..32d141d 100644
--- a/src/main/java/azkaban/webapp/session/SessionCache.java
+++ b/src/main/java/azkaban/webapp/session/SessionCache.java
@@ -21,65 +21,65 @@ import azkaban.utils.cache.Cache;
import azkaban.utils.cache.CacheManager;
import azkaban.utils.cache.Cache.EjectionPolicy;
-
/**
* Cache for web session.
*
* The following global azkaban properties can be used: max.num.sessions - used
* to determine the number of live sessions that azkaban will handle. Default is
- * 10000 session.time.to.live -Number of seconds before session expires.
- * Default set to 1 days.
+ * 10000 session.time.to.live -Number of seconds before session expires. Default
+ * set to 1 day.
*/
public class SessionCache {
- private static final int MAX_NUM_SESSIONS = 10000;
- private static final long SESSION_TIME_TO_LIVE = 24*60*60*1000L;
+ private static final int MAX_NUM_SESSIONS = 10000;
+ private static final long SESSION_TIME_TO_LIVE = 24 * 60 * 60 * 1000L;
+
+ // private CacheManager manager = CacheManager.create();
+ private Cache cache;
-// private CacheManager manager = CacheManager.create();
- private Cache cache;
+ /**
+ * Constructor taking global props.
+ *
+ * @param props
+ */
+ public SessionCache(Props props) {
+ CacheManager manager = CacheManager.getInstance();
- /**
- * Constructor taking global props.
- *
- * @param props
- */
- public SessionCache(Props props) {
- CacheManager manager = CacheManager.getInstance();
-
- cache = manager.createCache();
- cache.setEjectionPolicy(EjectionPolicy.LRU);
- cache.setMaxCacheSize(props.getInt("max.num.sessions", MAX_NUM_SESSIONS));
- cache.setExpiryTimeToLiveMs(props.getLong("session.time.to.live", SESSION_TIME_TO_LIVE));
- }
+ cache = manager.createCache();
+ cache.setEjectionPolicy(EjectionPolicy.LRU);
+ cache.setMaxCacheSize(props.getInt("max.num.sessions", MAX_NUM_SESSIONS));
+ cache.setExpiryTimeToLiveMs(props.getLong("session.time.to.live",
+ SESSION_TIME_TO_LIVE));
+ }
- /**
- * Returns the cached session using the session id.
- *
- * @param sessionId
- * @return
- */
- public Session getSession(String sessionId) {
- Session elem = cache.<Session>get(sessionId);
+ /**
+ * Returns the cached session using the session id.
+ *
+ * @param sessionId
+ * @return
+ */
+ public Session getSession(String sessionId) {
+ Session elem = cache.<Session> get(sessionId);
- return elem;
- }
+ return elem;
+ }
- /**
- * Adds a session to the cache. Accessible through the session ID.
- *
- * @param id
- * @param session
- */
- public void addSession(Session session) {
- cache.put(session.getSessionId(), session);
- }
+ /**
+ * Adds a session to the cache. Accessible through the session ID.
+ *
+ * @param id
+ * @param session
+ */
+ public void addSession(Session session) {
+ cache.put(session.getSessionId(), session);
+ }
- /**
- * Removes the session from the cache.
- *
- * @param id
- * @return
- */
- public boolean removeSession(String id) {
- return cache.remove(id);
- }
+ /**
+ * Removes the session from the cache.
+ *
+ * @param id
+ * @return
+ */
+ public boolean removeSession(String id) {
+ return cache.remove(id);
+ }
}
\ No newline at end of file
src/main/less/azkaban-graph.less 80(+40 -40)
diff --git a/src/main/less/azkaban-graph.less b/src/main/less/azkaban-graph.less
index 50d818c..ce93658 100644
--- a/src/main/less/azkaban-graph.less
+++ b/src/main/less/azkaban-graph.less
@@ -40,122 +40,122 @@
}
.border {
- stroke-width: 1;
+ stroke-width: 1;
}
.flownode .nodebox .flowborder {
- stroke-width: 1.25;
- fill: #FFF;
- fill-opacity: 0.8;
+ stroke-width: 1.25;
+ fill: #FFF;
+ fill-opacity: 0.8;
}
.READY > g > rect {
- fill: #DDD;
- stroke: #CCC;
+ fill: #DDD;
+ stroke: #CCC;
}
.READY > g > text {
- fill: #000;
+ fill: #000;
}
.RUNNING > g > rect {
- fill: #39b3d7;
- stroke: #39b3d7;
+ fill: #39b3d7;
+ stroke: #39b3d7;
}
.RUNNING > g > text {
- fill: #FFF;
+ fill: #FFF;
}
.SUCCEEDED > g > rect {
- fill: #5cb85c;
- stroke: #4cae4c;
+ fill: #5cb85c;
+ stroke: #4cae4c;
}
.SUCCEEDED > g > text {
- fill: #FFF;
+ fill: #FFF;
}
.FAILED > g > rect {
- fill: #d2322d;
- stroke: #d2322d;
+ fill: #d2322d;
+ stroke: #d2322d;
}
.FAILED > g > text {
- fill: #FFF;
+ fill: #FFF;
}
.KILLED > g > rect {
- fill: #d2322d;
- stroke: #d2322d;
+ fill: #d2322d;
+ stroke: #d2322d;
}
.KILLED > g > text {
- fill: #FFF;
+ fill: #FFF;
}
.CANCELLED > g > rect {
- fill: #FF9999;
- stroke: #FF9999;
+ fill: #FF9999;
+ stroke: #FF9999;
}
.CANCELLED > g > text {
- fill: #FFF;
+ fill: #FFF;
}
.FAILED_FINISHING > g > rect {
- fill: #ed9c28;
- stroke: #ed9c28;
+ fill: #ed9c28;
+ stroke: #ed9c28;
}
.FAILED_FINISHING > g > text {
- fill: #FFF;
+ fill: #FFF;
}
.DISABLED > g > rect {
- fill: #DDD;
- stroke: #CCC;
+ fill: #DDD;
+ stroke: #CCC;
}
.DISABLED > g > rect {
- fill: #DDD;
- stroke: #CCC;
+ fill: #DDD;
+ stroke: #CCC;
}
.nodeDisabled {
- opacity: 0.25;
+ opacity: 0.25;
}
.SKIPPED > g > rect {
- fill: #DDD;
- stroke: #CCC;
+ fill: #DDD;
+ stroke: #CCC;
}
.DISABLED {
- opacity: 0.25;
+ opacity: 0.25;
}
.SKIPPED {
- opacity: 0.25;
+ opacity: 0.25;
}
.QUEUED > g > rect {
- fill: #39b3d7;
- stroke: #39b3d7;
+ fill: #39b3d7;
+ stroke: #39b3d7;
}
.QUEUED > g > text {
- fill: #FFF;
+ fill: #FFF;
}
.QUEUED {
- opacity: 0.5;
+ opacity: 0.5;
}
/* Edges */
.edge {
- stroke: #CCC;
- stroke-width: 1.5;
+ stroke: #CCC;
+ stroke-width: 1.5;
&:hover {
stroke: #009FC9;
src/main/less/base.less 10(+5 -5)
diff --git a/src/main/less/base.less b/src/main/less/base.less
index 62861ca..b848384 100644
--- a/src/main/less/base.less
+++ b/src/main/less/base.less
@@ -63,11 +63,11 @@
border-radius: 0;
border-left: 0;
border-right: 0;
-
+
&:first-child {
border-top: 0;
}
-
+
&:last-child {
border-bottom: 0;
}
@@ -89,9 +89,9 @@
}
.nav {
- .nav-button {
- margin-left: 5px;
- }
+ .nav-button {
+ margin-left: 5px;
+ }
}
.state-icon {
src/main/less/callout.less 4(+2 -2)
diff --git a/src/main/less/callout.less b/src/main/less/callout.less
index cdf891d..8b54a63 100644
--- a/src/main/less/callout.less
+++ b/src/main/less/callout.less
@@ -30,7 +30,7 @@
.callout-danger {
background-color: #fdf7f7;
border-color: #d9534f;
-
+
h4 {
color: #d9534f;
}
@@ -54,7 +54,7 @@
}
}
-.callout-default {
+.callout-default {
background-color: #f5f5f5;
border-color: #dddddd;
src/main/less/context-menu.less 4(+2 -2)
diff --git a/src/main/less/context-menu.less b/src/main/less/context-menu.less
index e0e8761..47e3875 100644
--- a/src/main/less/context-menu.less
+++ b/src/main/less/context-menu.less
@@ -20,7 +20,7 @@
min-width: 50px;
font-size: 10pt;
cursor: pointer;
-
+
.expandSymbol {
background-image: url("../css/images/ui-icons_cccccc_256x240.png");
background-position: -32px -16px;
@@ -28,7 +28,7 @@
width: 16px;
float:right;
}
-
+
&:hover {
background-color: #555;
color: #FFF;
src/main/less/flow.less 94(+47 -47)
diff --git a/src/main/less/flow.less b/src/main/less/flow.less
index 8ee150a..80272bb 100644
--- a/src/main/less/flow.less
+++ b/src/main/less/flow.less
@@ -28,8 +28,8 @@
}
.flow-progress {
- width: 280px;
- margin: 4px;
+ width: 280px;
+ margin: 4px;
background-color: #f5f5f5;
height: 24px;
border-radius: 4px;
@@ -38,7 +38,7 @@
}
.flow-progress-bar {
- height: 100%;
+ height: 100%;
background-color: #ccc;
border-radius: 5px;
-webkit-box-shadow: inset 0 -1px 0 rgba(0, 0, 0, 0.15);
@@ -60,7 +60,7 @@
&.FAILED {
background-color: @flow-failed-color;
}
-
+
&.KILLED {
background-color: @flow-killed-color;
}
@@ -79,20 +79,20 @@
&.QUEUED {
background-color: @flow-queued-color;
}
-
+
&.CANCELLED {
background-color: @flow-cancelled-color;
}
}
td {
- > .listExpand {
- width: 16px;
- height: 16px;
- float:right;
- margin-top: 5px;
- font-size: 8pt;
- }
+ > .listExpand {
+ width: 16px;
+ height: 16px;
+ float:right;
+ margin-top: 5px;
+ font-size: 8pt;
+ }
.status {
-moz-border-radius: 2px;
@@ -102,7 +102,7 @@ td {
color: #FFF;
text-align: center;
margin-top: 2px;
-
+
&.SUCCEEDED {
background-color: @flow-succeeded-color;
}
@@ -110,7 +110,7 @@ td {
&.FAILED {
background-color: @flow-failed-color;
}
-
+
&.KILLED {
background-color: @flow-killed-color;
}
@@ -199,7 +199,7 @@ td {
.panel {
height: 100%;
-
+
.panel-heading {
padding-right: 10px;
}
@@ -216,30 +216,30 @@ td {
}
.graph-sidebar-search {
- width: 206px;
- margin: 0px;
+ width: 206px;
+ margin: 0px;
}
.graph-sidebar-close {
- float: right;
- color: #CCC;
- padding: 5px 0px;
- cursor: pointer;
-
- &:hover {
- color: #666;
- }
+ float: right;
+ color: #CCC;
+ padding: 5px 0px;
+ cursor: pointer;
+
+ &:hover {
+ color: #666;
+ }
}
.graph-sidebar-open {
- position: absolute;
- margin: 10px;
- color: #CCC;
- cursor: pointer;
-
- &:hover {
- color: #666;
- }
+ position: absolute;
+ margin: 10px;
+ color: #CCC;
+ cursor: pointer;
+
+ &:hover {
+ color: #666;
+ }
}
ul.tree-list {
@@ -247,7 +247,7 @@ ul.tree-list {
padding-left: 0px;
margin: 0;
}
-
+
li.tree-list-item {
&.active > a {
background-color: #D9EDFF;
@@ -275,50 +275,50 @@ li.tree-list-item {
background-color: #f5f5f5;
cursor: pointer;
}
-
+
&.nodedisabled,
&.DISABLED {
opacity: 0.3;
}
-
+
&.DISABLED .icon {
background-position: 16px 0px;
}
-
+
&.READY .icon {
background-position: 16px 0px;
}
-
+
&.QUEUED .icon {
opacity: 0.5;
background-position: 32px 0px;
}
-
+
&.RUNNING .icon {
background-position: 32px 0px;
}
-
+
&.SUCCEEDED .icon {
background-position: 48px 0px;
}
-
+
&.FAILED .icon {
background-position: 0px 0px;
}
-
+
&.KILLED .icon {
background-position: 0px 0px;
}
-
+
&.CANCELLED .icon {
background-position: 0px 0px;
opacity: 0.5;
}
-
+
&.FAILED_FINISHING .icon {
background-position: 0px 0px;
}
-
+
.icon {
float: left;
width: 16px;
@@ -327,14 +327,14 @@ li.tree-list-item {
background-image: url("./images/dot-icon.png");
background-position: 16px 0px;
}
-
+
.expandarrow {
float: right;
width: 16px;
height: 16px;
font-size: 8pt;
}
-
+
.filterHighlight {
background-color: #FFFF00;
}
src/main/less/header.less 2(+1 -1)
diff --git a/src/main/less/header.less b/src/main/less/header.less
index 03ff1d2..04ad4b1 100644
--- a/src/main/less/header.less
+++ b/src/main/less/header.less
@@ -27,7 +27,7 @@
}
}
}
-
+
.exflow-stats {
margin: 0px;
src/main/less/log.less 2(+1 -1)
diff --git a/src/main/less/log.less b/src/main/less/log.less
index 20d2019..a2874eb 100644
--- a/src/main/less/log.less
+++ b/src/main/less/log.less
@@ -21,7 +21,7 @@
left: 0;
right: 0;
bottom: 0;
-
+
padding: 0;
background-color: #fcfcfc;
src/main/less/navbar.less 4(+2 -2)
diff --git a/src/main/less/navbar.less b/src/main/less/navbar.less
index 3826082..aee616f 100644
--- a/src/main/less/navbar.less
+++ b/src/main/less/navbar.less
@@ -26,7 +26,7 @@
.navbar-enviro {
margin: 30px 20px 0px 12px;
-
+
.navbar-enviro-name {
color: #ff3601;
font-family: Helvetica, Arial, Sans-Serif;
@@ -34,7 +34,7 @@
font-weight: bold;
line-height: 100%;
}
-
+
.navbar-enviro-server {
color: #999;
font-family: Helvetica, Arial, Sans-Serif;
src/main/less/project.less 34(+17 -17)
diff --git a/src/main/less/project.less b/src/main/less/project.less
index 0fb63af..dd69efe 100644
--- a/src/main/less/project.less
+++ b/src/main/less/project.less
@@ -26,7 +26,7 @@
margin-top: 0;
margin-bottom: 4px;
}
-
+
.project-description {
margin-bottom: 4px;
}
@@ -49,22 +49,22 @@
}
.project-flows {
- display: none;
- background-color: #f9f9f9;
- padding: 10px 15px 10px 15px;
-
- h5 {
- margin-top: 5px;
- }
-
- .list-group {
- margin-bottom: 10px;
- }
-
- .list-group-item {
- background: transparent;
- padding: 7px 12px 7px 12px;
- }
+ display: none;
+ background-color: #f9f9f9;
+ padding: 10px 15px 10px 15px;
+
+ h5 {
+ margin-top: 5px;
+ }
+
+ .list-group {
+ margin-bottom: 10px;
+ }
+
+ .list-group-item {
+ background: transparent;
+ padding: 7px 12px 7px 12px;
+ }
}
// Flow panel heading.
src/main/less/tables.less 54(+27 -27)
diff --git a/src/main/less/tables.less b/src/main/less/tables.less
index 5d5aeea..9b0d83d 100644
--- a/src/main/less/tables.less
+++ b/src/main/less/tables.less
@@ -20,11 +20,11 @@ table.table-properties {
.property-key,
.property-value,
.property-value-half {
- pre {
- background: transparent;
- padding: 0;
- border: 0;
- }
+ pre {
+ background: transparent;
+ padding: 0;
+ border: 0;
+ }
}
.editable {
@@ -74,18 +74,18 @@ table.table-properties {
}
td {
- &.subflowrow {
- padding: 0px 0px;
-
- table {
- margin: 0px;
- background-color: rgba(230, 230, 230, 0.75);
-
- td {
- background-color: none;
- }
- }
- }
+ &.subflowrow {
+ padding: 0px 0px;
+
+ table {
+ margin: 0px;
+ background-color: rgba(230, 230, 230, 0.75);
+
+ td {
+ background-color: none;
+ }
+ }
+ }
&.date {
width: 160px;
@@ -126,7 +126,7 @@ table.table-properties {
&.logs {
width: 30px;
}
-
+
&.timeline {
width: 280px;
padding: 0px 0px 0px 4px;
@@ -134,16 +134,16 @@ table.table-properties {
vertical-align: bottom;
margin: 0px;
}
-
+
&.startTime {
- width: 160px;
- }
-
- &.endTime {
- width: 160px;
- }
+ width: 160px;
+ }
+
+ &.endTime {
+ width: 160px;
+ }
&.elapsedTime {
- width: 90px;
- }
+ width: 90px;
+ }
}
}
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/alerts.vm b/src/main/resources/azkaban/webapp/servlet/velocity/alerts.vm
index 8877728..10831fb 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/alerts.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/alerts.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -18,19 +18,19 @@
#if ($error_message != "null")
<div class="alert alert-danger alert-dismissable">
- <button type="button" class="close" data-dismiss="alert" aria-hidden="true">×</button>
+ <button type="button" class="close" data-dismiss="alert" aria-hidden="true">×</button>
$error_message
</div>
#elseif ($success_message != "null")
<div class="alert alert-success">
- <button type="button" class="close" data-dismiss="alert" aria-hidden="true">×</button>
+ <button type="button" class="close" data-dismiss="alert" aria-hidden="true">×</button>
$success_message
</div>
#end
## Alert message triggered by JavaScript.
- <div class="alert alert-dismissable alert-messaging" id="messaging">
- <button type="button" class="close" data-dismiss="alert" aria-hidden="true">×</button>
- <p id="messaging-message"></p>
- </div>
+ <div class="alert alert-dismissable alert-messaging" id="messaging">
+ <button type="button" class="close" data-dismiss="alert" aria-hidden="true">×</button>
+ <p id="messaging-message"></p>
+ </div>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/errormsg.vm b/src/main/resources/azkaban/webapp/servlet/velocity/errormsg.vm
index 4c745c8..38d76f9 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/errormsg.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/errormsg.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,13 +14,13 @@
* the License.
*#
- <div class="az-page-header">
- <div class="container-full">
+ <div class="az-page-header">
+ <div class="container-full">
<h1 class="danger">Something's wrong</h1>
- </div>
+ </div>
</div>
<div class="container-full">
- <div class="alert alert-danger">
+ <div class="alert alert-danger">
<h4>Error</h4>
$errorMsg
</div>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/executingflowpage.vm b/src/main/resources/azkaban/webapp/servlet/velocity/executingflowpage.vm
index c8cbc5c..672da57 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/executingflowpage.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/executingflowpage.vm
@@ -16,39 +16,39 @@
<!DOCTYPE html>
<html lang="en">
- <head>
+ <head>
#parse("azkaban/webapp/servlet/velocity/style.vm")
#parse("azkaban/webapp/servlet/velocity/javascript.vm")
#parse("azkaban/webapp/servlet/velocity/svgflowincludes.vm")
<script type="text/javascript" src="${context}/js/raphael.min.js"></script>
<script type="text/javascript" src="${context}/js/morris.min.js"></script>
- <script type="text/javascript" src="${context}/js/moment.min.js"></script>
+ <script type="text/javascript" src="${context}/js/moment.min.js"></script>
<script type="text/javascript" src="${context}/js/bootstrap-datetimepicker.min.js"></script>
<script type="text/javascript" src="${context}/js/dust-full-2.2.3.min.js"></script>
- <script type="text/javascript" src="${context}/js/flowstats.js"></script>
- <script type="text/javascript" src="${context}/js/flowstats-no-data.js"></script>
-
- <script type="text/javascript" src="${context}/js/azkaban/view/flow-execution-list.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/flow-execute-dialog.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/flow-stats.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/exflow.js"></script>
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- var errorMessage = null;
- var successMessage = null;;
- var projectId = "${projectId}";
- var projectName = "${projectName}";
- var flowId = "${flowid}";
- var execId = "${execid}";
- </script>
- <link rel="stylesheet" type="text/css" href="${context}/css/morris.css" />
- <link rel="stylesheet" type="text/css" href="${context}/css/jquery-ui-1.10.1.custom.css" />
- </head>
- <body>
+ <script type="text/javascript" src="${context}/js/flowstats.js"></script>
+ <script type="text/javascript" src="${context}/js/flowstats-no-data.js"></script>
+
+ <script type="text/javascript" src="${context}/js/azkaban/view/flow-execution-list.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/flow-execute-dialog.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/flow-stats.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/exflow.js"></script>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ var errorMessage = null;
+        var successMessage = null;
+ var projectId = "${projectId}";
+ var projectName = "${projectName}";
+ var flowId = "${flowid}";
+ var execId = "${execid}";
+ </script>
+ <link rel="stylesheet" type="text/css" href="${context}/css/morris.css" />
+ <link rel="stylesheet" type="text/css" href="${context}/css/jquery-ui-1.10.1.custom.css" />
+ </head>
+ <body>
#set ($current_page="all")
#set ($show_schedule="false")
@@ -58,10 +58,10 @@
#parse ("azkaban/webapp/servlet/velocity/errormsg.vm")
#else
- ## Page header
+ ## Page header
- <div class="az-page-header page-header-bare">
- <div class="container-full" id="flow-status">
+ <div class="az-page-header page-header-bare">
+ <div class="container-full" id="flow-status">
<div class="row">
<div class="header-title">
<h1>
@@ -84,7 +84,7 @@
<div class="clearfix"></div>
</div>
</div>
- </div>
+ </div>
</div>
<div class="page-breadcrumb">
<div class="container-full">
@@ -96,59 +96,59 @@
</div>
</div>
- <div class="container-full">
+ <div class="container-full">
#parse ("azkaban/webapp/servlet/velocity/alerts.vm")
- ## Tabs and buttons.
-
- <ul class="nav nav-tabs nav-sm" id="headertabs">
- <li id="graphViewLink"><a href="#graph">Graph</a></li>
- <li id="jobslistViewLink"><a href="#jobslist">Job List</a></li>
- <li id="flowLogViewLink"><a href="#log">Flow Log</a></li>
- <li id="statsViewLink"><a href="#stats">Stats</a></li>
- <li class="nav-button pull-right"><button type="button" id="pausebtn" class="btn btn-primary btn-sm">Pause</button></li>
- <li class="nav-button pull-right"><button type="button" id="resumebtn" class="btn btn-primary btn-sm">Resume</button></li>
- <li class="nav-button pull-right"><button type="button" id="cancelbtn" class="btn btn-danger btn-sm">Kill</button></li>
- <li class="nav-button pull-right"><button type="button" id="retrybtn" class="btn btn-success btn-sm">Retry Failed</button></li>
- <li class="nav-button pull-right"><button type="button" id="executebtn" class="btn btn-success btn-sm">Prepare Execution</button></li>
- </ul>
+ ## Tabs and buttons.
+
+ <ul class="nav nav-tabs nav-sm" id="headertabs">
+ <li id="graphViewLink"><a href="#graph">Graph</a></li>
+ <li id="jobslistViewLink"><a href="#jobslist">Job List</a></li>
+ <li id="flowLogViewLink"><a href="#log">Flow Log</a></li>
+ <li id="statsViewLink"><a href="#stats">Stats</a></li>
+ <li class="nav-button pull-right"><button type="button" id="pausebtn" class="btn btn-primary btn-sm">Pause</button></li>
+ <li class="nav-button pull-right"><button type="button" id="resumebtn" class="btn btn-primary btn-sm">Resume</button></li>
+ <li class="nav-button pull-right"><button type="button" id="cancelbtn" class="btn btn-danger btn-sm">Kill</button></li>
+ <li class="nav-button pull-right"><button type="button" id="retrybtn" class="btn btn-success btn-sm">Retry Failed</button></li>
+ <li class="nav-button pull-right"><button type="button" id="executebtn" class="btn btn-success btn-sm">Prepare Execution</button></li>
+ </ul>
</div>
- ## Graph View
+ ## Graph View
- #parse ("azkaban/webapp/servlet/velocity/flowgraphview.vm")
+ #parse ("azkaban/webapp/servlet/velocity/flowgraphview.vm")
- ## Job List View
+ ## Job List View
<div class="container-full" id="jobListView">
- <div class="row">
- <div class="col-xs-12">
- <table class="table table-bordered table-condensed table-hover executions-table">
- <thead>
- <tr>
- <th>Name</th>
- <th class="jobtype">Type</th>
- <th class="timeline">Timeline</th>
- <th class="date">Start Time</th>
- <th class="date">End Time</th>
- <th class="elapse">Elapsed</th>
- <th class="status">Status</th>
- <th class="logs">Details</th>
- </tr>
- </thead>
- <tbody id="executableBody">
- </tbody>
- </table>
+ <div class="row">
+ <div class="col-xs-12">
+ <table class="table table-bordered table-condensed table-hover executions-table">
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th class="jobtype">Type</th>
+ <th class="timeline">Timeline</th>
+ <th class="date">Start Time</th>
+ <th class="date">End Time</th>
+ <th class="elapse">Elapsed</th>
+ <th class="status">Status</th>
+ <th class="logs">Details</th>
+ </tr>
+ </thead>
+ <tbody id="executableBody">
+ </tbody>
+ </table>
</div><!-- /.col-xs-12 -->
</div><!-- /.row -->
</div><!-- /.container-full -->
- ## Flow Log View
+ ## Flow Log View
<div class="container-full container-fill" id="flowLogView">
- <div class="row">
- <div class="col-xs-12 col-content">
+ <div class="row">
+ <div class="col-xs-12 col-content">
<div class="log-viewer">
<div class="panel panel-default">
<div class="panel-heading">
@@ -178,35 +178,35 @@
</div>
</div>
</div>
- </div><!-- /.row -->
+ </div><!-- /.row -->
</div><!-- /.container-fill -->
- ## Error message message dialog.
+      ## Error message dialog.
<div class="container-full">
- <div class="modal" id="messageDialog">
- <div class="modal-dialog">
- <div class="modal-content">
- <div class="modal-header" id="messageTitle">
- <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
- <h4 class="modal-title">Error</h4>
- </div>
- <div class="modal-body" id="messageDiv">
- <p id="messageBox"></p>
- </div>
- <div class="modal-footer">
- <button type="button" class="btn btn-primary" data-dismiss="modal">Dismiss</button>
- </div>
- </div>
- </div>
- </div>
+ <div class="modal" id="messageDialog">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header" id="messageTitle">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title">Error</h4>
+ </div>
+ <div class="modal-body" id="messageDiv">
+ <p id="messageBox"></p>
+ </div>
+ <div class="modal-footer">
+ <button type="button" class="btn btn-primary" data-dismiss="modal">Dismiss</button>
+ </div>
+ </div>
+ </div>
+ </div>
<div id="contextMenu"></div>
- #parse ("azkaban/webapp/servlet/velocity/invalidsessionmodal.vm")
- #parse ("azkaban/webapp/servlet/velocity/flowexecutionpanel.vm")
- #parse ("azkaban/webapp/servlet/velocity/messagedialog.vm")
- </div>
+ #parse ("azkaban/webapp/servlet/velocity/invalidsessionmodal.vm")
+ #parse ("azkaban/webapp/servlet/velocity/flowexecutionpanel.vm")
+ #parse ("azkaban/webapp/servlet/velocity/messagedialog.vm")
+ </div>
#end
- </body>
+ </body>
</html>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/executionspage.vm b/src/main/resources/azkaban/webapp/servlet/velocity/executionspage.vm
index 8a6cf52..288b5e6 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/executionspage.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/executionspage.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,37 +14,37 @@
* the License.
*#
-<!DOCTYPE html>
+<!DOCTYPE html>
<html lang="en">
- <head>
+ <head>
#parse("azkaban/webapp/servlet/velocity/style.vm")
#parse("azkaban/webapp/servlet/velocity/javascript.vm")
-
- <script type="text/javascript" src="${context}/js/azkaban/view/executions.js"></script>
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- var errorMessage = null;
- var successMessage = null;
- </script>
- </head>
- <body>
+
+ <script type="text/javascript" src="${context}/js/azkaban/view/executions.js"></script>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ var errorMessage = null;
+ var successMessage = null;
+ </script>
+ </head>
+ <body>
#set ($current_page="executing")
#parse ("azkaban/webapp/servlet/velocity/nav.vm")
## Page header.
- <div class="az-page-header">
- <div class="container-full">
+ <div class="az-page-header">
+ <div class="container-full">
<h1><a href="${context}/executor">Executing Flows</a></h1>
- </div>
- </div>
+ </div>
+ </div>
+
+ <div class="container-full">
- <div class="container-full">
-
#parse ("azkaban/webapp/servlet/velocity/alerts.vm")
## Page Content
@@ -54,8 +54,8 @@
<li id="recently-finished-view-link"><a href="#recently-finished">Recently Finished</a></li>
</ul>
- <div class="row" id="currently-running-view">
- <div class="col-xs-12">
+ <div class="row" id="currently-running-view">
+ <div class="col-xs-12">
<table id="executingJobs" class="table table-striped table-bordered table-hover table-condensed executions-table">
<thead>
<tr>
@@ -73,7 +73,7 @@
</thead>
<tbody>
#if ($runningFlows)
- #foreach ($flow in $runningFlows)
+ #foreach ($flow in $runningFlows)
<tr>
<td class="tb-name">
<a href="${context}/executor?execid=${flow.executionId}">${flow.executionId}</a>
@@ -90,7 +90,7 @@
<td><div class="status ${flow.status}">$utils.formatStatus(${flow.status})</div></td>
<td></td>
</tr>
- #end
+ #end
#else
<tr>
<td colspan="10">No Executing Flows</td>
@@ -98,11 +98,11 @@
#end
</tbody>
</table>
- </div><!-- /col-xs-12 -->
- </div><!-- /row -->
+ </div><!-- /col-xs-12 -->
+ </div><!-- /row -->
- <div class="row" id="recently-finished-view">
- <div class="col-xs-12">
+ <div class="row" id="recently-finished-view">
+ <div class="col-xs-12">
<table id="recentlyFinished" class="table table-striped table-bordered table-hover table-condensed executions-table">
<thead>
<tr>
@@ -120,7 +120,7 @@
</thead>
<tbody>
#if ($recentlyFinished.isEmpty())
- #foreach ($flow in $recentlyFinished)
+ #foreach ($flow in $recentlyFinished)
<tr>
<td class="tb-name execId">
<a href="${context}/executor?execid=${flow.executionId}">${flow.executionId}</a>
@@ -137,17 +137,17 @@
<td><div class="status ${flow.status}">$utils.formatStatus(${flow.status})</div></td>
<td></td>
</tr>
- #end
+ #end
#else
<tr>
<td colspan="10">No Recently Finished</td>
</tr>
-#end
+#end
</tbody>
</table>
- </div><!-- /col-xs-12 -->
- </div><!-- /row -->
-
- </div><!-- /container-full -->
- </body>
+ </div><!-- /col-xs-12 -->
+ </div><!-- /row -->
+
+ </div><!-- /container-full -->
+ </body>
</html>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/flowexecutionpanel.vm b/src/main/resources/azkaban/webapp/servlet/velocity/flowexecutionpanel.vm
index 44a3f16..e22d0f7 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/flowexecutionpanel.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/flowexecutionpanel.vm
@@ -14,53 +14,53 @@
* the License.
*#
- <div class="modal modal-wide" id="execute-flow-panel">
- <div class="modal-dialog">
- <div class="modal-content">
- <div class="modal-header">
- <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
- <h4 class="modal-title" id="execute-flow-panel-title"></h4>
- </div><!-- /modal-header -->
- <div class="modal-body row">
- <div class="col-xs-4">
- <ul class="nav nav-pills nav-stacked" id="graph-options">
- <li id="flow-option" viewpanel="svg-div-custom">
- <a href="#">Flow View</a>
- <div class="menu-caption">Right click on the jobs to disable and enable jobs in the flow.</div>
- </li>
- <li viewpanel="notification-panel">
- <a href="#">Notification</a>
- <div class="menu-caption">Change the address where success and failure emails will be sent.</div>
- </li>
- <li viewpanel="failure-options">
- <a href="#">Failure Options</a>
- <div class="menu-caption">Select flow behavior when a failure is detected.</div>
- </li>
- <li viewpanel="concurrent-panel">
- <a href="#">Concurrent</a>
- <div class="menu-caption">Change the behavior of the flow if it is already running.</div>
- </li>
- <li viewpanel="flow-parameters-panel">
- <a href="#">Flow Parameters</a>
- <div class="menu-caption">Add temporary flow parameters that are used to override global settings for each job.</div>
- </li>
- </ul>
- </div><!-- /col-xs-4 -->
- <div class="col-xs-8">
- <div id="execution-graph-options-panel">
+ <div class="modal modal-wide" id="execute-flow-panel">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title" id="execute-flow-panel-title"></h4>
+ </div><!-- /modal-header -->
+ <div class="modal-body row">
+ <div class="col-xs-4">
+ <ul class="nav nav-pills nav-stacked" id="graph-options">
+ <li id="flow-option" viewpanel="svg-div-custom">
+ <a href="#">Flow View</a>
+ <div class="menu-caption">Right click on the jobs to disable and enable jobs in the flow.</div>
+ </li>
+ <li viewpanel="notification-panel">
+ <a href="#">Notification</a>
+ <div class="menu-caption">Change the address where success and failure emails will be sent.</div>
+ </li>
+ <li viewpanel="failure-options">
+ <a href="#">Failure Options</a>
+ <div class="menu-caption">Select flow behavior when a failure is detected.</div>
+ </li>
+ <li viewpanel="concurrent-panel">
+ <a href="#">Concurrent</a>
+ <div class="menu-caption">Change the behavior of the flow if it is already running.</div>
+ </li>
+ <li viewpanel="flow-parameters-panel">
+ <a href="#">Flow Parameters</a>
+ <div class="menu-caption">Add temporary flow parameters that are used to override global settings for each job.</div>
+ </li>
+ </ul>
+ </div><!-- /col-xs-4 -->
+ <div class="col-xs-8">
+ <div id="execution-graph-options-panel">
## SVG graph panel.
- <div id="svg-div-custom" class="side-panel">
- <svg id="flow-executing-graph" xmlns="http://www.w3.org/2000/svg" version="1.1" shape-rendering="optimize-speed" text-rendering="optimize-speed">
- </svg>
- </div>
+ <div id="svg-div-custom" class="side-panel">
+ <svg id="flow-executing-graph" xmlns="http://www.w3.org/2000/svg" version="1.1" shape-rendering="optimize-speed" text-rendering="optimize-speed">
+ </svg>
+ </div>
## Notification panel.
- <div id="notification-panel" class="side-panel">
- <h4>Notify on failure</h4>
- <p>On a job failure, notify on either the first failure, and/or when the failed flow finishes.</p>
+ <div id="notification-panel" class="side-panel">
+ <h4>Notify on failure</h4>
+ <p>On a job failure, notify on either the first failure, and/or when the failed flow finishes.</p>
<hr>
<div class="btn-group" data-toggle="buttons">
<label class="btn btn-default">
@@ -71,115 +71,115 @@
</label>
</div>
- <h4>Failure Emails</h4>
- <div class="checkbox">
- <label>
- <input type="checkbox" id="override-failure-emails" name="overrideFailureEmails" value="overrideFailureEmails">
- Override flow email settings.
- </label>
- </div>
- <label>Notify these addresses on failure. Comma, space, or semi-colon delimited list.</label>
- <textarea class="form-control" rows="3" id="failure-emails"></textarea>
-
- <h4>Success Emails</h4>
- <div class="checkbox">
- <label>
- <input type="checkbox" id="override-success-emails" name="overrideSuccessEmails" value="overrideSuccessEmails">
- Override flow email settings.
- </label>
- </div>
- <label>Notify when the flow finishes successfully. Comma, space, or semi-colon delimited list.</label>
- <textarea class="form-control" rows="3" id="success-emails"></textarea>
- </div>
+ <h4>Failure Emails</h4>
+ <div class="checkbox">
+ <label>
+ <input type="checkbox" id="override-failure-emails" name="overrideFailureEmails" value="overrideFailureEmails">
+ Override flow email settings.
+ </label>
+ </div>
+ <label>Notify these addresses on failure. Comma, space, or semi-colon delimited list.</label>
+ <textarea class="form-control" rows="3" id="failure-emails"></textarea>
+
+ <h4>Success Emails</h4>
+ <div class="checkbox">
+ <label>
+ <input type="checkbox" id="override-success-emails" name="overrideSuccessEmails" value="overrideSuccessEmails">
+ Override flow email settings.
+ </label>
+ </div>
+ <label>Notify when the flow finishes successfully. Comma, space, or semi-colon delimited list.</label>
+ <textarea class="form-control" rows="3" id="success-emails"></textarea>
+ </div>
## Failure options panel.
- <div id="failure-options" class="side-panel">
- <h4>Failure Options</h4>
- <p>When a failure first occurs in the flow, select the execution behavior.</p>
- <hr>
- <ul>
- <li><strong>Finish Current Running</strong> finishes only the currently running job. It will not start any new jobs.</li>
- <li><strong>Cancel All</strong> immediately kills all jobs and fails the flow.</li>
- <li><strong>Finish All Possible</strong> will keep executing jobs as long as its dependencies are met.</li>
- </ul>
- <select id="failure-action" name="failureAction" class="form-control form-control-auto">
- <option value="finishCurrent">Finish Current Running</option>
- <option value="cancelImmediately">Cancel All</option>
- <option value="finishPossible">Finish All Possible</option>
- </select>
- </div>
+ <div id="failure-options" class="side-panel">
+ <h4>Failure Options</h4>
+ <p>When a failure first occurs in the flow, select the execution behavior.</p>
+ <hr>
+ <ul>
+ <li><strong>Finish Current Running</strong> finishes only the currently running job. It will not start any new jobs.</li>
+ <li><strong>Cancel All</strong> immediately kills all jobs and fails the flow.</li>
+ <li><strong>Finish All Possible</strong> will keep executing jobs as long as its dependencies are met.</li>
+ </ul>
+ <select id="failure-action" name="failureAction" class="form-control form-control-auto">
+ <option value="finishCurrent">Finish Current Running</option>
+ <option value="cancelImmediately">Cancel All</option>
+ <option value="finishPossible">Finish All Possible</option>
+ </select>
+ </div>
## Concurrent execution options panel.
- <div id="concurrent-panel" class="side-panel">
- <h4>Concurrent Execution Options</h4>
- <p>If the flow is currently running, these are the options that can be set.</p>
- <hr>
- <div class="radio">
- <label>
- <input type="radio" id="skip" name="concurrent" value="skip">
- Skip Execution
- </label>
- <span class="help-block">Do not run flow if it is already running.</span>
- </div>
-
- <div class="radio">
- <label>
- <input type="radio" id="ignore" name="concurrent" value="ignore" checked="checked">
- Run Concurrently
- </label>
- <span class="help-block">Run the flow anyway. Previous execution is unaffected.</span>
- </div>
-
- <div class="radio">
- <label>
- <input type="radio" id="pipeline" name="concurrent" value="pipeline">
- Pipeline
- </label>
- <select id="pipeline-level" name="pipelineLevel" class="form-control form-control-auto input-sm">
- <option value="1">Level 1</option>
- <option value="2">Level 2</option>
- </select>
- <span class="help-block">
- Pipeline the flow, so the current execution will not be overrun.
- <ul>
- <li>Level 1: block job A until the previous flow job A has completed.</li>
- <li>Level 2: block job A until the previous flow job A's children have completed.</li>
- </li>
- </span>
- </div>
- </div>
+ <div id="concurrent-panel" class="side-panel">
+ <h4>Concurrent Execution Options</h4>
+ <p>If the flow is currently running, these are the options that can be set.</p>
+ <hr>
+ <div class="radio">
+ <label>
+ <input type="radio" id="skip" name="concurrent" value="skip">
+ Skip Execution
+ </label>
+ <span class="help-block">Do not run flow if it is already running.</span>
+ </div>
+
+ <div class="radio">
+ <label>
+ <input type="radio" id="ignore" name="concurrent" value="ignore" checked="checked">
+ Run Concurrently
+ </label>
+ <span class="help-block">Run the flow anyway. Previous execution is unaffected.</span>
+ </div>
+
+ <div class="radio">
+ <label>
+ <input type="radio" id="pipeline" name="concurrent" value="pipeline">
+ Pipeline
+ </label>
+ <select id="pipeline-level" name="pipelineLevel" class="form-control form-control-auto input-sm">
+ <option value="1">Level 1</option>
+ <option value="2">Level 2</option>
+ </select>
+ <span class="help-block">
+ Pipeline the flow, so the current execution will not be overrun.
+ <ul>
+ <li>Level 1: block job A until the previous flow job A has completed.</li>
+ <li>Level 2: block job A until the previous flow job A's children have completed.</li>
+                  </ul>
+ </span>
+ </div>
+ </div>
## Flow parameters panel
- <div id="flow-parameters-panel" class="side-panel">
- <h4>Flow Property Override</h4>
- <hr>
- <div id="editTable">
- <table class="table table-striped">
- <thead>
- <tr>
- <th class="property-key">Name</th>
- <th>Value</th>
- </tr>
- </thead>
- <tbody>
- <tr id="addRow" class="addRow">
- <td id="addRow-col" colspan="2">
- <button type="button" class="btn btn-success btn-xs" id="add-btn">Add Row</button>
- </td>
- </tr>
- </tbody>
- </table>
- </div>
- </div>
-
- </div><!-- /execution-graph-options-panel -->
- </div><!-- /col-xs-8 -->
- </div><!-- /modal-body -->
-
- <div class="modal-footer">
+ <div id="flow-parameters-panel" class="side-panel">
+ <h4>Flow Property Override</h4>
+ <hr>
+ <div id="editTable">
+ <table class="table table-striped">
+ <thead>
+ <tr>
+ <th class="property-key">Name</th>
+ <th>Value</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr id="addRow" class="addRow">
+ <td id="addRow-col" colspan="2">
+ <button type="button" class="btn btn-success btn-xs" id="add-btn">Add Row</button>
+ </td>
+ </tr>
+ </tbody>
+ </table>
+ </div>
+ </div>
+
+ </div><!-- /execution-graph-options-panel -->
+ </div><!-- /col-xs-8 -->
+ </div><!-- /modal-body -->
+
+ <div class="modal-footer">
#if (!$show_schedule || $show_schedule == 'true')
<div class="pull-left">
<button type="button" class="btn btn-success" id="schedule-btn">Schedule</button>
@@ -188,30 +188,30 @@
#*
#if ($triggerPlugins.size() > 0)
- #foreach ($triggerPlugin in $triggerPlugins)
- <button type="button" class="btn btn-default" id=set-$triggerPlugin.pluginName>$triggerPlugin.pluginName</button>
- #end
+ #foreach ($triggerPlugin in $triggerPlugins)
+ <button type="button" class="btn btn-default" id=set-$triggerPlugin.pluginName>$triggerPlugin.pluginName</button>
+ #end
#end
*#
- <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
- <button type="button" class="btn btn-primary" id="execute-btn">Execute</button>
- </div><!-- /modal-footer -->
- </div><!-- /modal-content -->
- </div><!-- /modal-dialog -->
- </div><!-- /modal -->
+ <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
+ <button type="button" class="btn btn-primary" id="execute-btn">Execute</button>
+ </div><!-- /modal-footer -->
+ </div><!-- /modal-content -->
+ </div><!-- /modal-dialog -->
+ </div><!-- /modal -->
#if (!$show_schedule || $show_schedule == 'true')
- #parse ("azkaban/webapp/servlet/velocity/schedulepanel.vm")
+ #parse ("azkaban/webapp/servlet/velocity/schedulepanel.vm")
#end
#*
#if ($triggerPlugins.size() > 0)
- #foreach ($triggerPlugin in $triggerPlugins)
- #set ($prefix = $triggerPlugin.pluginName)
- #set ($webpath = $triggerPlugin.pluginPath)
- #parse ($triggerPlugin.inputPanelVM)
- #end
+ #foreach ($triggerPlugin in $triggerPlugins)
+ #set ($prefix = $triggerPlugin.pluginName)
+ #set ($webpath = $triggerPlugin.pluginPath)
+ #parse ($triggerPlugin.inputPanelVM)
+ #end
#end
*#
- <div id="contextMenu"></div>
+ <div id="contextMenu"></div>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/flowextendedpanel.vm b/src/main/resources/azkaban/webapp/servlet/velocity/flowextendedpanel.vm
index 557d09c..4b54f61 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/flowextendedpanel.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/flowextendedpanel.vm
@@ -1,22 +1,22 @@
<div id="flowInfoBase" class="flowExtendedView" style="display:none">
- <div class="flowExtendedViewHeader">
- <h3 class="flowInfoTitle"><span class="nodeId"></span><span class="nodeType"></span></h3>
- <a title="Close" class="modal-close closeInfoPanel">x</a>
- </div>
- <div class="dataContent">
- <div class="dataFlow">
- </div>
- <div class="dataJobProperties">
- <table class="dataPropertiesTable">
- <thead class="dataPropertiesHead">
- <tr>
- <th>Name</th>
- <th>Value</th>
- </tr>
- </thead>
- <tbody class="dataPropertiesBody">
- </tbody>
- </table>
- </div>
- </div>
+ <div class="flowExtendedViewHeader">
+ <h3 class="flowInfoTitle"><span class="nodeId"></span><span class="nodeType"></span></h3>
+ <a title="Close" class="modal-close closeInfoPanel">x</a>
+ </div>
+ <div class="dataContent">
+ <div class="dataFlow">
+ </div>
+ <div class="dataJobProperties">
+ <table class="dataPropertiesTable">
+ <thead class="dataPropertiesHead">
+ <tr>
+ <th>Name</th>
+ <th>Value</th>
+ </tr>
+ </thead>
+ <tbody class="dataPropertiesBody">
+ </tbody>
+ </table>
+ </div>
+ </div>
</div>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/flowgraphview.vm b/src/main/resources/azkaban/webapp/servlet/velocity/flowgraphview.vm
index 04b71a4..6f566c8 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/flowgraphview.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/flowgraphview.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,31 +14,31 @@
* the License.
*#
- ## Graph view.
+ ## Graph view.
- <div class="container-full container-fill" id="graphView">
- <div class="graph-sidebar-open" id="open-joblist-btn">
- <span class="glyphicon glyphicon-th-list" title="Open Job List Panel"></span>
- </div>
- <div class="graph-sidebar-float">
- <div class="panel panel-default" id="joblist-panel">
- <div class="panel-heading">
- <div class="graph-sidebar-close" id="close-btn" title="Close Panel"><span class="glyphicon glyphicon-remove"></span></div>
- <div class="graph-sidebar-search">
- <input id="filter" type="text" placeholder="Job Filter" class="form-control input-sm">
- </div>
- </div>
- <div id="joblist" class="graph-sidebar-list"></div>
- <div class="panel-footer">
- <button type="button" class="btn btn-sm btn-default" id="resetPanZoomBtn">Reset Pan Zoom</button>
- <button type="button" class="btn btn-sm btn-default" id="autoPanZoomBtn" data-toggle="button">Auto Pan Zoom</button>
- </div>
- </div><!-- /.panel -->
- </div>
- <div class="col-content">
- <div id="svgDiv" class="well well-clear well-sm graph-container">
- <svg id="flow-graph" xmlns="http://www.w3.org/2000/svg" version="1.1" shape-rendering="optimize-speed" text-rendering="optimize-speed">
- </svg>
- </div>
- </div>
- </div>
+ <div class="container-full container-fill" id="graphView">
+ <div class="graph-sidebar-open" id="open-joblist-btn">
+ <span class="glyphicon glyphicon-th-list" title="Open Job List Panel"></span>
+ </div>
+ <div class="graph-sidebar-float">
+ <div class="panel panel-default" id="joblist-panel">
+ <div class="panel-heading">
+ <div class="graph-sidebar-close" id="close-btn" title="Close Panel"><span class="glyphicon glyphicon-remove"></span></div>
+ <div class="graph-sidebar-search">
+ <input id="filter" type="text" placeholder="Job Filter" class="form-control input-sm">
+ </div>
+ </div>
+ <div id="joblist" class="graph-sidebar-list"></div>
+ <div class="panel-footer">
+ <button type="button" class="btn btn-sm btn-default" id="resetPanZoomBtn">Reset Pan Zoom</button>
+ <button type="button" class="btn btn-sm btn-default" id="autoPanZoomBtn" data-toggle="button">Auto Pan Zoom</button>
+ </div>
+ </div><!-- /.panel -->
+ </div>
+ <div class="col-content">
+ <div id="svgDiv" class="well well-clear well-sm graph-container">
+ <svg id="flow-graph" xmlns="http://www.w3.org/2000/svg" version="1.1" shape-rendering="optimize-speed" text-rendering="optimize-speed">
+ </svg>
+ </div>
+ </div>
+ </div>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/flowpage.vm b/src/main/resources/azkaban/webapp/servlet/velocity/flowpage.vm
index 0394da9..02498d8 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/flowpage.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/flowpage.vm
@@ -16,43 +16,43 @@
<!DOCTYPE html>
<html lang="en">
- <head>
+ <head>
#parse("azkaban/webapp/servlet/velocity/style.vm")
#parse("azkaban/webapp/servlet/velocity/javascript.vm")
#parse("azkaban/webapp/servlet/velocity/svgflowincludes.vm")
- <script type="text/javascript" src="${context}/js/moment.min.js"></script>
+ <script type="text/javascript" src="${context}/js/moment.min.js"></script>
<script type="text/javascript" src="${context}/js/bootstrap-datetimepicker.min.js"></script>
<script type="text/javascript" src="${context}/js/raphael.min.js"></script>
<script type="text/javascript" src="${context}/js/morris.min.js"></script>
<script type="text/javascript" src="${context}/js/dust-full-2.2.3.min.js"></script>
- <script type="text/javascript" src="${context}/js/flowsummary.js"></script>
- <script type="text/javascript" src="${context}/js/flowstats-no-data.js"></script>
- <script type="text/javascript" src="${context}/js/flowstats.js"></script>
-
- <script type="text/javascript" src="${context}/js/azkaban/view/time-graph.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/util/schedule.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/schedule-sla.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/flow-stats.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/flow-execute-dialog.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/flow.js"></script>
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- var errorMessage = null;
- var successMessage = null;
-
- var projectId = ${project.id};
- var projectName = "${project.name}";
- var flowId = "${flowid}";
- var execId = null;
- </script>
- <link rel="stylesheet" type="text/css" href="${context}/css/morris.css" />
- <link rel="stylesheet" type="text/css" href="${context}/css/bootstrap-datetimepicker.css" />
- </head>
- <body>
+ <script type="text/javascript" src="${context}/js/flowsummary.js"></script>
+ <script type="text/javascript" src="${context}/js/flowstats-no-data.js"></script>
+ <script type="text/javascript" src="${context}/js/flowstats.js"></script>
+
+ <script type="text/javascript" src="${context}/js/azkaban/view/time-graph.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/util/schedule.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/schedule-sla.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/flow-stats.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/flow-execute-dialog.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/flow.js"></script>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ var errorMessage = null;
+ var successMessage = null;
+
+ var projectId = ${project.id};
+ var projectName = "${project.name}";
+ var flowId = "${flowid}";
+ var execId = null;
+ </script>
+ <link rel="stylesheet" type="text/css" href="${context}/css/morris.css" />
+ <link rel="stylesheet" type="text/css" href="${context}/css/bootstrap-datetimepicker.css" />
+ </head>
+ <body>
#set ($current_page="all")
#parse ("azkaban/webapp/servlet/velocity/nav.vm")
@@ -61,21 +61,21 @@
#parse ("azkaban/webapp/servlet/velocity/errormsg.vm")
#else
- ## Page header.
+ ## Page header.
- <div class="az-page-header page-header-bare">
- <div class="container-full">
- <div class="row">
- <div class="header-title">
- <h1><a href="${context}/manager?project=${project.name}&flow=${flowid}">Flow <small>$flowid</small></a></h1>
- </div>
- <div class="header-control">
+ <div class="az-page-header page-header-bare">
+ <div class="container-full">
+ <div class="row">
+ <div class="header-title">
+ <h1><a href="${context}/manager?project=${project.name}&flow=${flowid}">Flow <small>$flowid</small></a></h1>
+ </div>
+ <div class="header-control">
<div class="pull-right header-form">
<button type="button" class="btn btn-sm btn-success" id="executebtn">Schedule / Execute Flow</button>
</div>
- </div>
- </div>
- </div>
+ </div>
+ </div>
+ </div>
</div>
<div class="page-breadcrumb">
<div class="container-full">
@@ -86,60 +86,60 @@
</div>
</div>
- <div class="container-full">
+ <div class="container-full">
#parse ("azkaban/webapp/servlet/velocity/alerts.vm")
## Tabs
- <ul class="nav nav-tabs nav-sm" id="headertabs">
- <li id="graphViewLink"><a href="#graph">Graph</a></li>
- <li id="executionsViewLink"><a href="#executions">Executions</a></li>
- <li id="summaryViewLink"><a href="#summary">Summary</a></li>
- </ul>
+ <ul class="nav nav-tabs nav-sm" id="headertabs">
+ <li id="graphViewLink"><a href="#graph">Graph</a></li>
+ <li id="executionsViewLink"><a href="#executions">Executions</a></li>
+ <li id="summaryViewLink"><a href="#summary">Summary</a></li>
+ </ul>
</div>
- ## Graph view.
+ ## Graph view.
- #parse ("azkaban/webapp/servlet/velocity/flowgraphview.vm")
+ #parse ("azkaban/webapp/servlet/velocity/flowgraphview.vm")
- ## Executions view.
+ ## Executions view.
<div class="container-full" id="executionsView">
- <div class="row">
- <div class="col-xs-12">
- <div class="well well-clear well-sm" id="timeGraph">
+ <div class="row">
+ <div class="col-xs-12">
+ <div class="well well-clear well-sm" id="timeGraph">
</div>
- <table class="table table-striped table-bordered table-condensed table-hover" id="execTable">
- <thead>
- <tr>
- <th>Execution Id</th>
- <th>User</th>
- <th class="date">Start Time</th>
- <th class="date">End Time</th>
- <th class="elapse">Elapsed</th>
- <th class="status">Status</th>
- <th class="action">Action</th>
- </tr>
- </thead>
- <tbody id="execTableBody">
- </tbody>
- </table>
- <ul id="pageSelection" class="pagination">
- <li id="previous" class="first"><a><span class="arrow">←</span>Previous</a></li>
- <li id="page1"><a href="#page1">1</a></li>
- <li id="page2"><a href="#page2">2</a></li>
- <li id="page3"><a href="#page3">3</a></li>
- <li id="page4"><a href="#page4">4</a></li>
- <li id="page5"><a href="#page5">5</a></li>
- <li id="next"><a>Next<span class="arrow">→</span></a></li>
- </ul>
- </div>
- </div>
+ <table class="table table-striped table-bordered table-condensed table-hover" id="execTable">
+ <thead>
+ <tr>
+ <th>Execution Id</th>
+ <th>User</th>
+ <th class="date">Start Time</th>
+ <th class="date">End Time</th>
+ <th class="elapse">Elapsed</th>
+ <th class="status">Status</th>
+ <th class="action">Action</th>
+ </tr>
+ </thead>
+ <tbody id="execTableBody">
+ </tbody>
+ </table>
+ <ul id="pageSelection" class="pagination">
+ <li id="previous" class="first"><a><span class="arrow">←</span>Previous</a></li>
+ <li id="page1"><a href="#page1">1</a></li>
+ <li id="page2"><a href="#page2">2</a></li>
+ <li id="page3"><a href="#page3">3</a></li>
+ <li id="page4"><a href="#page4">4</a></li>
+ <li id="page5"><a href="#page5">5</a></li>
+ <li id="next"><a>Next<span class="arrow">→</span></a></li>
+ </ul>
+ </div>
+ </div>
</div><!-- /.container-fill -->
- ## Summary view.
+ ## Summary view.
<div class="container-full" id="summaryView">
<div id="summary-view-content">
@@ -162,14 +162,14 @@
## Context menu and the rest of the page.
<div class="container-full">
- <div id="contextMenu">
- </div>
-
- #parse ("azkaban/webapp/servlet/velocity/invalidsessionmodal.vm")
- #parse ("azkaban/webapp/servlet/velocity/flowexecutionpanel.vm")
- #parse ("azkaban/webapp/servlet/velocity/messagedialog.vm")
- #parse ("azkaban/webapp/servlet/velocity/slapanel.vm")
- </div><!-- /.container -->
+ <div id="contextMenu">
+ </div>
+
+ #parse ("azkaban/webapp/servlet/velocity/invalidsessionmodal.vm")
+ #parse ("azkaban/webapp/servlet/velocity/flowexecutionpanel.vm")
+ #parse ("azkaban/webapp/servlet/velocity/messagedialog.vm")
+ #parse ("azkaban/webapp/servlet/velocity/slapanel.vm")
+ </div><!-- /.container -->
#end
- </body>
+ </body>
</body>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/historypage.vm b/src/main/resources/azkaban/webapp/servlet/velocity/historypage.vm
index ed0fa38..16d4c87 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/historypage.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/historypage.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,26 +14,26 @@
* the License.
*#
-<!DOCTYPE html>
+<!DOCTYPE html>
<html lang="en">
- <head>
+ <head>
#parse("azkaban/webapp/servlet/velocity/style.vm")
#parse("azkaban/webapp/servlet/velocity/javascript.vm")
- <link rel="stylesheet" type="text/css" href="${context}/css/bootstrap-datetimepicker.css" />
+ <link rel="stylesheet" type="text/css" href="${context}/css/bootstrap-datetimepicker.css" />
+
+ <script type="text/javascript" src="${context}/js/moment.min.js"></script>
+ <script type="text/javascript" src="${context}/js/bootstrap-datetimepicker.min.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/history.js"></script>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ </script>
+ </head>
+ <body>
- <script type="text/javascript" src="${context}/js/moment.min.js"></script>
- <script type="text/javascript" src="${context}/js/bootstrap-datetimepicker.min.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/history.js"></script>
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- </script>
- </head>
- <body>
-
#set ($current_page="history")
#parse ("azkaban/webapp/servlet/velocity/nav.vm")
@@ -43,7 +43,7 @@
## Page header.
- <div class="az-page-header">
+ <div class="az-page-header">
<div class="container-full">
<div class="row">
<div class="header-title">
@@ -64,15 +64,15 @@
</form>
</div>
</div>
- </div>
- </div>
+ </div>
+ </div>
<div class="container-full">
#parse ("azkaban/webapp/servlet/velocity/alerts.vm")
- <div class="row">
- <div class="col-xs-12">
+ <div class="row">
+ <div class="col-xs-12">
<table id="executingJobs" class="table table-striped table-bordered table-hover table-condensed executions-table">
<thead>
<tr>
@@ -117,107 +117,107 @@
#end
</tbody>
</table>
- <ul class="pagination" id="pageSelection">
+ <ul class="pagination" id="pageSelection">
#if ($search)
- <li id="previous" class="first"><a href="${context}/history?page=${previous.page}&size=${previous.size}&search=true&searchterm=${search_term}"><span class="arrow">←</span>Previous</a></li>
- <li id="page1" #if($page1.selected) class="selected" #end><a href="${context}/history?page=${page1.page}&size=${page1.size}&search=true&searchterm=${search_term}">${page1.page}</a></li>
- <li id="page2" #if($page2.selected) class="selected" #end><a href="${context}/history?page=${page2.page}&size=${page2.size}&search=true&searchterm=${search_term}">${page2.page}</a></li>
- <li id="page3" #if($page3.selected) class="selected" #end><a href="${context}/history?page=${page3.page}&size=${page3.size}&search=true&searchterm=${search_term}">${page3.page}</a></li>
- <li id="page4" #if($page4.selected) class="selected" #end><a href="${context}/history?page=${page4.page}&size=${page4.size}&search=true&searchterm=${search_term}">${page4.page}</a></li>
- <li id="page5" #if($page5.selected) class="selected" #end><a href="${context}/history?page=${page5.page}&size=${page5.size}&search=true&searchterm=${search_term}">${page5.page}</a></li>
- <li id="next"><a href="${context}/history?page=${next.page}&size=${next.size}&search=true&searchterm=${search_term}">Next<span class="arrow">→</span></a></li>
+ <li id="previous" class="first"><a href="${context}/history?page=${previous.page}&size=${previous.size}&search=true&searchterm=${search_term}"><span class="arrow">←</span>Previous</a></li>
+ <li id="page1" #if($page1.selected) class="selected" #end><a href="${context}/history?page=${page1.page}&size=${page1.size}&search=true&searchterm=${search_term}">${page1.page}</a></li>
+ <li id="page2" #if($page2.selected) class="selected" #end><a href="${context}/history?page=${page2.page}&size=${page2.size}&search=true&searchterm=${search_term}">${page2.page}</a></li>
+ <li id="page3" #if($page3.selected) class="selected" #end><a href="${context}/history?page=${page3.page}&size=${page3.size}&search=true&searchterm=${search_term}">${page3.page}</a></li>
+ <li id="page4" #if($page4.selected) class="selected" #end><a href="${context}/history?page=${page4.page}&size=${page4.size}&search=true&searchterm=${search_term}">${page4.page}</a></li>
+ <li id="page5" #if($page5.selected) class="selected" #end><a href="${context}/history?page=${page5.page}&size=${page5.size}&search=true&searchterm=${search_term}">${page5.page}</a></li>
+ <li id="next"><a href="${context}/history?page=${next.page}&size=${next.size}&search=true&searchterm=${search_term}">Next<span class="arrow">→</span></a></li>
#elseif($advfilter)
- <li id="previous" class="first"><a href="${context}/history?page=${previous.page}&size=${previous.size}&advfilter=true&projcontain=${projcontain}&flowcontain=${flowcontain}&usercontain=${usercontain}&status=${status}&begin=${begin}&end=${end}"><span class="arrow">←</span>Previous</a></li>
- <li id="page1" #if($page1.selected) class="selected" #end><a href="${context}/history?page=${page1.page}&size=${page1.size}&advfilter=true&projcontain=${projcontain}&flowcontain=${flowcontain}&usercontain=${usercontain}&status=${status}&begin=${begin}&end=${end}">${page1.page}</a></li>
- <li id="page2" #if($page2.selected) class="selected" #end><a href="${context}/history?page=${page2.page}&size=${page2.size}&advfilter=true&projcontain=${projcontain}&flowcontain=${flowcontain}&usercontain=${usercontain}&status=${status}&begin=${begin}&end=${end}">${page2.page}</a></li>
- <li id="page3" #if($page3.selected) class="selected" #end><a href="${context}/history?page=${page3.page}&size=${page3.size}&advfilter=true&projcontain=${projcontain}&flowcontain=${flowcontain}&usercontain=${usercontain}&status=${status}&begin=${begin}&end=${end}">${page3.page}</a></li>
- <li id="page4" #if($page4.selected) class="selected" #end><a href="${context}/history?page=${page4.page}&size=${page4.size}&advfilter=true&projcontain=${projcontain}&flowcontain=${flowcontain}&usercontain=${usercontain}&status=${status}&begin=${begin}&end=${end}">${page4.page}</a></li>
- <li id="page5" #if($page5.selected) class="selected" #end><a href="${context}/history?page=${page5.page}&size=${page5.size}&advfilter=true&projcontain=${projcontain}&flowcontain=${flowcontain}&usercontain=${usercontain}&status=${status}&begin=${begin}&end=${end}">${page5.page}</a></li>
- <li id="next"><a href="${context}/history?page=${next.page}&size=${next.size}&advfilter=true&projcontain=${projcontain}&flowcontain=${flowcontain}&usercontain=${usercontain}&status=${status}&begin=${begin}&end=${end}">Next<span class="arrow">→</span></a></li>
+ <li id="previous" class="first"><a href="${context}/history?page=${previous.page}&size=${previous.size}&advfilter=true&projcontain=${projcontain}&flowcontain=${flowcontain}&usercontain=${usercontain}&status=${status}&begin=${begin}&end=${end}"><span class="arrow">←</span>Previous</a></li>
+ <li id="page1" #if($page1.selected) class="selected" #end><a href="${context}/history?page=${page1.page}&size=${page1.size}&advfilter=true&projcontain=${projcontain}&flowcontain=${flowcontain}&usercontain=${usercontain}&status=${status}&begin=${begin}&end=${end}">${page1.page}</a></li>
+ <li id="page2" #if($page2.selected) class="selected" #end><a href="${context}/history?page=${page2.page}&size=${page2.size}&advfilter=true&projcontain=${projcontain}&flowcontain=${flowcontain}&usercontain=${usercontain}&status=${status}&begin=${begin}&end=${end}">${page2.page}</a></li>
+ <li id="page3" #if($page3.selected) class="selected" #end><a href="${context}/history?page=${page3.page}&size=${page3.size}&advfilter=true&projcontain=${projcontain}&flowcontain=${flowcontain}&usercontain=${usercontain}&status=${status}&begin=${begin}&end=${end}">${page3.page}</a></li>
+ <li id="page4" #if($page4.selected) class="selected" #end><a href="${context}/history?page=${page4.page}&size=${page4.size}&advfilter=true&projcontain=${projcontain}&flowcontain=${flowcontain}&usercontain=${usercontain}&status=${status}&begin=${begin}&end=${end}">${page4.page}</a></li>
+ <li id="page5" #if($page5.selected) class="selected" #end><a href="${context}/history?page=${page5.page}&size=${page5.size}&advfilter=true&projcontain=${projcontain}&flowcontain=${flowcontain}&usercontain=${usercontain}&status=${status}&begin=${begin}&end=${end}">${page5.page}</a></li>
+ <li id="next"><a href="${context}/history?page=${next.page}&size=${next.size}&advfilter=true&projcontain=${projcontain}&flowcontain=${flowcontain}&usercontain=${usercontain}&status=${status}&begin=${begin}&end=${end}">Next<span class="arrow">→</span></a></li>
#else
- <li id="previous" class="first"><a href="${context}/history?page=${previous.page}&size=${previous.size}"><span class="arrow">←</span>Previous</a></li>
- <li id="page1" #if($page1.selected) class="selected" #end><a href="${context}/history?page=${page1.page}&size=${page1.size}">${page1.page}</a></li>
- <li id="page2" #if($page2.selected) class="selected" #end><a href="${context}/history?page=${page2.page}&size=${page2.size}">${page2.page}</a></li>
- <li id="page3" #if($page3.selected) class="selected" #end><a href="${context}/history?page=${page3.page}&size=${page3.size}">${page3.page}</a></li>
- <li id="page4" #if($page4.selected) class="selected" #end><a href="${context}/history?page=${page4.page}&size=${page4.size}">${page4.page}</a></li>
- <li id="page5" #if($page5.selected) class="selected" #end><a href="${context}/history?page=${page5.page}&size=${page5.size}">${page5.page}</a></li>
- <li id="next"><a href="${context}/history?page=${next.page}&size=${next.size}">Next<span class="arrow">→</span></a></li>
+ <li id="previous" class="first"><a href="${context}/history?page=${previous.page}&size=${previous.size}"><span class="arrow">←</span>Previous</a></li>
+ <li id="page1" #if($page1.selected) class="selected" #end><a href="${context}/history?page=${page1.page}&size=${page1.size}">${page1.page}</a></li>
+ <li id="page2" #if($page2.selected) class="selected" #end><a href="${context}/history?page=${page2.page}&size=${page2.size}">${page2.page}</a></li>
+ <li id="page3" #if($page3.selected) class="selected" #end><a href="${context}/history?page=${page3.page}&size=${page3.size}">${page3.page}</a></li>
+ <li id="page4" #if($page4.selected) class="selected" #end><a href="${context}/history?page=${page4.page}&size=${page4.size}">${page4.page}</a></li>
+ <li id="page5" #if($page5.selected) class="selected" #end><a href="${context}/history?page=${page5.page}&size=${page5.size}">${page5.page}</a></li>
+ <li id="next"><a href="${context}/history?page=${next.page}&size=${next.size}">Next<span class="arrow">→</span></a></li>
#end
- </ul>
- </div><!-- /col-xs-12 -->
- </div><!-- /row -->
+ </ul>
+ </div><!-- /col-xs-12 -->
+ </div><!-- /row -->
## Advanced Filter Modal.
- <div class="modal" id="adv-filter">
- <div class="modal-dialog">
- <div class="modal-content">
- <div class="modal-header">
- <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
- <h4 class="modal-title">Advanced Filter</h4>
- </div>
- <div class="modal-body">
- <div class="alert alert-danger" id="adv-filter-error-msg">$error_msg</div>
- <fieldset class="form-horizontal">
- <div class="form-group">
- <label for="projcontain" class="col-sm-2 control-label">Project</label>
- <div class="col-sm-10">
- <input id="projcontain" type="text" placeholder="Project name containing ..." value="" class="form-control" name="projcontain">
- </div>
- </div>
- <div class="form-group">
- <label for="flowcontain" class="col-sm-2 control-label">Flow</label>
- <div class="col-sm-10">
- <input id="flowcontain" type="text" placeholder="Flow name containing ..." value="" class="form-control" name="flowcontain">
- </div>
- </div>
- <div class="form-group">
- <label for="usercontain" class="col-sm-2 control-label">User</label>
- <div class="col-sm-10">
- <input id="usercontain" type="text" placeholder="User name containing ..." value="" class="form-control" name="usercontain">
- </div>
- </div>
- <div class="form-group">
- <label for="status" class="col-sm-2 control-label">Status</label>
- <div class="col-sm-10">
- <select id="status" class="form-control">
- <option value=0>All Status</option>
- <option value=10>Ready</option>
- <option value=20>Preapring</option>
- <option value=30>Running</option>
- <option value=40>Paused</option>
- <option value=50>Succeed</option>
- <option value=60>Killed</option>
- <option value=70>Failed</option>
- <option value=80>Failed Finishing</option>
- <option value=90>Skipped</option>
- <option value=100>Disabled</option>
- <option value=110>Queued</option>
- </select>
- </div>
- </div>
- <div class="form-group">
- <label for="datetimebegin" class="col-sm-2 control-label">Between</label>
- <div class="col-sm-4">
- <input type="text" id="datetimebegin" value="" class="ui-datetime-container form-control">
- </div>
- <label for="datetimeend" class="col-sm-2 control-label control-label-center">and</label>
- <div class="col-sm-4">
- <input type="text" id="datetimeend" value="" class="ui-datetime-container form-control">
- </div>
- </div>
- </fieldset>
- </div>
- <div class="modal-footer">
- <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
- <button class="btn btn-success" id="filter-btn">Filter</button>
- </div>
- </div>
- </div>
- </div>
-
+ <div class="modal" id="adv-filter">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title">Advanced Filter</h4>
+ </div>
+ <div class="modal-body">
+ <div class="alert alert-danger" id="adv-filter-error-msg">$error_msg</div>
+ <fieldset class="form-horizontal">
+ <div class="form-group">
+ <label for="projcontain" class="col-sm-2 control-label">Project</label>
+ <div class="col-sm-10">
+ <input id="projcontain" type="text" placeholder="Project name containing ..." value="" class="form-control" name="projcontain">
+ </div>
+ </div>
+ <div class="form-group">
+ <label for="flowcontain" class="col-sm-2 control-label">Flow</label>
+ <div class="col-sm-10">
+ <input id="flowcontain" type="text" placeholder="Flow name containing ..." value="" class="form-control" name="flowcontain">
+ </div>
+ </div>
+ <div class="form-group">
+ <label for="usercontain" class="col-sm-2 control-label">User</label>
+ <div class="col-sm-10">
+ <input id="usercontain" type="text" placeholder="User name containing ..." value="" class="form-control" name="usercontain">
+ </div>
+ </div>
+ <div class="form-group">
+ <label for="status" class="col-sm-2 control-label">Status</label>
+ <div class="col-sm-10">
+ <select id="status" class="form-control">
+ <option value=0>All Status</option>
+ <option value=10>Ready</option>
+ <option value=20>Preparing</option>
+ <option value=30>Running</option>
+ <option value=40>Paused</option>
+ <option value=50>Succeed</option>
+ <option value=60>Killed</option>
+ <option value=70>Failed</option>
+ <option value=80>Failed Finishing</option>
+ <option value=90>Skipped</option>
+ <option value=100>Disabled</option>
+ <option value=110>Queued</option>
+ </select>
+ </div>
+ </div>
+ <div class="form-group">
+ <label for="datetimebegin" class="col-sm-2 control-label">Between</label>
+ <div class="col-sm-4">
+ <input type="text" id="datetimebegin" value="" class="ui-datetime-container form-control">
+ </div>
+ <label for="datetimeend" class="col-sm-2 control-label control-label-center">and</label>
+ <div class="col-sm-4">
+ <input type="text" id="datetimeend" value="" class="ui-datetime-container form-control">
+ </div>
+ </div>
+ </fieldset>
+ </div>
+ <div class="modal-footer">
+ <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
+ <button class="btn btn-success" id="filter-btn">Filter</button>
+ </div>
+ </div>
+ </div>
+ </div>
+
#parse ("azkaban/webapp/servlet/velocity/invalidsessionmodal.vm")
- </div><!-- /container-full -->
+ </div><!-- /container-full -->
#end
- </body>
+ </body>
<html>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/index.vm b/src/main/resources/azkaban/webapp/servlet/velocity/index.vm
index a4fc481..f1cf5b9 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/index.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/index.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,31 +14,31 @@
* the License.
*#
-<!DOCTYPE html>
+<!DOCTYPE html>
<html lang="en">
- <head>
+ <head>
#parse("azkaban/webapp/servlet/velocity/style.vm")
#parse("azkaban/webapp/servlet/velocity/javascript.vm")
- <script type="text/javascript" src="${context}/js/azkaban/view/table-sort.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/main.js"></script>
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- var errorMessage = null;
- var successMessage = null;
- </script>
- </head>
- <body>
+ <script type="text/javascript" src="${context}/js/azkaban/view/table-sort.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/main.js"></script>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ var errorMessage = null;
+ var successMessage = null;
+ </script>
+ </head>
+ <body>
#set ($current_page="all")
#parse ("azkaban/webapp/servlet/velocity/nav.vm")
## Page Header and project search form.
- <div class="az-page-header">
+ <div class="az-page-header">
<div class="container-full">
<div class="row">
<div class="header-title">
@@ -48,7 +48,7 @@
<form id="search-form" method="get" class="form-inline header-form" role="form">
<input type="hidden" name="doaction" value="search">
#if ($viewProjects == 'all')
- <input type="hidden" name="all" value="true">
+ <input type="hidden" name="all" value="true">
#end
<div class="form-group col-xs-9">
<div class="input-group">
@@ -70,16 +70,16 @@
</form>
</div>
</div>
- </div>
- </div>
+ </div>
+ </div>
+
+ <div class="container-full">
- <div class="container-full">
-
#parse ("azkaban/webapp/servlet/velocity/alerts.vm")
## Table of projects.
- <div class="row row-offcanvas row-offcanvas-left">
+ <div class="row row-offcanvas row-offcanvas-left">
<div class="col-xs-6 col-sm-3 sidebar-offcanvas">
<ul class="nav nav-pills nav-stacked">
<li#if ($viewProjects == 'personal') class="active"#end><a href="${context}/index">Personal</a></li>
@@ -87,10 +87,10 @@
<li#if ($viewProjects == 'all') class="active"#end><a href="${context}/index?all">All</a></li>
</ul>
</div>
- <div class="col-xs-12 col-sm-9 col-content">
+ <div class="col-xs-12 col-sm-9 col-content">
#if (!$projects.isEmpty())
<ul id="project-list">
- #foreach ($project in $projects)
+ #foreach ($project in $projects)
<li>
<div class="project-info">
<h4><a href="${context}/manager?project=${project.name}">$project.name</a></h4>
@@ -120,43 +120,43 @@
## Modal dialog to be displayed to create a new project.
- <div class="modal" id="create-project-modal">
- <div class="modal-dialog">
- <div class="modal-content">
- <div class="modal-header">
- <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
- <h4 class="modal-title">Create Project</h4>
- </div>
- <div class="modal-body">
- <div class="alert alert-danger" id="modal-error-msg">$error_msg</div>
- <fieldset class="form-horizontal">
- <div class="form-group">
- <label for="path" class="col-sm-2 control-label">Name</label>
- <div class="col-sm-10">
- <input id="path" name="project" type="text" class="form-control" placeholder="Project name">
- </div>
- </div>
- <div class="form-group">
- <label for="description" class="col-sm-2 control-label">Description</label>
- <div class="col-sm-10">
- <textarea id="description" name="description" rows="2" cols="40" class="form-control" placeholder="Project description"></textarea>
- </div>
- </div>
- </fieldset>
- </div>
- <div class="modal-footer">
- <input name="action" type="hidden" value="create">
- <input name="redirect" type="hidden" value="$!context/">
- <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
- <button type="button" class="btn btn-primary" id="create-btn">Create Project</button>
- </div>
- </div>
- </div>
- </div>
+ <div class="modal" id="create-project-modal">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title">Create Project</h4>
+ </div>
+ <div class="modal-body">
+ <div class="alert alert-danger" id="modal-error-msg">$error_msg</div>
+ <fieldset class="form-horizontal">
+ <div class="form-group">
+ <label for="path" class="col-sm-2 control-label">Name</label>
+ <div class="col-sm-10">
+ <input id="path" name="project" type="text" class="form-control" placeholder="Project name">
+ </div>
+ </div>
+ <div class="form-group">
+ <label for="description" class="col-sm-2 control-label">Description</label>
+ <div class="col-sm-10">
+ <textarea id="description" name="description" rows="2" cols="40" class="form-control" placeholder="Project description"></textarea>
+ </div>
+ </div>
+ </fieldset>
+ </div>
+ <div class="modal-footer">
+ <input name="action" type="hidden" value="create">
+ <input name="redirect" type="hidden" value="$!context/">
+ <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
+ <button type="button" class="btn btn-primary" id="create-btn">Create Project</button>
+ </div>
+ </div>
+ </div>
+ </div>
#parse ("azkaban/webapp/servlet/velocity/invalidsessionmodal.vm")
- </div><!-- /container -->
- </body>
+ </div><!-- /container -->
+ </body>
</html>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/invalidsessionmodal.vm b/src/main/resources/azkaban/webapp/servlet/velocity/invalidsessionmodal.vm
index 6f0651d..b405d80 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/invalidsessionmodal.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/invalidsessionmodal.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,21 +14,21 @@
* the License.
*#
- ## Modal dialog to be displayed when the user sesion is invalid.
-
- <div class="modal" id="invalid-session-modal">
- <div class="modal-dialog">
- <div class="modal-content">
- <div class="modal-header">
- <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
- <h4 class="modal-title">Invalid Session</h4>
- </div>
- <div class="modal-body">
- <p>Session has expired. Please re-login.</p>
- </div>
- <div class="modal-footer">
- <button type="button" class="btn btn-primary" id="login-btn">Re-login</button>
- </div>
- </div>
- </div>
- </div>
 + ## Modal dialog to be displayed when the user session is invalid.
+
+ <div class="modal" id="invalid-session-modal">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title">Invalid Session</h4>
+ </div>
+ <div class="modal-body">
+ <p>Session has expired. Please re-login.</p>
+ </div>
+ <div class="modal-footer">
+ <button type="button" class="btn btn-primary" id="login-btn">Re-login</button>
+ </div>
+ </div>
+ </div>
+ </div>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/javascript.vm b/src/main/resources/azkaban/webapp/servlet/velocity/javascript.vm
index 2ebb28b..c8576f8 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/javascript.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/javascript.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,8 +14,8 @@
* the License.
*#
- <script type="text/javascript" src="${context}/js/jquery/jquery-1.9.1.js"></script>
- <script type="text/javascript" src="${context}/js/bootstrap.min.js"></script>
- <script type="text/javascript" src="${context}/js/underscore-1.4.4-min.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/namespace.js"></script>
- <script type="text/javascript" src="${context}/js/backbone-0.9.10-min.js"></script>
+ <script type="text/javascript" src="${context}/js/jquery/jquery-1.9.1.js"></script>
+ <script type="text/javascript" src="${context}/js/bootstrap.min.js"></script>
+ <script type="text/javascript" src="${context}/js/underscore-1.4.4-min.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/namespace.js"></script>
+ <script type="text/javascript" src="${context}/js/backbone-0.9.10-min.js"></script>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/jmxpage.vm b/src/main/resources/azkaban/webapp/servlet/velocity/jmxpage.vm
index d815ca1..16d1d3b 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/jmxpage.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/jmxpage.vm
@@ -16,22 +16,22 @@
<!DOCTYPE html>
<html lang="en">
- <head>
+ <head>
#parse ("azkaban/webapp/servlet/velocity/style.vm")
#parse ("azkaban/webapp/servlet/velocity/javascript.vm")
- <script type="text/javascript" src="${context}/js/azkaban/util/date.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/jmx.js"></script>
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- var errorMessage = null;
- var successMessage = null;
- </script>
- </head>
- <body>
+ <script type="text/javascript" src="${context}/js/azkaban/util/date.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/jmx.js"></script>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ var errorMessage = null;
+ var successMessage = null;
+ </script>
+ </head>
+ <body>
#set ($current_page="all")
#set ($counter = 0)
@@ -41,164 +41,164 @@
#parse ("azkaban/webapp/servlet/velocity/errormsg.vm")
#else
- <div class="az-page-header">
- <div class="container-full">
- <h1>Admin JMX Http Page</h1>
- </div>
- </div>
+ <div class="az-page-header">
+ <div class="container-full">
+ <h1>Admin JMX Http Page</h1>
+ </div>
+ </div>
- <div class="container-full">
+ <div class="container-full">
## Web Client JMX
- <div class="row">
- <div class="col-xs-12">
- <div class="panel panel-default">
- <div class="panel-heading">Web Client JMX</div>
- <table id="all-jmx" class="table table-condensed table-bordered table-striped table-hover">
- <thead>
- <tr>
- <th>Name</th>
- <th>Domain</th>
- <th>Canonical Name</th>
- <th></th>
- </tr>
- </thead>
- <tbody>
+ <div class="row">
+ <div class="col-xs-12">
+ <div class="panel panel-default">
+ <div class="panel-heading">Web Client JMX</div>
+ <table id="all-jmx" class="table table-condensed table-bordered table-striped table-hover">
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th>Domain</th>
+ <th>Canonical Name</th>
+ <th></th>
+ </tr>
+ </thead>
+ <tbody>
#foreach ($bean in $mbeans)
- <tr>
- <td>${bean.keyPropertyList.get("name")}</td>
- <td>${bean.domain}</td>
- <td>${bean.canonicalName}</td>
- <td><button type="button" class="btn btn-default btn-sm query-btn" id="expandBtn-$counter" domain="${bean.domain}" name="${bean.keyPropertyList.get("name")}">Query</button></td>
- </tr>
-
- <tr class="childrow" id="expandBtn-${counter}-child" style="display: none;">
- <td class="expandedFlow" colspan="3">
- <table class="table table-condensed table-bordered table-striped table-hover">
- <thead>
- <tr>
- <th>Attribute Name</th>
- <th>Value</th>
- </tr>
- </thead>
- <tbody id="expandBtn-${counter}-tbody">
- </tbody>
- </table>
- </td>
-
- <td>
- <button type="button" class="btn btn-default btn-sm collapse-btn">Collapse</button>
- </td>
- </tr>
+ <tr>
+ <td>${bean.keyPropertyList.get("name")}</td>
+ <td>${bean.domain}</td>
+ <td>${bean.canonicalName}</td>
+ <td><button type="button" class="btn btn-default btn-sm query-btn" id="expandBtn-$counter" domain="${bean.domain}" name="${bean.keyPropertyList.get("name")}">Query</button></td>
+ </tr>
+
+ <tr class="childrow" id="expandBtn-${counter}-child" style="display: none;">
+ <td class="expandedFlow" colspan="3">
+ <table class="table table-condensed table-bordered table-striped table-hover">
+ <thead>
+ <tr>
+ <th>Attribute Name</th>
+ <th>Value</th>
+ </tr>
+ </thead>
+ <tbody id="expandBtn-${counter}-tbody">
+ </tbody>
+ </table>
+ </td>
+
+ <td>
+ <button type="button" class="btn btn-default btn-sm collapse-btn">Collapse</button>
+ </td>
+ </tr>
#set ($counter = $counter + 1)
#end
- </tbody>
- </table>
+ </tbody>
+ </table>
- </div>
- </div>
- </div>
+ </div>
+ </div>
+ </div>
#foreach ($executor in $executorRemoteMBeans.entrySet())
- <div class="row">
- <div class="col-xs-12">
- <div class="panel panel-default">
- <div class="panel-heading">Remote Executor JMX $executor.key</div>
- <table class="remoteJMX table table-striped table-condensed table-bordered table-hover">
- <thead>
- <tr>
- <th>Name</th>
- <th>Domain</th>
- <th>Canonical Name</th>
- <th></th>
- </tr>
- </thead>
- <tbody>
+ <div class="row">
+ <div class="col-xs-12">
+ <div class="panel panel-default">
+ <div class="panel-heading">Remote Executor JMX $executor.key</div>
+ <table class="remoteJMX table table-striped table-condensed table-bordered table-hover">
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th>Domain</th>
+ <th>Canonical Name</th>
+ <th></th>
+ </tr>
+ </thead>
+ <tbody>
#foreach ($bean in $executor.value)
- <tr>
- <td>${bean.get("keyPropertyList").get("name")}</td>
- <td>${bean.get("domain")}</td>
- <td>${bean.get("canonicalName")}</td>
- <td><button type="button" class="btn btn-default btn-sm query-btn" id="expandBtn-$counter" domain="${bean.get("domain")}" name="${bean.get("keyPropertyList").get("name")}" hostport="$executor.key">Query</button></td>
- </tr>
- <tr class="childrow" id="expandBtn-${counter}-child" style="display: none;">
- <td class="expandedFlow" colspan="3">
- <table class="table table-striped table-condensed table-bordered table-hover">
- <thead>
- <tr>
- <th>Attribute Name</th>
- <th>Value</th>
- </tr>
- </thead>
- <tbody id="expandBtn-${counter}-tbody">
- </tbody>
- </table>
- </td>
-
- <td>
- <button type="button" class="btn btn-default btn-sm collapse-btn">Collapse</button>
- </td>
- </tr>
+ <tr>
+ <td>${bean.get("keyPropertyList").get("name")}</td>
+ <td>${bean.get("domain")}</td>
+ <td>${bean.get("canonicalName")}</td>
+ <td><button type="button" class="btn btn-default btn-sm query-btn" id="expandBtn-$counter" domain="${bean.get("domain")}" name="${bean.get("keyPropertyList").get("name")}" hostport="$executor.key">Query</button></td>
+ </tr>
+ <tr class="childrow" id="expandBtn-${counter}-child" style="display: none;">
+ <td class="expandedFlow" colspan="3">
+ <table class="table table-striped table-condensed table-bordered table-hover">
+ <thead>
+ <tr>
+ <th>Attribute Name</th>
+ <th>Value</th>
+ </tr>
+ </thead>
+ <tbody id="expandBtn-${counter}-tbody">
+ </tbody>
+ </table>
+ </td>
+
+ <td>
+ <button type="button" class="btn btn-default btn-sm collapse-btn">Collapse</button>
+ </td>
+ </tr>
#set ($counter = $counter + 1)
#end
- </tbody>
- </table>
- </div>
- </div>
- </div>
+ </tbody>
+ </table>
+ </div>
+ </div>
+ </div>
#end
#foreach ($triggerserver in $triggerserverRemoteMBeans.entrySet())
- <div class="row">
- <div class="col-xs-12">
- <div class="panel panel-default">
- <div class="panel-heading">Remote Trigger Server JMX $triggerserver.key</div>
- <table class="remoteJMX table table-condensed table-striped table-bordered table-hover">
- <thead>
- <tr>
- <th>Name</th>
- <th>Domain</th>
- <th>Canonical Name</th>
- <th></th>
- </tr>
- </thead>
- <tbody>
+ <div class="row">
+ <div class="col-xs-12">
+ <div class="panel panel-default">
+ <div class="panel-heading">Remote Trigger Server JMX $triggerserver.key</div>
+ <table class="remoteJMX table table-condensed table-striped table-bordered table-hover">
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th>Domain</th>
+ <th>Canonical Name</th>
+ <th></th>
+ </tr>
+ </thead>
+ <tbody>
#foreach ($bean in $triggerserver.value)
- <tr>
- <td>${bean.get("keyPropertyList").get("name")}</td>
- <td>${bean.get("domain")}</td>
- <td>${bean.get("canonicalName")}</td>
- <td><button type="button" class="btn btn-default btn-sm querybtn" id="expandBtn-$counter" domain="${bean.get("domain")}" name="${bean.get("keyPropertyList").get("name")}" hostport="$triggerserver.key">Query</button></td>
- </tr>
- <tr class="childrow" id="expandBtn-${counter}-child" style="display: none;">
- <td class="expandedFlow" colspan="3">
- <table class="table table-striped table-condensed table-bordered table-hover">
- <thead>
- <tr>
- <th>Attribute Name</th>
- <th>Value</th>
- </tr>
- </thead>
- <tbody id="expandBtn-${counter}-tbody">
- </tbody>
- </table>
- </td>
-
- <td>
- <button type="button" class="btn btn-default btn-sm collapse-btn">Collapse</button>
- </td>
- </tr>
+ <tr>
+ <td>${bean.get("keyPropertyList").get("name")}</td>
+ <td>${bean.get("domain")}</td>
+ <td>${bean.get("canonicalName")}</td>
 + <td><button type="button" class="btn btn-default btn-sm query-btn" id="expandBtn-$counter" domain="${bean.get("domain")}" name="${bean.get("keyPropertyList").get("name")}" hostport="$triggerserver.key">Query</button></td>
+ </tr>
+ <tr class="childrow" id="expandBtn-${counter}-child" style="display: none;">
+ <td class="expandedFlow" colspan="3">
+ <table class="table table-striped table-condensed table-bordered table-hover">
+ <thead>
+ <tr>
+ <th>Attribute Name</th>
+ <th>Value</th>
+ </tr>
+ </thead>
+ <tbody id="expandBtn-${counter}-tbody">
+ </tbody>
+ </table>
+ </td>
+
+ <td>
+ <button type="button" class="btn btn-default btn-sm collapse-btn">Collapse</button>
+ </td>
+ </tr>
#set ($counter = $counter + 1)
#end
- </tbody>
- </table>
+ </tbody>
+ </table>
- </div>
- </div>
- </div>
+ </div>
+ </div>
+ </div>
#end
</div>
#end
- </body>
+ </body>
</html>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/jobdetailsheader.vm b/src/main/resources/azkaban/webapp/servlet/velocity/jobdetailsheader.vm
index 8d11704..e637a82 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/jobdetailsheader.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/jobdetailsheader.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2014 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,21 +14,21 @@
* the License.
*#
- ## Page header.
+ ## Page header.
- <div class="az-page-header page-header-bare">
- <div class="container-full">
- <div class="row">
- <div class="header-title">
- <h1><a href="${context}/executor?execid=${execid}&job=${jobid}">Job Execution <small>$jobid</small></a></h1>
- </div>
- <div class="header-control">
- <div class="pull-right header-form">
- <a href="${context}/manager?project=${projectName}&flow=${parentflowid}&job=$jobname" class="btn btn-info btn-sm">Job Properties</a>
- </div>
- </div>
- </div>
- </div>
+ <div class="az-page-header page-header-bare">
+ <div class="container-full">
+ <div class="row">
+ <div class="header-title">
+ <h1><a href="${context}/executor?execid=${execid}&job=${jobid}">Job Execution <small>$jobid</small></a></h1>
+ </div>
+ <div class="header-control">
+ <div class="pull-right header-form">
+ <a href="${context}/manager?project=${projectName}&flow=${parentflowid}&job=$jobname" class="btn btn-info btn-sm">Job Properties</a>
+ </div>
+ </div>
+ </div>
+ </div>
</div>
<div class="page-breadcrumb">
<div class="container-full">
@@ -40,21 +40,21 @@
</ol>
</div>
</div>
-
+
<div class="container-full">
#parse ("azkaban/webapp/servlet/velocity/alerts.vm")
## Tabs
- <ul class="nav nav-tabs nav-sm" id="headertabs">
- #if ($current_page == "executing")
- <li class="active" id="jobLogViewLink"><a href="#logs">Job Logs</a></li>
- #else
- <li id="jobLogViewLink"><a href="${context}/executor?execid=${execid}&job=${jobid}#logs">Job Logs</a></li>
- #end
- #foreach ($jobViewerPlugin in $jobViewerPlugins)
- <li#if($current_page == $jobViewerPlugin.pluginName) class="active"#end><a href="$!context/${jobViewerPlugin.pluginPath}?execid=${execid}&jobid=${jobid}">$jobViewerPlugin.pluginName</a></li>
- #end
- </ul>
+ <ul class="nav nav-tabs nav-sm" id="headertabs">
+ #if ($current_page == "executing")
+ <li class="active" id="jobLogViewLink"><a href="#logs">Job Logs</a></li>
+ #else
+ <li id="jobLogViewLink"><a href="${context}/executor?execid=${execid}&job=${jobid}#logs">Job Logs</a></li>
+ #end
+ #foreach ($jobViewerPlugin in $jobViewerPlugins)
+ <li#if($current_page == $jobViewerPlugin.pluginName) class="active"#end><a href="$!context/${jobViewerPlugin.pluginPath}?execid=${execid}&jobid=${jobid}">$jobViewerPlugin.pluginName</a></li>
+ #end
+ </ul>
</div>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/jobdetailspage.vm b/src/main/resources/azkaban/webapp/servlet/velocity/jobdetailspage.vm
index 80d90e1..0d4f35f 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/jobdetailspage.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/jobdetailspage.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,31 +14,31 @@
* the License.
*#
-<!DOCTYPE html>
+<!DOCTYPE html>
<html lang="en">
- <head>
+ <head>
#parse("azkaban/webapp/servlet/velocity/style.vm")
#parse("azkaban/webapp/servlet/velocity/javascript.vm")
- <script type="text/javascript" src="${context}/js/azkaban/util/ajax.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/model/job-log.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/job-details.js"></script>
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- var errorMessage = null;
- var successMessage = null;
-
- var projectName = "${projectName}";
- var flowName = "${flowid}";
- var execId = "${execid}";
- var jobId = "${jobid}";
- var attempt = ${attempt};
- </script>
- </head>
- <body>
+ <script type="text/javascript" src="${context}/js/azkaban/util/ajax.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/model/job-log.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/job-details.js"></script>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ var errorMessage = null;
+ var successMessage = null;
+
+ var projectName = "${projectName}";
+ var flowName = "${flowid}";
+ var execId = "${execid}";
+ var jobId = "${jobid}";
+ var attempt = ${attempt};
+ </script>
+ </head>
+ <body>
#set ($current_page="executing")
#parse ("azkaban/webapp/servlet/velocity/nav.vm")
@@ -49,11 +49,11 @@
#parse ("azkaban/webapp/servlet/velocity/jobdetailsheader.vm")
- ## Log content.
+ ## Log content.
<div class="container-full container-fill" id="jobLogView">
- <div class="row">
- <div class="col-xs-12 col-content">
+ <div class="row">
+ <div class="col-xs-12 col-content">
<div class="log-viewer">
<div class="panel panel-default">
<div class="panel-heading">
@@ -70,25 +70,25 @@
</div>
</div>
</div>
-
+
## Error message message dialog.
<div class="container-full">
- <div class="modal" id="messageDialog">
- <div class="modal-dialog">
- <div class="modal-content">
- <div class="modal-header" id="messageTitle">
- <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
- <h4 class="modal-title">Error</h4>
- </div>
- <div class="modal-body" id="messageDiv">
- <p id="messageBox"></p>
- </div>
- </div>
- </div>
- </div>
+ <div class="modal" id="messageDialog">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header" id="messageTitle">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title">Error</h4>
+ </div>
+ <div class="modal-body" id="messageDiv">
+ <p id="messageBox"></p>
+ </div>
+ </div>
+ </div>
+ </div>
- </div>
+ </div>
#end
- </body>
+ </body>
</html>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/jobhistorypage.vm b/src/main/resources/azkaban/webapp/servlet/velocity/jobhistorypage.vm
index 270f8e1..368ef8b 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/jobhistorypage.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/jobhistorypage.vm
@@ -16,31 +16,31 @@
<!DOCTYPE html>
<html lang="en">
- <head>
+ <head>
#parse("azkaban/webapp/servlet/velocity/style.vm")
#parse("azkaban/webapp/servlet/velocity/javascript.vm")
<script type="text/javascript" src="${context}/js/raphael.min.js"></script>
<script type="text/javascript" src="${context}/js/morris.min.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/util/date.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/time-graph.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/job-history.js"></script>
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- var errorMessage = null;
- var successMessage = null;
-
- var projectId = "$projectId";
- var projectName = "$projectName";
- var jobName = "$jobid";
- var dataSeries = $dataSeries;
- </script>
- <link rel="stylesheet" type="text/css" href="${context}/css/morris.css" />
- </head>
- <body>
+ <script type="text/javascript" src="${context}/js/azkaban/util/date.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/time-graph.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/job-history.js"></script>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ var errorMessage = null;
+ var successMessage = null;
+
+ var projectId = "$projectId";
+ var projectName = "$projectName";
+ var jobName = "$jobid";
+ var dataSeries = $dataSeries;
+ </script>
+ <link rel="stylesheet" type="text/css" href="${context}/css/morris.css" />
+ </head>
+ <body>
#set ($current_page="all")
#parse ("azkaban/webapp/servlet/velocity/nav.vm")
@@ -49,12 +49,12 @@
#parse ("azkaban/webapp/servlet/velocity/errormsg.vm")
#else
- ## Page header
+ ## Page header
- <div class="az-page-header page-header-bare">
- <div class="container-full">
+ <div class="az-page-header page-header-bare">
+ <div class="container-full">
<h1><a href="${context}/manager?project=${projectName}&job=${jobid}&history">Job History <small>$jobid</small></a></h1>
- </div>
+ </div>
</div>
<div class="page-breadcrumb">
<div class="container-full">
@@ -65,15 +65,15 @@
</div>
</div>
- <div class="container-full">
+ <div class="container-full">
#parse ("azkaban/webapp/servlet/velocity/alerts.vm")
## Time graph and job history table.
- <div class="row">
- <div class="col-xs-12">
- <div class="well well-clear well-sm" id="timeGraph">
+ <div class="row">
+ <div class="col-xs-12">
+ <div class="well well-clear well-sm" id="timeGraph">
</div>
<table id="all-jobs" class="table table-striped table-bordered table-condensed table-hover">
@@ -90,15 +90,15 @@
</tr>
</thead>
<tbody>
- #if ($history)
- #foreach ($job in $history)
+ #if ($history)
+ #foreach ($job in $history)
<tr>
<td class="first">
- #if ($job.attempt > 0)
+ #if ($job.attempt > 0)
<a href="${context}/executor?execid=${job.execId}">${job.execId}.${job.attempt}</a>
- #else
+ #else
<a href="${context}/executor?execid=${job.execId}">${job.execId}</a>
- #end
+ #end
</td>
<td>
<a href="${context}/manager?project=${projectName}&flow=${job.immediateFlowId}&job=${jobid}">${jobid}</a>
@@ -118,29 +118,29 @@
<a href="${context}/executor?execid=${job.execId}&job=${job.jobIdPath}&attempt=${job.attempt}">Logs</a>
</td>
</tr>
- #end
- #else
+ #end
+ #else
<tr>
<td colspan="8">No history</td>
</tr>
- #end
+ #end
</tbody>
</table>
- <ul class="pagination" id="pageSelection">
- <li id="previous" class="first"><a href="${context}/manager?project=${projectName}&job=${jobid}&history&page=${previous.nextPage}&size=${previous.size}"><span class="arrow">←</span>Previous</a></li>
- <li id="page1" #if($page1.selected) class="active" #elseif ($page1.disabled) class="disabled" #end><a href="${context}/manager?project=${projectName}&job=${jobid}&history&page=${page1.nextPage}&size=${page1.size}">${page1.page}</a></li>
- <li id="page2" #if($page2.selected) class="active" #elseif ($page2.disabled) class="disabled" #end><a href="${context}/manager?project=${projectName}&job=${jobid}&history&page=${page2.nextPage}&size=${page2.size}">${page2.page}</a></li>
- <li id="page3" #if($page3.selected) class="active" #elseif ($page3.disabled) class="disabled" #end><a href="${context}/manager?project=${projectName}&job=${jobid}&history&page=${page3.nextPage}&size=${page3.size}">${page3.page}</a></li>
- <li id="page4" #if($page4.selected) class="active" #elseif ($page4.disabled) class="disabled" #end><a href="${context}/manager?project=${projectName}&job=${jobid}&history&page=${page4.nextPage}&size=${page4.size}">${page4.page}</a></li>
- <li id="page5" #if($page5.selected) class="active" #elseif ($page5.disabled) class="disabled" #end><a href="${context}/manager?project=${projectName}&job=${jobid}&history&page=${page5.nextPage}&size=${page5.size}">${page5.page}</a></li>
- <li id="next"><a href="${context}/manager?project=${projectName}&job=${jobid}&history&page=${next.nextPage}&size=${next.size}">Next<span class="arrow">→</span></a></li>
- </ul>
+ <ul class="pagination" id="pageSelection">
+ <li id="previous" class="first"><a href="${context}/manager?project=${projectName}&job=${jobid}&history&page=${previous.nextPage}&size=${previous.size}"><span class="arrow">←</span>Previous</a></li>
+ <li id="page1" #if($page1.selected) class="active" #elseif ($page1.disabled) class="disabled" #end><a href="${context}/manager?project=${projectName}&job=${jobid}&history&page=${page1.nextPage}&size=${page1.size}">${page1.page}</a></li>
+ <li id="page2" #if($page2.selected) class="active" #elseif ($page2.disabled) class="disabled" #end><a href="${context}/manager?project=${projectName}&job=${jobid}&history&page=${page2.nextPage}&size=${page2.size}">${page2.page}</a></li>
+ <li id="page3" #if($page3.selected) class="active" #elseif ($page3.disabled) class="disabled" #end><a href="${context}/manager?project=${projectName}&job=${jobid}&history&page=${page3.nextPage}&size=${page3.size}">${page3.page}</a></li>
+ <li id="page4" #if($page4.selected) class="active" #elseif ($page4.disabled) class="disabled" #end><a href="${context}/manager?project=${projectName}&job=${jobid}&history&page=${page4.nextPage}&size=${page4.size}">${page4.page}</a></li>
+ <li id="page5" #if($page5.selected) class="active" #elseif ($page5.disabled) class="disabled" #end><a href="${context}/manager?project=${projectName}&job=${jobid}&history&page=${page5.nextPage}&size=${page5.size}">${page5.page}</a></li>
+ <li id="next"><a href="${context}/manager?project=${projectName}&job=${jobid}&history&page=${next.nextPage}&size=${next.size}">Next<span class="arrow">→</span></a></li>
+ </ul>
- </div><!-- /.col-xs-12 -->
- </div><!-- /.row -->
+ </div><!-- /.col-xs-12 -->
+ </div><!-- /.row -->
- </div>
+ </div>
#end
- </body>
+ </body>
</html>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/jobpage.vm b/src/main/resources/azkaban/webapp/servlet/velocity/jobpage.vm
index 279a61e..f975cf4 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/jobpage.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/jobpage.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,27 +14,27 @@
* the License.
*#
-<!DOCTYPE html>
+<!DOCTYPE html>
<html lang="en">
- <head>
+ <head>
#parse ("azkaban/webapp/servlet/velocity/style.vm")
#parse ("azkaban/webapp/servlet/velocity/javascript.vm")
- <script type="text/javascript" src="${context}/js/azkaban/view/job-edit.js"></script>
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- var errorMessage = null;
- var successMessage = null;
-
- var projectId = "$project.id";
- var projectName = "$project.name";
- </script>
- </head>
- <body>
-
+ <script type="text/javascript" src="${context}/js/azkaban/view/job-edit.js"></script>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ var errorMessage = null;
+ var successMessage = null;
+
+ var projectId = "$project.id";
+ var projectName = "$project.name";
+ </script>
+ </head>
+ <body>
+
#set ($current_page="all")
#parse ("azkaban/webapp/servlet/velocity/nav.vm")
@@ -42,10 +42,10 @@
#parse ("azkaban/webapp/servlet/velocity/errormsg.vm")
#else
- ## Page header
+ ## Page header
- <div class="az-page-header page-header-bare">
- <div class="container-full">
+ <div class="az-page-header page-header-bare">
+ <div class="container-full">
<div class="row">
<div class="header-title">
<h1><a href="${context}/manager?project=${project.name}&flow=${flowid}&job=${jobid}">Job <small>$jobid</small></a></h1>
@@ -57,7 +57,7 @@
<div class="clearfix"></div>
</div>
</div>
- </div>
+ </div>
</div>
<div class="page-breadcrumb">
<div class="container-full">
@@ -69,110 +69,110 @@
</div>
</div>
- <div class="container-full">
-
+ <div class="container-full">
+
#parse ("azkaban/webapp/servlet/velocity/alerts.vm")
-
- <div class="row row-offcanvas row-offcanvas-right">
- <div class="col-xs-12 col-sm-9">
-
- ## Job details table
-
- <div class="panel panel-default">
- <div class="panel-heading">
- <div class="pull-right">
- <button id="edit-job-btn" class="btn btn-xs btn-primary" onclick='jobEditView.show("${project.name}", "${flowid}", "${jobid}")'>Edit</button>
- </div>
- Job Properties
- </div>
-
- <table class="table table-striped table-bordered properties-table">
- <thead>
- <tr>
- <th class="tb-pname">Parameter Name</th>
- <th class="tb-pvalue">Value</th>
- </tr>
- </thead>
- <tbody>
- #foreach ($parameter in $parameters)
- <tr>
- <td class="property-key">$parameter.first</td>
+
+ <div class="row row-offcanvas row-offcanvas-right">
+ <div class="col-xs-12 col-sm-9">
+
+ ## Job details table
+
+ <div class="panel panel-default">
+ <div class="panel-heading">
+ <div class="pull-right">
+ <button id="edit-job-btn" class="btn btn-xs btn-primary" onclick='jobEditView.show("${project.name}", "${flowid}", "${jobid}")'>Edit</button>
+ </div>
+ Job Properties
+ </div>
+
+ <table class="table table-striped table-bordered properties-table">
+ <thead>
+ <tr>
+ <th class="tb-pname">Parameter Name</th>
+ <th class="tb-pvalue">Value</th>
+ </tr>
+ </thead>
+ <tbody>
+ #foreach ($parameter in $parameters)
+ <tr>
+ <td class="property-key">$parameter.first</td>
<td>$parameter.second</td>
- </tr>
- #end
- </tbody>
- </table>
- </div>
- </div><!-- /col-xs-8 -->
- <div class="col-xs-6 col-sm-3 sidebar-offcanvas">
- <div class="well" id="job-summary">
- <h3>Job <small>$jobid</small></h3>
- <p><strong>Job Type</strong> $jobtype</p>
- </div>
-
- ## Dependencies
-
- <div class="panel panel-default">
- <div class="panel-heading">Dependencies</div>
- <ul class="list-group">
- #if ($dependencies)
- #foreach($dependency in $dependencies)
- <li class="list-group-item">
- <a href="${context}/manager?project=${project.name}&flow=${flowid}&job=$dependency">$dependency</a>
- </li>
- #end
- #else
- <li class="list-group-item">No Dependencies</li>
- #end
- </ul>
- </div><!-- /panel -->
-
- ## Dependents
-
- <div class="panel panel-default">
- <div class="panel-heading">Dependents</div>
- <ul class="list-group">
- #if ($dependents)
- #foreach($dependent in $dependents)
- <li class="list-group-item">
- <a href="${context}/manager?project=${project.name}&flow=${flowid}&job=$dependent">$dependent</a>
- </li>
- #end
- #else
- <li class="list-group-item">No Dependencies</li>
- #end
-
- </ul>
- </div><!-- /panel -->
-
- <div class="panel panel-default">
- <div class="panel-heading">Properties</div>
- <ul class="list-group">
- #if ($properties)
- #foreach($property in $properties)
- <li class="list-group-item">
- <a href="${context}/manager?project=${project.name}&flow=${flowid}&job=${jobid}&prop=$property">$property</a>
- </li>
- #end
- #else
- <li class="list-group-item">No Property Files For This Job</li>
- #end
- </ul>
- </div><!-- /panel -->
- </div><!-- /col-xs-4 -->
- </div><!-- /row -->
+ </tr>
+ #end
+ </tbody>
+ </table>
+ </div>
+ </div><!-- /col-xs-8 -->
+ <div class="col-xs-6 col-sm-3 sidebar-offcanvas">
+ <div class="well" id="job-summary">
+ <h3>Job <small>$jobid</small></h3>
+ <p><strong>Job Type</strong> $jobtype</p>
+ </div>
+
+ ## Dependencies
+
+ <div class="panel panel-default">
+ <div class="panel-heading">Dependencies</div>
+ <ul class="list-group">
+ #if ($dependencies)
+ #foreach($dependency in $dependencies)
+ <li class="list-group-item">
+ <a href="${context}/manager?project=${project.name}&flow=${flowid}&job=$dependency">$dependency</a>
+ </li>
+ #end
+ #else
+ <li class="list-group-item">No Dependencies</li>
+ #end
+ </ul>
+ </div><!-- /panel -->
+
+ ## Dependents
+
+ <div class="panel panel-default">
+ <div class="panel-heading">Dependents</div>
+ <ul class="list-group">
+ #if ($dependents)
+ #foreach($dependent in $dependents)
+ <li class="list-group-item">
+ <a href="${context}/manager?project=${project.name}&flow=${flowid}&job=$dependent">$dependent</a>
+ </li>
+ #end
+ #else
+ <li class="list-group-item">No Dependencies</li>
+ #end
+
+ </ul>
+ </div><!-- /panel -->
+
+ <div class="panel panel-default">
+ <div class="panel-heading">Properties</div>
+ <ul class="list-group">
+ #if ($properties)
+ #foreach($property in $properties)
+ <li class="list-group-item">
+ <a href="${context}/manager?project=${project.name}&flow=${flowid}&job=${jobid}&prop=$property">$property</a>
+ </li>
+ #end
+ #else
+ <li class="list-group-item">No Property Files For This Job</li>
+ #end
+ </ul>
+ </div><!-- /panel -->
+ </div><!-- /col-xs-4 -->
+ </div><!-- /row -->
## Edit job modal.
- <div class="modal modal-wide" id="job-edit-pane">
- <div class="modal-dialog">
- <div class="modal-content">
- <div class="modal-header">
- <button type="button" class="close" data-dismiss="modal" aria-hidden="true" id="close-btn">×</button>
- <h4 class="modal-title">Edit Job</h4>
- </div>
- <div class="modal-body">
- <h4>Job Essentials</h4>
+ <div class="modal modal-wide" id="job-edit-pane">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true" id="close-btn">×</button>
+ <h4 class="modal-title">Edit Job</h4>
+ </div>
+ <div class="modal-body">
+ <h4>Job Essentials</h4>
<table class="table table-bordered table-condensed">
<tbody>
<tr>
@@ -185,33 +185,33 @@
</tr>
</tbody>
<table>
- <h4>General Job Settings</h4>
- <p><strong>Be Aware:</strong> A job may be shared by multiple flows. The change will be global!</p>
- <table id="generalProps" class="table table-striped table-bordered">
- <thead>
- <tr>
- <th class="property-key">Name</th>
- <th>Value</th>
- </tr>
- </thead>
- <tbody>
- <tr id="addRow">
- <td id="addRow-col" colspan="2">
- <button type="button" class="btn btn-xs btn-success" id="add-btn">Add Row</button>
- </td>
- </tr>
- </tbody>
- </table>
- </div>
- <div class="modal-footer">
- <button type="button" class="btn btn-default" id="cancel-btn" data-dismiss="modal">Cancel</button>
- <button type="button" class="btn btn-primary" id="set-btn">Set/Change Job Description</button>
- </div>
- </div>
- </div>
- </div>
-
- </div><!-- /container-full -->
+ <h4>General Job Settings</h4>
+ <p><strong>Be Aware:</strong> A job may be shared by multiple flows. The change will be global!</p>
+ <table id="generalProps" class="table table-striped table-bordered">
+ <thead>
+ <tr>
+ <th class="property-key">Name</th>
+ <th>Value</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr id="addRow">
+ <td id="addRow-col" colspan="2">
+ <button type="button" class="btn btn-xs btn-success" id="add-btn">Add Row</button>
+ </td>
+ </tr>
+ </tbody>
+ </table>
+ </div>
+ <div class="modal-footer">
+ <button type="button" class="btn btn-default" id="cancel-btn" data-dismiss="modal">Cancel</button>
+ <button type="button" class="btn btn-primary" id="set-btn">Set/Change Job Description</button>
+ </div>
+ </div>
+ </div>
+ </div>
+
+ </div><!-- /container-full -->
#end
- </body>
+ </body>
</html>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/login.vm b/src/main/resources/azkaban/webapp/servlet/velocity/login.vm
index 0067a02..b0b47dc 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/login.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/login.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -22,9 +22,9 @@
#parse("azkaban/webapp/servlet/velocity/javascript.vm")
<script type="text/javascript" src="${context}/js/azkaban/view/login.js"></script>
- <script type="text/javascript">
- var contextURL = "${context}";
- </script>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ </script>
</head>
<body>
@@ -32,7 +32,7 @@
#set ($navbar_disabled = 1)
#parse ("azkaban/webapp/servlet/velocity/nav.vm")
- <div class="container-full">
+ <div class="container-full">
<div class="login">
<div class="alert alert-danger" id="error-msg"></div>
<div class="well">
@@ -47,7 +47,7 @@
</div><!-- /.well -->
</div><!-- /.login -->
- </div><!-- /container -->
+ </div><!-- /container -->
</body>
</html>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/macros.vm b/src/main/resources/azkaban/webapp/servlet/velocity/macros.vm
index 5c99c64..248dd75 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/macros.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/macros.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/messagedialog.vm b/src/main/resources/azkaban/webapp/servlet/velocity/messagedialog.vm
index ecf86e0..347ab5e 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/messagedialog.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/messagedialog.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,21 +14,21 @@
* the License.
*#
- <script type="text/javascript" src="${context}/js/azkaban/view/message-dialog.js"></script>
-
- <div class="modal" id="azkaban-message-dialog">
- <div class="modal-dialog">
- <div class="modal-content">
- <div class="modal-header">
- <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
- <h4 class="modal-title" id="azkaban-message-dialog-title"></h4>
- </div><!-- /modal-header -->
- <div class="modal-body">
- <p id="azkaban-message-dialog-text"></p>
- </div>
- <div class="modal-footer">
- <button type="button" class="btn btn-primary" data-dismiss="modal">Continue</button>
- </div>
- </div>
- </div>
- </div>
+ <script type="text/javascript" src="${context}/js/azkaban/view/message-dialog.js"></script>
+
+ <div class="modal" id="azkaban-message-dialog">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title" id="azkaban-message-dialog-title"></h4>
+ </div><!-- /modal-header -->
+ <div class="modal-body">
+ <p id="azkaban-message-dialog-text"></p>
+ </div>
+ <div class="modal-footer">
+ <button type="button" class="btn btn-primary" data-dismiss="modal">Continue</button>
+ </div>
+ </div>
+ </div>
+ </div>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/nav.vm b/src/main/resources/azkaban/webapp/servlet/velocity/nav.vm
index 91e7e66..390bef3 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/nav.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/nav.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -22,37 +22,37 @@
<div class="navbar navbar-inverse navbar-static-top">
<div class="container-full">
<div class="navbar-header">
- <div class="navbar-logo">
- <a href="${context}/">Azkaban</a>
- </div>
+ <div class="navbar-logo">
+ <a href="${context}/">Azkaban</a>
+ </div>
+ </div>
+ <div class="navbar-left navbar-enviro">
+ <div class="navbar-enviro-name">${azkaban_name}</div>
+ <div class="navbar-enviro-server">${azkaban_label}</div>
</div>
- <div class="navbar-left navbar-enviro">
- <div class="navbar-enviro-name">${azkaban_name}</div>
- <div class="navbar-enviro-server">${azkaban_label}</div>
- </div>
<div class="navbar-collapse collapse">
#if ($navbar_disabled != 1)
<ul class="nav navbar-nav">
- <li#if($current_page == 'all') class="active"#end onClick="navMenuClick('$!context/')"><a href="$!context/index">Projects</a></li>
- <li#if($current_page == 'schedule') class="active"#end onClick="navMenuClick('$!context/schedule')"><a href="$!context/schedule">Scheduling</a></li>
- <!--<li#if($current_page == 'triggers') class="active"#end onClick="navMenuClick('$!context/triggers')"><a href="$!context/triggers">Triggers</a></li>-->
- <li#if($current_page == 'executing') class="active"#end onClick="navMenuClick('$!context/executor')"><a href="$!context/executor">Executing</a></li>
- <li#if($current_page == 'history') class="active"#end onClick="navMenuClick('$!context/history')"><a href="$!context/history">History</a></li>
- #foreach ($viewer in $viewers)
- #if (!$viewer.hidden)
- <li#if($current_page == $viewer.pluginName) class="active"#end onClick="navMenuClick('$!context/$viewer.pluginPath')"><a href="$!context/$viewer.pluginPath">$viewer.pluginName</a></li>
- #end
- #end
+ <li#if($current_page == 'all') class="active"#end onClick="navMenuClick('$!context/')"><a href="$!context/index">Projects</a></li>
+ <li#if($current_page == 'schedule') class="active"#end onClick="navMenuClick('$!context/schedule')"><a href="$!context/schedule">Scheduling</a></li>
+ <!--<li#if($current_page == 'triggers') class="active"#end onClick="navMenuClick('$!context/triggers')"><a href="$!context/triggers">Triggers</a></li>-->
+ <li#if($current_page == 'executing') class="active"#end onClick="navMenuClick('$!context/executor')"><a href="$!context/executor">Executing</a></li>
+ <li#if($current_page == 'history') class="active"#end onClick="navMenuClick('$!context/history')"><a href="$!context/history">History</a></li>
+ #foreach ($viewer in $viewers)
+ #if (!$viewer.hidden)
+ <li#if($current_page == $viewer.pluginName) class="active"#end onClick="navMenuClick('$!context/$viewer.pluginPath')"><a href="$!context/$viewer.pluginPath">$viewer.pluginName</a></li>
+ #end
+ #end
- #foreach ($trigger in $triggerPlugins)
- #if (!$trigger.hidden)
- <li#if($current_page == $trigger.pluginName) class="active"#end onClick="navMenuClick('$!context/$trigger.pluginPath')"><a href="$!context/$trigger.pluginPath">$trigger.pluginName</a></li>
- #end
- #end
+ #foreach ($trigger in $triggerPlugins)
+ #if (!$trigger.hidden)
+ <li#if($current_page == $trigger.pluginName) class="active"#end onClick="navMenuClick('$!context/$trigger.pluginPath')"><a href="$!context/$trigger.pluginPath">$trigger.pluginName</a></li>
+ #end
+ #end
</ul>
<ul class="nav navbar-nav navbar-right">
<li class="dropdown">
- <a href="#" class="dropdown-toggle" data-toggle="dropdown">${user_id} <b class="caret"></b></a>
+ <a href="#" class="dropdown-toggle" data-toggle="dropdown">${user_id} <b class="caret"></b></a>
<ul class="dropdown-menu">
<li><a href="$!context?logout">Logout</a></li>
</ul>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/permissionspage.vm b/src/main/resources/azkaban/webapp/servlet/velocity/permissionspage.vm
index 5b424de..c63688e 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/permissionspage.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/permissionspage.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,26 +14,26 @@
* the License.
*#
-<!DOCTYPE html>
+<!DOCTYPE html>
<html lang="en">
- <head>
+ <head>
#parse ("azkaban/webapp/servlet/velocity/style.vm")
#parse ("azkaban/webapp/servlet/velocity/javascript.vm")
- <script type="text/javascript" src="${context}/js/azkaban/view/project-permissions.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/project-modals.js"></script>
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- var errorMessage = null;
- var successMessage = null;
- var userId = "$username";
- var projectName = "$project.name";
- </script>
- </head>
- <body>
+ <script type="text/javascript" src="${context}/js/azkaban/view/project-permissions.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/project-modals.js"></script>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ var errorMessage = null;
+ var successMessage = null;
+ var userId = "$username";
+ var projectName = "$project.name";
+ </script>
+ </head>
+ <body>
#set ($current_page = "all")
#parse ("azkaban/webapp/servlet/velocity/nav.vm")
@@ -42,8 +42,8 @@
#parse ("azkaban/webapp/servlet/velocity/errormsg.vm")
#else
- ## Page header.
-
+ ## Page header.
+
#parse ("azkaban/webapp/servlet/velocity/projectpageheader.vm")
## Page content.
@@ -52,13 +52,13 @@
#parse ("azkaban/webapp/servlet/velocity/alerts.vm")
- <div class="row row-offcanvas row-offcanvas-right">
- <div class="col-xs-12 col-sm-9">
-
- #set ($project_page = "permissions")
- #parse ("azkaban/webapp/servlet/velocity/projectnav.vm")
+ <div class="row row-offcanvas row-offcanvas-right">
+ <div class="col-xs-12 col-sm-9">
+
+ #set ($project_page = "permissions")
+ #parse ("azkaban/webapp/servlet/velocity/projectnav.vm")
- ## User permissions table.
+ ## User permissions table.
<div class="panel panel-success">
<div class="panel-heading">
@@ -77,56 +77,56 @@
<th class="tb-read">Read</th>
<th class="tb-write">Write</th>
<th class="tb-execute">Execute</th>
- <th class="tb-schedule">Schedule</th>
- #if ($isAdmin)
+ <th class="tb-schedule">Schedule</th>
+ #if ($isAdmin)
<th class="tb-action"></th>
#end
</tr>
</thead>
<tbody>
- #if ($permissions)
- #foreach ($perm in $permissions)
+ #if ($permissions)
+ #foreach ($perm in $permissions)
<tr>
<td class="tb-username">
- #if ($perm.first == $username)
- ${perm.first} <span class="sublabel">(you)</span>
- #else
- $perm.first
- #end
+ #if ($perm.first == $username)
+ ${perm.first} <span class="sublabel">(you)</span>
+ #else
+ $perm.first
+ #end
</td>
- #if ($perm.second.isPermissionNameSet("ADMIN"))
+ #if ($perm.second.isPermissionNameSet("ADMIN"))
<td><input id="${perm.first}-admin-checkbox" type="checkbox" name="admin" disabled="disabled" checked="true"></input></td>
<td><input id="${perm.first}-read-checkbox" type="checkbox" name="read" disabled="disabled" checked="true"></input></td>
<td><input id="${perm.first}-write-checkbox" type="checkbox" name="write" disabled="disabled" checked="true"></input></td>
<td><input id="${perm.first}-execute-checkbox" type="checkbox" name="execute" disabled="disabled" checked="true"></input></td>
<td><input id="${perm.first}-schedule-checkbox" type="checkbox" name="schedule" disabled="disabled" checked="true"></input></td>
- #else
+ #else
<td><input id="${perm.first}-admin-checkbox" type="checkbox" name="admin" disabled="disabled"></input></td>
<td><input id="${perm.first}-read-checkbox" type="checkbox" name="read" disabled="disabled" #if ($perm.second.isPermissionNameSet("READ")) checked="true" #end></input></td>
<td><input id="${perm.first}-write-checkbox" type="checkbox" name="write" disabled="disabled" #if ($perm.second.isPermissionNameSet("WRITE")) checked="true" #end></input></td>
<td><input id="${perm.first}-execute-checkbox" type="checkbox" name="execute" disabled="disabled" #if ($perm.second.isPermissionNameSet("EXECUTE")) checked="true" #end></input></td>
<td><input id="${perm.first}-schedule-checkbox" type="checkbox" name="schedule" disabled="disabled" #if ($perm.second.isPermissionNameSet("SCHEDULE")) checked="true" #end></input></td>
- #end
+ #end
- #if ($isAdmin)
+ #if ($isAdmin)
<td><button id="$perm.first" #if($perm.first == $username) disabled="disabled" class="btn btn-xs btn-disabled" #else class="btn btn-xs btn-default" #end >Change</button></td>
- #end
+ #end
</tr>
- #end
- #else
+ #end
+ #else
#if ($isAdmin)
<tr><td colspan="7">No Users Found.</td></tr>
#else
<tr><td colspan="6">No Users Found.</td></tr>
#end
- #end
+ #end
</tbody>
</table>
</div>
- ## Group permissions table.
-
+ ## Group permissions table.
+
<div class="panel panel-warning">
<div class="panel-heading">
Group
@@ -145,167 +145,167 @@
<th class="tb-write">Write</th>
<th class="tb-execute">Execute</th>
<th class="tb-schedule">Schedule</th>
- #if ($isAdmin)
+ #if ($isAdmin)
<th class="tb-action"></th>
- #end
+ #end
</tr>
</thead>
<tbody>
- #if ($groupPermissions)
- #foreach ($perm in $groupPermissions)
+ #if ($groupPermissions)
+ #foreach ($perm in $groupPermissions)
<tr>
<td class="tb-username">
- #if ($perm.first == $username)
- ${perm.first} <span class="sublabel">(you)</span>
- #else
- $perm.first
- #end
+ #if ($perm.first == $username)
+ ${perm.first} <span class="sublabel">(you)</span>
+ #else
+ $perm.first
+ #end
</td>
- #if ($perm.second.isPermissionNameSet("ADMIN"))
+ #if ($perm.second.isPermissionNameSet("ADMIN"))
<td><input id="group-${perm.first}-admin-checkbox" type="checkbox" name="admin" disabled="disabled" checked="true"></input></td>
<td><input id="group-${perm.first}-read-checkbox" type="checkbox" name="read" disabled="disabled" checked="true"></input></td>
<td><input id="group-${perm.first}-write-checkbox" type="checkbox" name="write" disabled="disabled" checked="true"></input></td>
<td><input id="group-${perm.first}-execute-checkbox" type="checkbox" name="execute" disabled="disabled" checked="true"></input></td>
<td><input id="group-${perm.first}-schedule-checkbox" type="checkbox" name="schedule" disabled="disabled" checked="true"></input></td>
- #else
+ #else
<td><input id="group-${perm.first}-admin-checkbox" type="checkbox" name="admin" disabled="disabled"></input></td>
<td><input id="group-${perm.first}-read-checkbox" type="checkbox" name="read" disabled="disabled" #if ($perm.second.isPermissionNameSet("READ")) checked="true" #end></input></td>
<td><input id="group-${perm.first}-write-checkbox" type="checkbox" name="write" disabled="disabled" #if ($perm.second.isPermissionNameSet("WRITE")) checked="true" #end></input></td>
<td><input id="group-${perm.first}-execute-checkbox" type="checkbox" name="execute" disabled="disabled" #if ($perm.second.isPermissionNameSet("EXECUTE")) checked="true" #end></input></td>
<td><input id="group-${perm.first}-schedule-checkbox" type="checkbox" name="schedule" disabled="disabled" #if ($perm.second.isPermissionNameSet("SCHEDULE")) checked="true" #end></input></td>
- #end
+ #end
- #if ($isAdmin)
+ #if ($isAdmin)
<td><button id="group-$perm.first" class="btn btn-xs btn-default">Change</button></td>
- #end
+ #end
</tr>
- #end
- #else
+ #end
+ #else
#if ($isAdmin)
<tr><td colspan="7">No Groups Found.</td></tr>
#else
<tr><td colspan="6">No Groups Found.</td></tr>
#end
- #end
+ #end
</tbody>
</table>
</div>
-
- ## Proxy users table.
-
+
+ ## Proxy users table.
+
<div class="panel panel-info">
<div class="panel-heading">
Proxy Users
#if ($isAdmin)
- <div class="pull-right">
+ <div class="pull-right">
<button id="addProxyUser" class="btn btn-xs btn-info">Add</button>
</div>
- #end
+ #end
</div>
<table class="table table-striped permission-table" id="proxy-user-table">
<thead>
<tr>
<th class="tb-username">Proxy User</th>
- #if ($isAdmin)
+ #if ($isAdmin)
<th class="tb-action"></th>
- #end
+ #end
</tr>
</thead>
<tbody>
- #if ($proxyUsers)
- #foreach ($proxyUser in $proxyUsers)
+ #if ($proxyUsers)
+ #foreach ($proxyUser in $proxyUsers)
<tr>
<td class="tb-username">#if($proxyUser == $username) ${proxyUser} <span class="sublabel">(you)</span> #else $proxyUser #end</td>
- #if ($isAdmin)
+ #if ($isAdmin)
<td><button id="proxy-${proxyUser}" name="${proxyUser}" class="btn btn-xs btn-danger">Remove</button></td>
- #end
+ #end
</tr>
- #end
- #else
+ #end
+ #else
#if ($isAdmin)
<tr><td colspan="2">No Proxy User Found.</td></tr>
#else
<tr><td>No Proxy User Found.</td></tr>
#end
- #end
+ #end
</tbody>
</table>
</div>
-
- </div><!-- /col-xs-8 -->
- <div class="col-xs-6 col-sm-3 sidebar-offcanvas">
- #parse ("azkaban/webapp/servlet/velocity/projectsidebar.vm")
- </div><!-- /col-xs-4 -->
- </div><!-- /row -->
+
+ </div><!-- /col-xs-8 -->
+ <div class="col-xs-6 col-sm-3 sidebar-offcanvas">
+ #parse ("azkaban/webapp/servlet/velocity/projectsidebar.vm")
+ </div><!-- /col-xs-4 -->
+ </div><!-- /row -->
## Remove proxy user modal dialog.
- <div class="modal" id="remove-proxy">
- <div class="modal-dialog">
- <div class="modal-content">
- <div class="modal-header">
- <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
- <h4 class="modal-title">Remove Proxy User</h4>
- </div>
- <div class="modal-body">
- <div class="alert alert-danger" id="remove-proxy-error-msg"></div>
+ <div class="modal" id="remove-proxy">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title">Remove Proxy User</h4>
+ </div>
+ <div class="modal-body">
+ <div class="alert alert-danger" id="remove-proxy-error-msg"></div>
<p><strong>Warning:</strong> <span id="remove-proxy-msg"></span></p>
- </div>
- <div class="modal-footer">
- <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
- <button type="button" class="btn btn-danger" id="remove-proxy-btn">Remove Proxy User</a>
- </div>
- </div>
- </div>
- </div>
+ </div>
+ <div class="modal-footer">
+ <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
+ <button type="button" class="btn btn-danger" id="remove-proxy-btn">Remove Proxy User</button>
+ </div>
+ </div>
+ </div>
+ </div>
## Add proxy user modal dialog.
- <div class="modal" id="add-proxy">
- <div class="modal-dialog">
- <div class="modal-content">
- <div class="modal-header">
- <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
- <h4 class="modal-title">Add Proxy User</h4>
- </div>
- <div class="modal-body">
- <div class="alert alert-danger" id="add-proxy-error-msg"></div>
- <fieldset class="form-horizontal">
- <div class="form-group">
- <label for="path" class="col-sm-2 control-label">Proxy</label>
- <div class="col-sm-10">
- <input type="text" name="proxyid" id="proxy-user-box" class="form-control">
- </div>
- </div>
- </fieldset>
- </div>
- <div class="modal-footer">
- <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
- <button type="button" class="btn btn-primary" id="add-proxy-btn">Add Proxy User</button>
- </div>
- </div>
- </div>
- </div>
-
+ <div class="modal" id="add-proxy">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title">Add Proxy User</h4>
+ </div>
+ <div class="modal-body">
+ <div class="alert alert-danger" id="add-proxy-error-msg"></div>
+ <fieldset class="form-horizontal">
+ <div class="form-group">
+ <label for="path" class="col-sm-2 control-label">Proxy</label>
+ <div class="col-sm-10">
+ <input type="text" name="proxyid" id="proxy-user-box" class="form-control">
+ </div>
+ </div>
+ </fieldset>
+ </div>
+ <div class="modal-footer">
+ <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
+ <button type="button" class="btn btn-primary" id="add-proxy-btn">Add Proxy User</button>
+ </div>
+ </div>
+ </div>
+ </div>
+
## Change permissions modal dialog.
- <div class="modal" id="change-permission">
- <div class="modal-dialog">
- <div class="modal-content">
- <div class="modal-header">
- <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
- <h4 class="modal-title" id="change-title">Change Permissions</h4>
- </div>
- <div class="modal-body">
+ <div class="modal" id="change-permission">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title" id="change-title">Change Permissions</h4>
+ </div>
+ <div class="modal-body">
<div class="alert alert-danger" id="change-permission-error-msg"></div>
- <fieldset class="form-horizontal">
- <div class="form-group">
- <label for="path" class="col-sm-2 control-label">User</label>
- <div class="col-sm-10">
- <input type="text" name="userid" id="user-box" class="form-control">
- </div>
- </div>
- <div class="form-group">
+ <fieldset class="form-horizontal">
+ <div class="form-group">
+ <label for="path" class="col-sm-2 control-label">User</label>
+ <div class="col-sm-10">
+ <input type="text" name="userid" id="user-box" class="form-control">
+ </div>
+ </div>
+ <div class="form-group">
<div class="col-sm-offset-2 col-sm-10">
<label class="checkbox-inline">
<input id="admin-change" name="admin" type="checkbox">
@@ -328,20 +328,20 @@
Schedule
</label>
</div>
- </div>
- </fieldset>
- </div>
- <div class="modal-footer">
- <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
- <button type="button" class="btn btn-primary" id="change-btn">Commit</button>
- </div>
- </div>
- </div>
- </div>
+ </div>
+ </fieldset>
+ </div>
+ <div class="modal-footer">
+ <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
+ <button type="button" class="btn btn-primary" id="change-btn">Commit</button>
+ </div>
+ </div>
+ </div>
+ </div>
- #parse ("azkaban/webapp/servlet/velocity/projectmodals.vm")
- #parse ("azkaban/webapp/servlet/velocity/invalidsessionmodal.vm")
- </div><!-- /container-full -->
+ #parse ("azkaban/webapp/servlet/velocity/projectmodals.vm")
+ #parse ("azkaban/webapp/servlet/velocity/invalidsessionmodal.vm")
+ </div><!-- /container-full -->
#end
- </body>
+ </body>
</html>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/projectlogpage.vm b/src/main/resources/azkaban/webapp/servlet/velocity/projectlogpage.vm
index 08dbb88..5e6a62b 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/projectlogpage.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/projectlogpage.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,29 +14,29 @@
* the License.
*#
-<!DOCTYPE html>
+<!DOCTYPE html>
<html lang="en">
- <head>
+ <head>
#parse ("azkaban/webapp/servlet/velocity/style.vm")
#parse ("azkaban/webapp/servlet/velocity/javascript.vm")
- <script type="text/javascript" src="${context}/js/azkaban/util/date.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/util/ajax.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/project-logs.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/project-modals.js"></script>
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- var errorMessage = null;
- var successMessage = null;
-
- var projectId = ${project.id};
- var projectName = "$project.name";
- </script>
- </head>
- <body>
+ <script type="text/javascript" src="${context}/js/azkaban/util/date.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/util/ajax.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/project-logs.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/project-modals.js"></script>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ var errorMessage = null;
+ var successMessage = null;
+
+ var projectId = ${project.id};
+ var projectName = "$project.name";
+ </script>
+ </head>
+ <body>
#set ($current_page="all")
#parse ("azkaban/webapp/servlet/velocity/nav.vm")
@@ -45,29 +45,29 @@
#parse ("azkaban/webapp/servlet/velocity/errormsg.vm")
#else
- ## Page header.
-
+ ## Page header.
+
#parse ("azkaban/webapp/servlet/velocity/projectpageheader.vm")
- ## Page content.
+ ## Page content.
<div class="container-full">
#parse ("azkaban/webapp/servlet/velocity/alerts.vm")
- <div class="row row-offcanvas row-offcanvas-right">
- <div class="col-xs-12 col-sm-9">
-
- #set ($project_page = "logs")
- #parse ("azkaban/webapp/servlet/velocity/projectnav.vm")
-
- <div class="panel panel-default" id="flow-tabs">
- <div class="panel-heading">
- <div class="pull-right" id="project-options">
- <button type="button" id="updateLogBtn" class="btn btn-xs btn-info">Refresh</button>
- </div>
- Audit Logs
- </div>
+ <div class="row row-offcanvas row-offcanvas-right">
+ <div class="col-xs-12 col-sm-9">
+
+ #set ($project_page = "logs")
+ #parse ("azkaban/webapp/servlet/velocity/projectnav.vm")
+
+ <div class="panel panel-default" id="flow-tabs">
+ <div class="panel-heading">
+ <div class="pull-right" id="project-options">
+ <button type="button" id="updateLogBtn" class="btn btn-xs btn-info">Refresh</button>
+ </div>
+ Audit Logs
+ </div>
<table class="table table-striped" id="logTable">
<thead>
<tr>
@@ -79,17 +79,17 @@
</thead>
<tbody>
</tbody>
- </table>
- </div>
- </div>
- <div class="col-xs-6 col-sm-3 sidebar-offcanvas">
- #parse ("azkaban/webapp/servlet/velocity/projectsidebar.vm")
- </div>
- </div>
+ </table>
+ </div>
+ </div>
+ <div class="col-xs-6 col-sm-3 sidebar-offcanvas">
+ #parse ("azkaban/webapp/servlet/velocity/projectsidebar.vm")
+ </div>
+ </div>
- #parse ("azkaban/webapp/servlet/velocity/projectmodals.vm")
+ #parse ("azkaban/webapp/servlet/velocity/projectmodals.vm")
- </div><!-- /container-full -->
+ </div><!-- /container-full -->
#end
- </body>
+ </body>
</html>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/projectmodals.vm b/src/main/resources/azkaban/webapp/servlet/velocity/projectmodals.vm
index 2dc064a..ccbbbdd 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/projectmodals.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/projectmodals.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -16,56 +16,56 @@
## Upload project modal
- <div class="modal" id="upload-project-modal">
- <div class="modal-dialog">
- <div class="modal-content">
- <form id="upload-project-form" enctype="multipart/form-data" method="post" action="$!context/manager">
- <div class="modal-header">
- <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
- <h4 class="modal-title">Upload Project Files</h4>
- </div>
- <div class="modal-body">
- <div class="alert alert-danger" id="upload-project-modal-error-msg">$error_msg</div>
- <fieldset class="form-horizontal">
- <div class="form-group">
- <label for="file" class="col-sm-3 control-label">Job Archive</label>
- <div class="col-sm-9">
- <input type="file" class="form-control" id="file" name="file">
- </div>
- </div>
- </fieldset>
- </div>
- <div class="modal-footer">
- <input type="hidden" name="project" value="$project.name">
- <input type="hidden" name="action" value="upload">
- <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
- <button type="button" class="btn btn-primary" id="upload-project-btn">Upload</button>
- </div>
- </form>
- </div>
- </div>
- </div>
+ <div class="modal" id="upload-project-modal">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <form id="upload-project-form" enctype="multipart/form-data" method="post" action="$!context/manager">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title">Upload Project Files</h4>
+ </div>
+ <div class="modal-body">
+ <div class="alert alert-danger" id="upload-project-modal-error-msg">$error_msg</div>
+ <fieldset class="form-horizontal">
+ <div class="form-group">
+ <label for="file" class="col-sm-3 control-label">Job Archive</label>
+ <div class="col-sm-9">
+ <input type="file" class="form-control" id="file" name="file">
+ </div>
+ </div>
+ </fieldset>
+ </div>
+ <div class="modal-footer">
+ <input type="hidden" name="project" value="$project.name">
+ <input type="hidden" name="action" value="upload">
+ <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
+ <button type="button" class="btn btn-primary" id="upload-project-btn">Upload</button>
+ </div>
+ </form>
+ </div>
+ </div>
+ </div>
- ## Delete project modal.
-
- <div class="modal" id="delete-project-modal">
- <div class="modal-dialog">
- <div class="modal-content">
- <div class="modal-header">
- <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
- <h4 class="modal-title">Delete Project</h4>
- </div>
- <div class="modal-body">
- <p><strong>Warning:</strong> This project will be deleted and may not be recoverable.</p>
- </div>
- <div class="modal-footer">
- <form id="delete-form">
- <input type="hidden" name="project" value="$project.name">
- <input type="hidden" name="delete" value="true">
- <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
- <button type="button" class="btn btn-danger" id="delete-btn">Delete Project</button>
- </form>
- </div>
- </div>
- </div>
- </div>
+ ## Delete project modal.
+
+ <div class="modal" id="delete-project-modal">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title">Delete Project</h4>
+ </div>
+ <div class="modal-body">
+ <p><strong>Warning:</strong> This project will be deleted and may not be recoverable.</p>
+ </div>
+ <div class="modal-footer">
+ <form id="delete-form">
+ <input type="hidden" name="project" value="$project.name">
+ <input type="hidden" name="delete" value="true">
+ <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
+ <button type="button" class="btn btn-danger" id="delete-btn">Delete Project</button>
+ </form>
+ </div>
+ </div>
+ </div>
+ </div>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/projectnav.vm b/src/main/resources/azkaban/webapp/servlet/velocity/projectnav.vm
index 591a2d1..24b3e24 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/projectnav.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/projectnav.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,10 +14,10 @@
* the License.
*#
- <ul class="nav nav-tabs nav-sm">
- <li#if($project_page == 'flows') class="active"#end><a href="${context}/manager?project=${project.name}">Flows</a></li>
- <li#if($project_page == 'permissions') class="active"#end><a id="project-permission-btn" href="${context}/manager?project=${project.name}&permissions">Permissions</a></li>
- #if ($admin)
- <li#if($project_page == 'logs') class="active"#end><a id="project-logs-btn" href="${context}/manager?project=${project.name}&logs">Project Logs</a></li>
- #end
- </ul>
+ <ul class="nav nav-tabs nav-sm">
+ <li#if($project_page == 'flows') class="active"#end><a href="${context}/manager?project=${project.name}">Flows</a></li>
+ <li#if($project_page == 'permissions') class="active"#end><a id="project-permission-btn" href="${context}/manager?project=${project.name}&permissions">Permissions</a></li>
+ #if ($admin)
+ <li#if($project_page == 'logs') class="active"#end><a id="project-logs-btn" href="${context}/manager?project=${project.name}&logs">Project Logs</a></li>
+ #end
+ </ul>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/projectpage.vm b/src/main/resources/azkaban/webapp/servlet/velocity/projectpage.vm
index 50e2606..360c9db 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/projectpage.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/projectpage.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,32 +14,32 @@
* the License.
*#
-<!DOCTYPE html>
+<!DOCTYPE html>
<html lang="en">
- <head>
+ <head>
#parse ("azkaban/webapp/servlet/velocity/style.vm")
#parse ("azkaban/webapp/servlet/velocity/javascript.vm")
#parse ("azkaban/webapp/servlet/velocity/svgflowincludes.vm")
- <script type="text/javascript" src="${context}/js/moment.min.js"></script>
- <script type="text/javascript" src="${context}/js/bootstrap-datetimepicker.min.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/flow-execute-dialog.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/project.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/project-modals.js"></script>
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- var errorMessage = null;
- var successMessage = null;
-
- var projectId = ${project.id};
- var execAccess = ${exec};
- var projectName = "$project.name";
- </script>
- <link rel="stylesheet" type="text/css" href="${context}/css/bootstrap-datetimepicker.css" />
- </head>
- <body>
+ <script type="text/javascript" src="${context}/js/moment.min.js"></script>
+ <script type="text/javascript" src="${context}/js/bootstrap-datetimepicker.min.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/flow-execute-dialog.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/project.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/project-modals.js"></script>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ var errorMessage = null;
+ var successMessage = null;
+
+ var projectId = ${project.id};
+ var execAccess = ${exec};
+ var projectName = "$project.name";
+ </script>
+ <link rel="stylesheet" type="text/css" href="${context}/css/bootstrap-datetimepicker.css" />
+ </head>
+ <body>
#set ($current_page="all")
#parse ("azkaban/webapp/servlet/velocity/nav.vm")
@@ -48,28 +48,28 @@
#parse ("azkaban/webapp/servlet/velocity/errormsg.vm")
#else
- ## Page header.
-
+ ## Page header.
+
#parse ("azkaban/webapp/servlet/velocity/projectpageheader.vm")
<div class="container-full">
-
+
#parse ("azkaban/webapp/servlet/velocity/alerts.vm")
## Page content.
- <div class="row row-offcanvas row-offcanvas-right">
- <div class="col-xs-12 col-sm-9" id="flow-tabs">
-
- #set ($project_page = "flows")
- #parse ("azkaban/webapp/servlet/velocity/projectnav.vm")
+ <div class="row row-offcanvas row-offcanvas-right">
+ <div class="col-xs-12 col-sm-9" id="flow-tabs">
+
+ #set ($project_page = "flows")
+ #parse ("azkaban/webapp/servlet/velocity/projectnav.vm")
<div id="flow-tabs">
- #if ($flows)
- #foreach ($flow in $flows)
+ #if ($flows)
+ #foreach ($flow in $flows)
<div class="panel panel-default" flow="${flow.id}" project="${project.name}">
<div class="panel-heading flow-expander" id="${flow.id}">
- #if (${exec})
+ #if (${exec})
<div class="pull-right">
<button type="button" class="btn btn-xs btn-success execute-flow" flowId="${flow.id}">Execute Flow</button>
<a href="${context}/manager?project=${project.name}&flow=${flow.id}#executions" class="btn btn-info btn-xs">Executions</a>
@@ -83,28 +83,28 @@
<ul class="list-group list-group-collapse expanded-flow-job-list" id="${flow.id}-tbody"></ul>
</div>
</div>
- #end
- #else
+ #end
+ #else
<div class="callout callout-default">
<h4>No Flows</h4>
<p>No flows have been uploaded to this project yet.</p>
</div>
- #end
+ #end
</div><!-- /#flow-tabs -->
- </div><!-- /col-xs-8 -->
+ </div><!-- /col-xs-8 -->
- <div class="col-xs-6 col-sm-3 sidebar-offcanvas">
- #parse ("azkaban/webapp/servlet/velocity/projectsidebar.vm")
- </div><!-- /col-xs-4 -->
- </div><!-- /row -->
+ <div class="col-xs-6 col-sm-3 sidebar-offcanvas">
+ #parse ("azkaban/webapp/servlet/velocity/projectsidebar.vm")
+ </div><!-- /col-xs-4 -->
+ </div><!-- /row -->
- #parse ("azkaban/webapp/servlet/velocity/projectmodals.vm")
- #parse ("azkaban/webapp/servlet/velocity/invalidsessionmodal.vm")
- #parse ("azkaban/webapp/servlet/velocity/flowexecutionpanel.vm")
- #parse ("azkaban/webapp/servlet/velocity/messagedialog.vm")
+ #parse ("azkaban/webapp/servlet/velocity/projectmodals.vm")
+ #parse ("azkaban/webapp/servlet/velocity/invalidsessionmodal.vm")
+ #parse ("azkaban/webapp/servlet/velocity/flowexecutionpanel.vm")
+ #parse ("azkaban/webapp/servlet/velocity/messagedialog.vm")
- </div><!-- /container -->
+ </div><!-- /container -->
#end
- </body>
+ </body>
</html>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/projectpageheader.vm b/src/main/resources/azkaban/webapp/servlet/velocity/projectpageheader.vm
index 80a4a08..d528ba9 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/projectpageheader.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/projectpageheader.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,8 +14,8 @@
* the License.
*#
- <div class="az-page-header">
- <div class="container-full">
+ <div class="az-page-header">
+ <div class="container-full">
<div class="row">
<div class="header-title" id="project-page-header">
<h1><a href="${context}/manager?project=${project.name}">Project <small>$project.name</small></a></h1>
@@ -31,5 +31,5 @@
</div>
</div>
</div>
- </div>
- </div>
+ </div>
+ </div>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/projectsidebar.vm b/src/main/resources/azkaban/webapp/servlet/velocity/projectsidebar.vm
index 5cdff7a..c730d8d 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/projectsidebar.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/projectsidebar.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,24 +14,24 @@
* the License.
*#
- <div class="well" id="project-sidebar">
- <h3>$project.name</h3>
- <p class="editable" id="project-description">$project.description</p>
- <div id="project-description-form" class="editable-form">
- <div class="input-group">
- <input type="text" class="form-control input-sm" id="project-description-edit" placeholder="Project description">
- <span class="input-group-btn">
- <button class="btn btn-primary btn-sm" type="button" id="project-description-btn">Save</button>
- </span>
- </div>
- </div>
- <hr>
- <p><strong>Created on</strong> $utils.formatDate($project.createTimestamp)</p>
- <p><strong>Last modified by</strong> $utils.formatDate($project.lastModifiedTimestamp)</p>
- <p><strong>Modified by</strong> $project.lastModifiedUser</p>
+ <div class="well" id="project-sidebar">
+ <h3>$project.name</h3>
+ <p class="editable" id="project-description">$project.description</p>
+ <div id="project-description-form" class="editable-form">
+ <div class="input-group">
+ <input type="text" class="form-control input-sm" id="project-description-edit" placeholder="Project description">
+ <span class="input-group-btn">
+ <button class="btn btn-primary btn-sm" type="button" id="project-description-btn">Save</button>
+ </span>
+ </div>
+ </div>
+ <hr>
+ <p><strong>Created on</strong> $utils.formatDate($project.createTimestamp)</p>
+ <p><strong>Last modified by</strong> $utils.formatDate($project.lastModifiedTimestamp)</p>
+ <p><strong>Modified by</strong> $project.lastModifiedUser</p>
- <hr>
+ <hr>
- <p><strong>Project admins:</strong> $admins</p>
- <p><strong>Your Permissions:</strong> $userpermission.toString()</p>
- </div>
+ <p><strong>Project admins:</strong> $admins</p>
+ <p><strong>Your Permissions:</strong> $userpermission.toString()</p>
+ </div>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/propertypage.vm b/src/main/resources/azkaban/webapp/servlet/velocity/propertypage.vm
index f7ce6af..3c7e0ce 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/propertypage.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/propertypage.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,38 +14,38 @@
* the License.
*#
-<!DOCTYPE html>
+<!DOCTYPE html>
<html lang="en">
- <head>
+ <head>
#parse("azkaban/webapp/servlet/velocity/style.vm")
#parse ("azkaban/webapp/servlet/velocity/javascript.vm")
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- var errorMessage = null;
- var successMessage = null;
-
- var projectId = "$project.name";
- </script>
- </head>
- <body>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ var errorMessage = null;
+ var successMessage = null;
+
+ var projectId = "$project.name";
+ </script>
+ </head>
+ <body>
#set($current_page="all")
#parse("azkaban/webapp/servlet/velocity/nav.vm")
-
+
#if ($errorMsg)
#parse("azkaban/webapp/servlet/velocity/errormsg.vm")
#else
- ## Page header
+ ## Page header
- <div class="az-page-header page-header-bare">
- <div class="container-full">
+ <div class="az-page-header page-header-bare">
+ <div class="container-full">
<h1><a href="${context}/manager?project=${project.name}&flow=${flowid}&job=${jobid}&prop=${property}">Properties <small>$property</small></a></h1>
- </div>
+ </div>
</div>
<div class="page-breadcrumb">
<div class="container-full">
@@ -62,67 +62,67 @@
#parse("azkaban/webapp/servlet/velocity/alerts.vm")
- <div class="row row-offcanvas row-offcanvas-right">
- <div class="col-xs-12 col-sm-9">
-
- ## Properties
-
- <div class="panel panel-default">
- <div class="panel-heading">$property</div>
-
- <table class="table table-striped table-bordered properties-table">
- <thead>
- <tr>
- <th class="tb-pname">Parameter Name</th>
- <th class="tb-pvalue">Value</th>
- </tr>
- </thead>
- <tbody>
- #foreach ($parameter in $parameters)
- <tr>
- <td class="property-key">$parameter.first</td>
+ <div class="row row-offcanvas row-offcanvas-right">
+ <div class="col-xs-12 col-sm-9">
+
+ ## Properties
+
+ <div class="panel panel-default">
+ <div class="panel-heading">$property</div>
+
+ <table class="table table-striped table-bordered properties-table">
+ <thead>
+ <tr>
+ <th class="tb-pname">Parameter Name</th>
+ <th class="tb-pvalue">Value</th>
+ </tr>
+ </thead>
+ <tbody>
+ #foreach ($parameter in $parameters)
+ <tr>
+ <td class="property-key">$parameter.first</td>
<td>$parameter.second</td>
- </tr>
- #end
- </tbody>
- </table>
- </div>
- </div><!-- /col-xs-8 -->
- <div class="col-xs-6 col-sm-3 sidebar-offcanvas">
- <div class="well" id="job-summary">
- <h4>Properties <small>$property</small></h4>
- <p><strong>Job</strong> $jobid</p>
- </div>
-
- <div class="panel panel-default">
- <div class="panel-heading">Inherited From</div>
- <ul class="list-group">
- #if ($inheritedproperties)
- #foreach ($inheritedproperty in $inheritedproperties)
- <li class="list-group-item"><a href="${context}/manager?project=${project.name}&flow=${flowid}&job=${jobid}&prop=$inheritedproperty">$inheritedproperty</a></li>
- #end
- #else
- <li class="list-group-item">No inherited properties.</li>
- #end
- </ul>
- </div>
-
- <div class="panel panel-default">
- <div class="panel-heading">Source of</div>
- <ul class="list-group">
- #if ($dependingproperties)
- #foreach ($dependingproperty in $dependingproperties)
- <li class="list-group-item"><a href="${context}/manager?project=${project.name}&flow=${flowid}&job=${jobid}&prop=$dependingproperty">$dependingproperty</a></li>
- #end
- #else
- <li class="list-group-item">No dependents.</li>
- #end
- </ul>
- </div>
- </div>
- </div><!-- /row -->
-
- </div><!-- /container-full -->
+ </tr>
+ #end
+ </tbody>
+ </table>
+ </div>
+ </div><!-- /col-xs-8 -->
+ <div class="col-xs-6 col-sm-3 sidebar-offcanvas">
+ <div class="well" id="job-summary">
+ <h4>Properties <small>$property</small></h4>
+ <p><strong>Job</strong> $jobid</p>
+ </div>
+
+ <div class="panel panel-default">
+ <div class="panel-heading">Inherited From</div>
+ <ul class="list-group">
+ #if ($inheritedproperties)
+ #foreach ($inheritedproperty in $inheritedproperties)
+ <li class="list-group-item"><a href="${context}/manager?project=${project.name}&flow=${flowid}&job=${jobid}&prop=$inheritedproperty">$inheritedproperty</a></li>
+ #end
+ #else
+ <li class="list-group-item">No inherited properties.</li>
+ #end
+ </ul>
+ </div>
+
+ <div class="panel panel-default">
+ <div class="panel-heading">Source of</div>
+ <ul class="list-group">
+ #if ($dependingproperties)
+ #foreach ($dependingproperty in $dependingproperties)
+ <li class="list-group-item"><a href="${context}/manager?project=${project.name}&flow=${flowid}&job=${jobid}&prop=$dependingproperty">$dependingproperty</a></li>
+ #end
+ #else
+ <li class="list-group-item">No dependents.</li>
+ #end
+ </ul>
+ </div>
+ </div>
+ </div><!-- /row -->
+
+ </div><!-- /container-full -->
#end
- </body>
+ </body>
</html>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/scheduledflowcalendarpage.vm b/src/main/resources/azkaban/webapp/servlet/velocity/scheduledflowcalendarpage.vm
index d5a37ca..bcb4221 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/scheduledflowcalendarpage.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/scheduledflowcalendarpage.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,35 +14,35 @@
* the License.
*#
-<!DOCTYPE html>
+<!DOCTYPE html>
<html lang="en">
- <head>
+ <head>
#parse ("azkaban/webapp/servlet/velocity/style.vm")
#parse ("azkaban/webapp/servlet/velocity/javascript.vm")
- <link rel="stylesheet" type="text/css" href="${context}/css/jquery-ui-1.10.1.custom.css" />
- <link rel="stylesheet" type="text/css" href="${context}/css/jquery-ui.css" />
-
- <script type="text/javascript" src="${context}/js/jqueryui/jquery-ui-1.10.1.custom.js"></script>
- <script type="text/javascript" src="${context}/js/jquery/jquery.svg.min.js"></script>
- <script type="text/javascript" src="${context}/js/jqueryui/jquery-ui-timepicker-addon.js"></script>
- <script type="text/javascript" src="${context}/js/jqueryui/jquery-ui-sliderAccess.js"></script>
+ <link rel="stylesheet" type="text/css" href="${context}/css/jquery-ui-1.10.1.custom.css" />
+ <link rel="stylesheet" type="text/css" href="${context}/css/jquery-ui.css" />
- <script type="text/javascript" src="${context}/js/azkaban/view/table-sort.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/schedule-svg.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/context-menu.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/util/svg-navigate.js"></script>
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- var errorMessage = null;
- var successMessage = null;
- </script>
- <link rel="stylesheet" type="text/css" href="${context}/css/jquery.svg.css" />
- </head>
- <body>
+ <script type="text/javascript" src="${context}/js/jqueryui/jquery-ui-1.10.1.custom.js"></script>
+ <script type="text/javascript" src="${context}/js/jquery/jquery.svg.min.js"></script>
+ <script type="text/javascript" src="${context}/js/jqueryui/jquery-ui-timepicker-addon.js"></script>
+ <script type="text/javascript" src="${context}/js/jqueryui/jquery-ui-sliderAccess.js"></script>
+
+ <script type="text/javascript" src="${context}/js/azkaban/view/table-sort.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/schedule-svg.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/context-menu.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/util/svg-navigate.js"></script>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ var errorMessage = null;
+ var successMessage = null;
+ </script>
+ <link rel="stylesheet" type="text/css" href="${context}/css/jquery.svg.css" />
+ </head>
+ <body>
#set ($current_page="schedule")
#parse ("azkaban/webapp/servlet/velocity/nav.vm")
@@ -70,9 +70,9 @@
<div id="svgDivCustom"></div>
</div>
</div>
-
+
<div id="contextMenu"></div>
</div>
#end
- </body>
+ </body>
</html>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/scheduledflowpage.vm b/src/main/resources/azkaban/webapp/servlet/velocity/scheduledflowpage.vm
index 4eac2cc..6e7c61b 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/scheduledflowpage.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/scheduledflowpage.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,30 +14,30 @@
* the License.
*#
-<!DOCTYPE html>
+<!DOCTYPE html>
<html lang="en">
<head>
#parse ("azkaban/webapp/servlet/velocity/style.vm")
#parse ("azkaban/webapp/servlet/velocity/javascript.vm")
- <link rel="stylesheet" type="text/css" href="${context}/css/bootstrap-datetimepicker.css" />
-
- <script type="text/javascript" src="${context}/js/moment.min.js"></script>
- <script type="text/javascript" src="${context}/js/bootstrap-datetimepicker.min.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/table-sort.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/schedule-sla.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/scheduled.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/util/schedule.js"></script>
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- var errorMessage = null;
- var successMessage = null;
- </script>
- </head>
- <body>
+ <link rel="stylesheet" type="text/css" href="${context}/css/bootstrap-datetimepicker.css" />
+
+ <script type="text/javascript" src="${context}/js/moment.min.js"></script>
+ <script type="text/javascript" src="${context}/js/bootstrap-datetimepicker.min.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/table-sort.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/schedule-sla.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/scheduled.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/util/schedule.js"></script>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ var errorMessage = null;
+ var successMessage = null;
+ </script>
+ </head>
+ <body>
#set ($current_page="schedule")
#parse ("azkaban/webapp/servlet/velocity/nav.vm")
@@ -46,22 +46,22 @@
#parse ("azkaban/webapp/servlet/velocity/errormsg.vm")
#else
- ## Page header.
+ ## Page header.
- <div class="az-page-header">
- <div class="container-full">
+ <div class="az-page-header">
+ <div class="container-full">
<h1><a href="${context}/schedule">Scheduled Flows</a></h1>
- </div>
- </div>
+ </div>
+ </div>
- ## Page content.
+ ## Page content.
- <div class="container-full">
+ <div class="container-full">
#parse ("azkaban/webapp/servlet/velocity/alerts.vm")
-
+
<div class="row">
- <div class="col-xs-12">
+ <div class="col-xs-12">
<table id="scheduledFlowsTbl" class="table table-striped table-condensed table-bordered table-hover">
<thead>
<tr>
@@ -78,8 +78,8 @@
</tr>
</thead>
<tbody>
- #if(!$schedules.isEmpty())
- #foreach($sched in $schedules)
+ #if(!$schedules.isEmpty())
+ #foreach($sched in $schedules)
<tr>
<td>${sched.scheduleId}</td>
<td class="tb-name">
@@ -96,22 +96,22 @@
<td><button type="button" id="removeSchedBtn" class="btn btn-sm btn-danger" onclick="removeSched(${sched.scheduleId})" >Remove Schedule</button></td>
<td><button type="button" id="addSlaBtn" class="btn btn-sm btn-primary" onclick="slaView.initFromSched(${sched.scheduleId}, '${sched.flowName}')" >Set SLA</button></td>
</tr>
- #end
- #else
+ #end
+ #else
<tr>
<td colspan="10">No scheduled flow found.</td>
</tr>
- #end
+ #end
</tbody>
</table>
- </div><!-- /col-xs-12 -->
- </div><!-- /row -->
+ </div><!-- /col-xs-12 -->
+ </div><!-- /row -->
## Set SLA modal.
-
- #parse ("azkaban/webapp/servlet/velocity/slapanel.vm")
- </div>
+ #parse ("azkaban/webapp/servlet/velocity/slapanel.vm")
+
+ </div>
#end
- </body>
+ </body>
</html>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/scheduleoptionspanel.vm b/src/main/resources/azkaban/webapp/servlet/velocity/scheduleoptionspanel.vm
index 908eaf3..627e042 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/scheduleoptionspanel.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/scheduleoptionspanel.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -15,161 +15,161 @@
*#
<div id="scheduleModalBackground" class="modalBackground2">
- <div id="schedule-options" class="modal modalContainer2">
- <a href='#' title='Close' class='modal-close'>x</a>
- <h3>Schedule Flow Options</h3>
- <div>
- <ul class="optionsPicker">
- <li id="scheduleGeneralOptions">General Options</li>
- <li id="scheduleFlowOptions">Flow Options</li>
- <!--li id="scheduleSlaOptions">SLA Options</li-->
- </ul>
- </div>
- <div class="optionsPane">
- <!--div id="scheduleSlaPanel" class="generalPanel panel">
- <div id="slaActions">
- <h4>SLA Alert Emails</h4>
- <dl>
- <dt >SLA Alert Emails</dt>
- <dd>
- <textarea id="slaEmails"></textarea>
- </dd>
- </dl>
- </div>
- <div id="slaRules">
- <h4>Flow SLA Rules</h4>
- <div class="tableDiv">
- <table id="flowRulesTbl">
- <thead>
- <tr>
- <th>Flow/Job</th>
- <th>Finish In</th>
- <th>Email Action</th>
- <th>Kill Action</th>
- </tr>
- </thead>
- <tbody>
- </tbody>
- </table>
- </div>
- <h4>Job SLA Rules</h4>
- <div class="tableDiv">
- <table id="jobRulesTbl">
- <thead>
- <tr>
- <th>Flow/Job</th>
- <th>Finish In</th>
- <th>Email Action</th>
- <th>Kill Action</th>
- </tr>
- </thead>
- <tbody>
- </tbody>
- </table>
- </div>
- </div>
- </div-->
- <div id="scheduleGeneralPanel" class="generalPanel panel">
- <div id="scheduleInfo">
- <dl>
- <dt>Schedule Time</dt>
- <dd>
- <input id="advhour" type="text" size="2" value="12"/>
- <input id="advminutes" type="text" size="2" value="00"/>
- <select id="advam_pm">
- <option>pm</option>
- <option>am</option>
- </select>
- <select id="advtimezone">
- <option>PDT</option>
- <option>UTC</option>
- </select>
- </dd>
- <dt>Schedule Date</dt><dd><input type="text" id="advdatepicker" /></dd>
- <dt>Recurrence</dt>
- <dd>
- <input id="advis_recurring" type="checkbox" checked />
- <span>repeat every</span>
- <input id="advperiod" type="text" size="2" value="1"/>
- <select id="advperiod_units">
- <option value="d">Days</option>
- <option value="h">Hours</option>
- <option value="m">Minutes</option>
- <option value="M">Months</option>
- <option value="w">Weeks</option>
- </select>
- </dd>
- </dl>
- </div>
- <br></br>
- <br></br>
- <div id="scheduleCompleteActions">
- <h4>Completion Actions</h4>
- <dl>
- <dt>Failure Action</dt>
- <dd>
- <select id="scheduleFailureAction" name="failureAction">
- <option value="finishCurrent">Finish Current Running</option>
- <option value="cancelImmediately">Cancel All</option>
- <option value="finishPossible">Finish All Possible</option>
- </select>
- </dd>
- <dt>Failure Email</dt>
- <dd>
- <textarea id="scheduleFailureEmails"></textarea>
- </dd>
- <dt>Notify on Failure</dt>
- <dd>
- <input id="scheduleNotifyFailureFirst" class="checkbox" type="checkbox" name="notify" value="first" checked >First Failure</input>
- <input id="scheduleNotifyFailureLast" class="checkbox" type="checkbox" name="notify" value="last">Flow Stop</input>
- </dd>
- <dt>Success Email</dt>
- <dd>
- <textarea id="scheduleSuccessEmails"></textarea>
- </dd>
- <dt>Concurrent Execution</dt>
- <dd id="scheduleExecutingJob" class="disabled">
- <input id="scheduleIgnore" class="radio" type="radio" name="concurrent" value="ignore" checked /><label class="radioLabel" for="ignore">Run Concurrently</label>
- <input id="schedulePipeline" class="radio" type="radio" name="concurrent" value="pipeline" /><label class="radioLabel" for="pipeline">Pipeline</label>
- <input id="scheduleQueue" class="radio" type="radio" name="concurrent" value="queue" /><label class="radioLabel" for="queue">Queue Job</label>
- </dd>
- </dl>
- </div>
- <div id="scheduleFlowPropertyOverride">
- <h4>Flow Property Override</h4>
- <div class="tableDiv">
- <table>
- <thead>
- <tr>
- <th>Name</th>
- <th>Value</th>
- </tr>
- </thead>
- <tbody>
- <tr id="scheduleAddRow"><td id="addRow-col" colspan="2"><span class="addIcon"></span><a href="#">Add Row</a></td></tr>
- </tbody>
- </table>
- </div>
- </div>
- </div>
- <div id="scheduleGraphPanel" class="graphPanel panel">
- <div id="scheduleJobListCustom" class="jobList">
- <div class="filterList">
- <input class="filter" placeholder=" Job Filter" />
- </div>
- <div class="list">
- </div>
- <div class="btn5 resetPanZoomBtn" >Reset Pan Zoom</div>
- </div>
- <div id="scheduleSvgDivCustom" class="svgDiv" >
- <svg class="svgGraph" xmlns="http://www.w3.org/2000/svg" version="1.1" shape-rendering="optimize-speed" text-rendering="optimize-speed" >
- </svg>
- </div>
- </div>
- </div>
- <div class="actions">
- <a class="yes btn1" id="adv-schedule-btn" href="#">Schedule</a>
- <a class="no simplemodal-close btn3" id="schedule-cancel-btn" href="#">Cancel</a>
- </div>
- </div>
+ <div id="schedule-options" class="modal modalContainer2">
+ <a href='#' title='Close' class='modal-close'>x</a>
+ <h3>Schedule Flow Options</h3>
+ <div>
+ <ul class="optionsPicker">
+ <li id="scheduleGeneralOptions">General Options</li>
+ <li id="scheduleFlowOptions">Flow Options</li>
+ <!--li id="scheduleSlaOptions">SLA Options</li-->
+ </ul>
+ </div>
+ <div class="optionsPane">
+ <!--div id="scheduleSlaPanel" class="generalPanel panel">
+ <div id="slaActions">
+ <h4>SLA Alert Emails</h4>
+ <dl>
+ <dt >SLA Alert Emails</dt>
+ <dd>
+ <textarea id="slaEmails"></textarea>
+ </dd>
+ </dl>
+ </div>
+ <div id="slaRules">
+ <h4>Flow SLA Rules</h4>
+ <div class="tableDiv">
+ <table id="flowRulesTbl">
+ <thead>
+ <tr>
+ <th>Flow/Job</th>
+ <th>Finish In</th>
+ <th>Email Action</th>
+ <th>Kill Action</th>
+ </tr>
+ </thead>
+ <tbody>
+ </tbody>
+ </table>
+ </div>
+ <h4>Job SLA Rules</h4>
+ <div class="tableDiv">
+ <table id="jobRulesTbl">
+ <thead>
+ <tr>
+ <th>Flow/Job</th>
+ <th>Finish In</th>
+ <th>Email Action</th>
+ <th>Kill Action</th>
+ </tr>
+ </thead>
+ <tbody>
+ </tbody>
+ </table>
+ </div>
+ </div>
+ </div-->
+ <div id="scheduleGeneralPanel" class="generalPanel panel">
+ <div id="scheduleInfo">
+ <dl>
+ <dt>Schedule Time</dt>
+ <dd>
+ <input id="advhour" type="text" size="2" value="12"/>
+ <input id="advminutes" type="text" size="2" value="00"/>
+ <select id="advam_pm">
+ <option>pm</option>
+ <option>am</option>
+ </select>
+ <select id="advtimezone">
+ <option>PDT</option>
+ <option>UTC</option>
+ </select>
+ </dd>
+ <dt>Schedule Date</dt><dd><input type="text" id="advdatepicker" /></dd>
+ <dt>Recurrence</dt>
+ <dd>
+ <input id="advis_recurring" type="checkbox" checked />
+ <span>repeat every</span>
+ <input id="advperiod" type="text" size="2" value="1"/>
+ <select id="advperiod_units">
+ <option value="d">Days</option>
+ <option value="h">Hours</option>
+ <option value="m">Minutes</option>
+ <option value="M">Months</option>
+ <option value="w">Weeks</option>
+ </select>
+ </dd>
+ </dl>
+ </div>
+ <br></br>
+ <br></br>
+ <div id="scheduleCompleteActions">
+ <h4>Completion Actions</h4>
+ <dl>
+ <dt>Failure Action</dt>
+ <dd>
+ <select id="scheduleFailureAction" name="failureAction">
+ <option value="finishCurrent">Finish Current Running</option>
+ <option value="cancelImmediately">Cancel All</option>
+ <option value="finishPossible">Finish All Possible</option>
+ </select>
+ </dd>
+ <dt>Failure Email</dt>
+ <dd>
+ <textarea id="scheduleFailureEmails"></textarea>
+ </dd>
+ <dt>Notify on Failure</dt>
+ <dd>
+ <input id="scheduleNotifyFailureFirst" class="checkbox" type="checkbox" name="notify" value="first" checked >First Failure</input>
+ <input id="scheduleNotifyFailureLast" class="checkbox" type="checkbox" name="notify" value="last">Flow Stop</input>
+ </dd>
+ <dt>Success Email</dt>
+ <dd>
+ <textarea id="scheduleSuccessEmails"></textarea>
+ </dd>
+ <dt>Concurrent Execution</dt>
+ <dd id="scheduleExecutingJob" class="disabled">
+ <input id="scheduleIgnore" class="radio" type="radio" name="concurrent" value="ignore" checked /><label class="radioLabel" for="ignore">Run Concurrently</label>
+ <input id="schedulePipeline" class="radio" type="radio" name="concurrent" value="pipeline" /><label class="radioLabel" for="pipeline">Pipeline</label>
+ <input id="scheduleQueue" class="radio" type="radio" name="concurrent" value="queue" /><label class="radioLabel" for="queue">Queue Job</label>
+ </dd>
+ </dl>
+ </div>
+ <div id="scheduleFlowPropertyOverride">
+ <h4>Flow Property Override</h4>
+ <div class="tableDiv">
+ <table>
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th>Value</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr id="scheduleAddRow"><td id="addRow-col" colspan="2"><span class="addIcon"></span><a href="#">Add Row</a></td></tr>
+ </tbody>
+ </table>
+ </div>
+ </div>
+ </div>
+ <div id="scheduleGraphPanel" class="graphPanel panel">
+ <div id="scheduleJobListCustom" class="jobList">
+ <div class="filterList">
+ <input class="filter" placeholder=" Job Filter" />
+ </div>
+ <div class="list">
+ </div>
+ <div class="btn5 resetPanZoomBtn" >Reset Pan Zoom</div>
+ </div>
+ <div id="scheduleSvgDivCustom" class="svgDiv" >
+ <svg class="svgGraph" xmlns="http://www.w3.org/2000/svg" version="1.1" shape-rendering="optimize-speed" text-rendering="optimize-speed" >
+ </svg>
+ </div>
+ </div>
+ </div>
+ <div class="actions">
+ <a class="yes btn1" id="adv-schedule-btn" href="#">Schedule</a>
+ <a class="no simplemodal-close btn3" id="schedule-cancel-btn" href="#">Cancel</a>
+ </div>
+ </div>
</div>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/schedulepanel.vm b/src/main/resources/azkaban/webapp/servlet/velocity/schedulepanel.vm
index 3e39a5c..8aff70f 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/schedulepanel.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/schedulepanel.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,19 +14,19 @@
* the License.
*#
- <script type="text/javascript" src="${context}/js/azkaban/util/date.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/schedule-panel.js"></script>
-
- <div class="modal" id="schedule-modal">
- <div class="modal-dialog">
- <div class="modal-content">
- <div class="modal-header">
- <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
- <h4 class="modal-title">Schedule Flow Options</h4>
- </div><!-- /modal-header -->
- <div class="modal-body">
- <fieldset class="form-horizontal">
- <div class="form-group">
+ <script type="text/javascript" src="${context}/js/azkaban/util/date.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/schedule-panel.js"></script>
+
+ <div class="modal" id="schedule-modal">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title">Schedule Flow Options</h4>
+ </div><!-- /modal-header -->
+ <div class="modal-body">
+ <fieldset class="form-horizontal">
+ <div class="form-group">
<label class="col-sm-2 control-label">Time</label>
<div class="col-sm-7">
<input type="text" id="timepicker" class="form-control">
@@ -37,14 +37,14 @@
<option>UTC</option>
</select>
</div>
- </div>
- <div class="form-group">
+ </div>
+ <div class="form-group">
<label class="col-sm-2 control-label">Date</label>
<div class="col-sm-10">
<input type="text" id="datepicker" class="form-control">
</div>
- </div>
- <div class="form-group">
+ </div>
+ <div class="form-group">
<label class="col-sm-2">Recurrence</label>
<div class="col-sm-3">
<div class="checkbox">
@@ -64,13 +64,13 @@
<option value="w">Weeks</option>
</select>
</div>
- </div>
- </fieldset>
- </div>
- <div class="modal-footer">
- <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
- <button type="button" class="btn btn-success" id="schedule-button">Schedule</button>
- </div>
- </div>
- </div>
- </div>
+ </div>
+ </fieldset>
+ </div>
+ <div class="modal-footer">
+ <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
+ <button type="button" class="btn btn-success" id="schedule-button">Schedule</button>
+ </div>
+ </div>
+ </div>
+ </div>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/slapanel.vm b/src/main/resources/azkaban/webapp/servlet/velocity/slapanel.vm
index e57f737..dd25911 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/slapanel.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/slapanel.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,47 +14,47 @@
* the License.
*#
- <div class="modal modal-wide" id="sla-options">
- <div class="modal-dialog">
- <div class="modal-content">
- <div class="modal-header">
- <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
- <h4 class="modal-title">SLA Options</h4>
- </div>
- <div class="modal-body">
- <h4>SLA Alert Emails</h4>
- <fieldset>
- <div class="form-group">
- <label>SLA Alert Emails</label>
- <textarea id="slaEmails" class="form-control"></textarea>
- </div>
- </fieldset>
- <h4>Flow SLA Rules</h4>
- <table class="table table-striped" id="flowRulesTbl">
- <thead>
- <tr>
- <th>Flow/Job</th>
- <th>Sla Rule</th>
- <th>Duration</th>
- <th>Email Action</th>
- <th>Kill Action</th>
- </tr>
- </thead>
- <tbody>
- <tr id="addRow">
- <td id="addRow-col" colspan="5">
- <span class="addIcon"></span>
- <button type="button" class="btn btn-xs btn-success" id="add-btn">Add Row</button>
- </td>
- </tr>
- </tbody>
- </table>
- </div>
- <div class="modal-footer">
- <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
- <!--<button type="button" class="btn btn-danger" id="remove-sla-btn">Remove SLA</button>-->
- <button type="button" class="btn btn-primary" id="set-sla-btn">Set/Change SLA</button>
- </div>
- </div>
- </div>
- </div>
+ <div class="modal modal-wide" id="sla-options">
+ <div class="modal-dialog">
+ <div class="modal-content">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
+ <h4 class="modal-title">SLA Options</h4>
+ </div>
+ <div class="modal-body">
+ <h4>SLA Alert Emails</h4>
+ <fieldset>
+ <div class="form-group">
+ <label>SLA Alert Emails</label>
+ <textarea id="slaEmails" class="form-control"></textarea>
+ </div>
+ </fieldset>
+ <h4>Flow SLA Rules</h4>
+ <table class="table table-striped" id="flowRulesTbl">
+ <thead>
+ <tr>
+ <th>Flow/Job</th>
+ <th>Sla Rule</th>
+ <th>Duration</th>
+ <th>Email Action</th>
+ <th>Kill Action</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr id="addRow">
+ <td id="addRow-col" colspan="5">
+ <span class="addIcon"></span>
+ <button type="button" class="btn btn-xs btn-success" id="add-btn">Add Row</button>
+ </td>
+ </tr>
+ </tbody>
+ </table>
+ </div>
+ <div class="modal-footer">
+ <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
+ <!--<button type="button" class="btn btn-danger" id="remove-sla-btn">Remove SLA</button>-->
+ <button type="button" class="btn btn-primary" id="set-sla-btn">Set/Change SLA</button>
+ </div>
+ </div>
+ </div>
+ </div>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/style.vm b/src/main/resources/azkaban/webapp/servlet/velocity/style.vm
index 1cebf28..381b5e4 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/style.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/style.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -16,22 +16,22 @@
<meta charset="utf-8">
<title>#appname()</title>
-
- <link rel="shortcut icon" href="${context}/favicon.ico" />
+
+ <link rel="shortcut icon" href="${context}/favicon.ico" />
<!-- Bootstrap core CSS -->
<link href="/css/bootstrap.css" rel="stylesheet">
- <link href="/css/azkaban.css" rel="stylesheet">
- <style type="text/css">
- .navbar-enviro .navbar-enviro-name {
- color: ${azkaban_color};
- }
- .navbar-inverse {
- border-top: 5px solid ${azkaban_color};
- }
- .navbar-inverse .navbar-nav > .active >a {
- border-bottom: 1px solid ${azkaban_color};
- }
- </style>
+ <link href="/css/azkaban.css" rel="stylesheet">
+ <style type="text/css">
+ .navbar-enviro .navbar-enviro-name {
+ color: ${azkaban_color};
+ }
+ .navbar-inverse {
+ border-top: 5px solid ${azkaban_color};
+ }
+ .navbar-inverse .navbar-nav > .active >a {
+ border-bottom: 1px solid ${azkaban_color};
+ }
+ </style>
<!-- HTML5 shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!--[if lt IE 9]>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/svgflowincludes.vm b/src/main/resources/azkaban/webapp/servlet/velocity/svgflowincludes.vm
index 70a3ff6..c669d26 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/svgflowincludes.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/svgflowincludes.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,24 +14,24 @@
* the License.
*#
- <script type="text/javascript" src="${context}/js/jquery.svg.min.js"></script>
- <script type="text/javascript" src="${context}/js/jquery.svganim.min.js"></script>
- <script type="text/javascript" src="${context}/js/jquery.svgfilter.min.js"></script>
-
- <script type="text/javascript" src="${context}/js/azkaban/util/common.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/util/date.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/util/ajax.js"></script>
-
- <script type="text/javascript" src="${context}/js/azkaban/util/svgutils.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/util/svg-navigate.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/util/layout.js"></script>
-
- <script type="text/javascript" src="${context}/js/azkaban/view/context-menu.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/util/job-status.js"></script>
+ <script type="text/javascript" src="${context}/js/jquery.svg.min.js"></script>
+ <script type="text/javascript" src="${context}/js/jquery.svganim.min.js"></script>
+ <script type="text/javascript" src="${context}/js/jquery.svgfilter.min.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/util/flow-loader.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/job-list.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/model/svg-graph.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/svg-graph.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/util/common.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/util/date.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/util/ajax.js"></script>
- <link rel="stylesheet" type="text/css" href="${context}/css/azkaban-graph.css" />
+ <script type="text/javascript" src="${context}/js/azkaban/util/svgutils.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/util/svg-navigate.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/util/layout.js"></script>
+
+ <script type="text/javascript" src="${context}/js/azkaban/view/context-menu.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/util/job-status.js"></script>
+
+ <script type="text/javascript" src="${context}/js/azkaban/util/flow-loader.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/job-list.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/model/svg-graph.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/svg-graph.js"></script>
+
+ <link rel="stylesheet" type="text/css" href="${context}/css/azkaban-graph.css" />
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/title.vm b/src/main/resources/azkaban/webapp/servlet/velocity/title.vm
index 6244263..1d13dbc 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/title.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/title.vm
@@ -1,20 +1,20 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*#
- <h1 class="logo"><a href="${context}/" title="Azkaban">Azkaban</a></h1>
- <div class="enviro">
- <p class="enviro-name">${azkaban_name}</p>
- <p class="enviro-server">${azkaban_label}</p>
- </div>
+ <h1 class="logo"><a href="${context}/" title="Azkaban">Azkaban</a></h1>
+ <div class="enviro">
+ <p class="enviro-name">${azkaban_name}</p>
+ <p class="enviro-server">${azkaban_label}</p>
+ </div>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/triggerspage.vm b/src/main/resources/azkaban/webapp/servlet/velocity/triggerspage.vm
index c48ec9e..7e5afcf 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/triggerspage.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/triggerspage.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn, Inc
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,31 +14,31 @@
* the License.
*#
-<!DOCTYPE html>
+<!DOCTYPE html>
<html lang="en">
- <head>
+ <head>
#parse("azkaban/webapp/servlet/velocity/style.vm")
#parse("azkaban/webapp/servlet/velocity/javascript.vm")
- <link rel="stylesheet" type="text/css" href="${context}/css/jquery-ui-1.10.1.custom.css" />
- <link rel="stylesheet" type="text/css" href="${context}/css/jquery-ui.css" />
-
- <script type="text/javascript" src="${context}/js/jqueryui/jquery-ui-1.10.1.custom.js"></script>
- <script type="text/javascript" src="${context}/js/jqueryui/jquery-ui-timepicker-addon.js"></script>
- <script type="text/javascript" src="${context}/js/jqueryui/jquery-ui-sliderAccess.js"></script>
+ <link rel="stylesheet" type="text/css" href="${context}/css/jquery-ui-1.10.1.custom.css" />
+ <link rel="stylesheet" type="text/css" href="${context}/css/jquery-ui.css" />
- <script type="text/javascript" src="${context}/js/azkaban/view/table-sort.js"></script>
- <script type="text/javascript" src="${context}/js/azkaban/view/triggers.js"></script>
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- var errorMessage = null;
- var successMessage = null;
- </script>
- </head>
- <body>
+ <script type="text/javascript" src="${context}/js/jqueryui/jquery-ui-1.10.1.custom.js"></script>
+ <script type="text/javascript" src="${context}/js/jqueryui/jquery-ui-timepicker-addon.js"></script>
+ <script type="text/javascript" src="${context}/js/jqueryui/jquery-ui-sliderAccess.js"></script>
+
+ <script type="text/javascript" src="${context}/js/azkaban/view/table-sort.js"></script>
+ <script type="text/javascript" src="${context}/js/azkaban/view/triggers.js"></script>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ var errorMessage = null;
+ var successMessage = null;
+ </script>
+ </head>
+ <body>
#set ($current_page="triggers")
#parse ("azkaban/webapp/servlet/velocity/nav.vm")
@@ -52,12 +52,12 @@
<h1>All Triggers</h1>
</div>
</div>
-
+
<div class="container-full">
#parse ("azkaban/webapp/servlet/velocity/alerts.vm")
- <div class="row">
+ <div class="row">
<div class="col-xs-12">
<table id="triggersTbl" class="table table-striped table-bordered table-condensed table-hover">
<thead>
@@ -91,5 +91,5 @@
</div>
</div>
#end
- </body>
+ </body>
</html>
diff --git a/src/main/resources/azkaban/webapp/servlet/velocity/viewer.vm b/src/main/resources/azkaban/webapp/servlet/velocity/viewer.vm
index cb5e8d6..7bb9000 100644
--- a/src/main/resources/azkaban/webapp/servlet/velocity/viewer.vm
+++ b/src/main/resources/azkaban/webapp/servlet/velocity/viewer.vm
@@ -1,12 +1,12 @@
#*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -14,29 +14,29 @@
* the License.
*#
-<!DOCTYPE html>
+<!DOCTYPE html>
<html>
- <head>
+ <head>
#parse ("azkaban/webapp/servlet/velocity/style.vm")
#parse ("azkaban/webapp/servlet/velocity/javascript.vm")
- <script type="text/javascript">
- var contextURL = "${context}";
- var currentTime = ${currentTime};
- var timezone = "${timezone}";
- </script>
- <style>
- .logout-label {
- float: right;
- margin: 3px;
- }
- .logout-submit {
- float: right;
- }
- </style>
- </head>
- <body>
+ <script type="text/javascript">
+ var contextURL = "${context}";
+ var currentTime = ${currentTime};
+ var timezone = "${timezone}";
+ </script>
+ <style>
+ .logout-label {
+ float: right;
+ margin: 3px;
+ }
+ .logout-submit {
+ float: right;
+ }
+ </style>
+ </head>
+ <body>
#set ($current_page="viewer")
#parse ("azkaban/webapp/servlet/velocity/nav.vm")
@@ -48,7 +48,7 @@
#parse ("azkaban/webapp/servlet/velocity/alerts.vm")
#parse ($viewervm)
- </div>
+ </div>
#end
- </body>
+ </body>
</html>
diff --git a/src/restli/java/azkaban/restli/ProjectManagerResource.java b/src/restli/java/azkaban/restli/ProjectManagerResource.java
index 5255711..a3b61b8 100644
--- a/src/restli/java/azkaban/restli/ProjectManagerResource.java
+++ b/src/restli/java/azkaban/restli/ProjectManagerResource.java
@@ -40,71 +40,79 @@ import com.linkedin.restli.server.resources.ResourceContextHolder;
@RestLiActions(name = "project", namespace = "azkaban.restli")
public class ProjectManagerResource extends ResourceContextHolder {
- private static final Logger logger = Logger.getLogger(ProjectManagerResource.class);
-
- public AzkabanWebServer getAzkaban() {
- return AzkabanWebServer.getInstance();
- }
-
- @Action(name = "deploy")
- public String deploy(
- @ActionParam("sessionId") String sessionId,
- @ActionParam("projectName") String projectName,
- @ActionParam("packageUrl") String packageUrl)
- throws ProjectManagerException, UserManagerException, ServletException, IOException {
- logger.info("Deploy called. {sessionId: " + sessionId +
- ", projectName: " + projectName +
- ", packageUrl:" + packageUrl + "}");
-
- String ip = (String)this.getContext().getRawRequestContext().getLocalAttr("REMOTE_ADDR");
- User user = ResourceUtils.getUserFromSessionId(sessionId, ip);
- ProjectManager projectManager = getAzkaban().getProjectManager();
- Project project = projectManager.getProject(projectName);
- if (project == null) {
- throw new ProjectManagerException("Project '" + projectName + "' not found.");
- }
-
- if (!ResourceUtils.hasPermission(project, user, Permission.Type.WRITE)) {
- String errorMsg = "User " + user.getUserId() + " has no permission to write to project " + project.getName();
- logger.error(errorMsg);
- throw new ProjectManagerException(errorMsg);
- }
+ private static final Logger logger = Logger
+ .getLogger(ProjectManagerResource.class);
- logger.info("Target package URL is " + packageUrl);
- URL url = null;
- try {
- url = new URL(packageUrl);
- } catch (MalformedURLException e) {
- String errorMsg = "URL " + packageUrl + " is malformed.";
- logger.error(errorMsg, e);
- throw new ProjectManagerException(errorMsg, e);
- }
-
- String filename = getFileName(url.getFile());
- File tempDir = Utils.createTempDir();
- File archiveFile = new File(tempDir, filename);
- try {
- // Since zip files can be large, don't specify an explicit read or connection
- // timeout. This will cause the call to block until the download is complete.
- logger.info("Downloading package from " + packageUrl);
- FileUtils.copyURLToFile(url, archiveFile);
-
- logger.info("Downloaded to " + archiveFile.toString());
- projectManager.uploadProject(project, archiveFile, "zip", user);
- } catch (IOException e) {
- String errorMsg = "Download of URL " + packageUrl + " to " + archiveFile.toString() + " failed";
- logger.error(errorMsg, e);
- throw new ProjectManagerException(errorMsg, e);
- }
- finally {
- if (tempDir.exists()) {
- FileUtils.deleteDirectory(tempDir);
- }
- }
- return Integer.toString(project.getVersion());
- }
+ public AzkabanWebServer getAzkaban() {
+ return AzkabanWebServer.getInstance();
+ }
- private String getFileName(String file) {
- return file.substring(file.lastIndexOf("/") + 1);
- }
+ @Action(name = "deploy")
+ public String deploy(@ActionParam("sessionId") String sessionId,
+ @ActionParam("projectName") String projectName,
+ @ActionParam("packageUrl") String packageUrl)
+ throws ProjectManagerException, UserManagerException, ServletException,
+ IOException {
+ logger.info("Deploy called. {sessionId: " + sessionId + ", projectName: "
+ + projectName + ", packageUrl:" + packageUrl + "}");
+
+ String ip =
+ (String) this.getContext().getRawRequestContext()
+ .getLocalAttr("REMOTE_ADDR");
+ User user = ResourceUtils.getUserFromSessionId(sessionId, ip);
+ ProjectManager projectManager = getAzkaban().getProjectManager();
+ Project project = projectManager.getProject(projectName);
+ if (project == null) {
+ throw new ProjectManagerException("Project '" + projectName
+ + "' not found.");
+ }
+
+ if (!ResourceUtils.hasPermission(project, user, Permission.Type.WRITE)) {
+ String errorMsg =
+ "User " + user.getUserId()
+ + " has no permission to write to project " + project.getName();
+ logger.error(errorMsg);
+ throw new ProjectManagerException(errorMsg);
+ }
+
+ logger.info("Target package URL is " + packageUrl);
+ URL url = null;
+ try {
+ url = new URL(packageUrl);
+ } catch (MalformedURLException e) {
+ String errorMsg = "URL " + packageUrl + " is malformed.";
+ logger.error(errorMsg, e);
+ throw new ProjectManagerException(errorMsg, e);
+ }
+
+ String filename = getFileName(url.getFile());
+ File tempDir = Utils.createTempDir();
+ File archiveFile = new File(tempDir, filename);
+ try {
+      // Since zip files can be large, don't specify an explicit read or
+      // connection timeout.
+      // This will cause the call to block until the download is
+      // complete.
+ logger.info("Downloading package from " + packageUrl);
+ FileUtils.copyURLToFile(url, archiveFile);
+
+ logger.info("Downloaded to " + archiveFile.toString());
+ projectManager.uploadProject(project, archiveFile, "zip", user);
+ } catch (IOException e) {
+ String errorMsg =
+ "Download of URL " + packageUrl + " to " + archiveFile.toString()
+ + " failed";
+ logger.error(errorMsg, e);
+ throw new ProjectManagerException(errorMsg, e);
+ } finally {
+ if (tempDir.exists()) {
+ FileUtils.deleteDirectory(tempDir);
+ }
+ }
+ return Integer.toString(project.getVersion());
+ }
+
+ private String getFileName(String file) {
+ return file.substring(file.lastIndexOf("/") + 1);
+ }
}
diff --git a/src/restli/java/azkaban/restli/ResourceUtils.java b/src/restli/java/azkaban/restli/ResourceUtils.java
index 18229cf..8ca2f5f 100644
--- a/src/restli/java/azkaban/restli/ResourceUtils.java
+++ b/src/restli/java/azkaban/restli/ResourceUtils.java
@@ -25,33 +25,35 @@ import azkaban.webapp.AzkabanWebServer;
import azkaban.webapp.session.Session;
public class ResourceUtils {
-
- public static boolean hasPermission(Project project, User user, Permission.Type type) {
- UserManager userManager = AzkabanWebServer.getInstance().getUserManager();
- if (project.hasPermission(user, type)) {
- return true;
- }
-
- for (String roleName: user.getRoles()) {
- Role role = userManager.getRole(roleName);
- if (role.getPermission().isPermissionSet(type) ||
- role.getPermission().isPermissionSet(Permission.Type.ADMIN)) {
- return true;
- }
- }
-
- return false;
- }
-
- public static User getUserFromSessionId(String sessionId, String ip) throws UserManagerException {
- Session session = AzkabanWebServer.getInstance().getSessionCache().getSession(sessionId);
- if (session == null) {
- throw new UserManagerException("Invalid session. Login required");
- }
- else if (!session.getIp().equals(ip)) {
- throw new UserManagerException("Invalid session. Session expired.");
- }
-
- return session.getUser();
- }
+
+ public static boolean hasPermission(Project project, User user,
+ Permission.Type type) {
+ UserManager userManager = AzkabanWebServer.getInstance().getUserManager();
+ if (project.hasPermission(user, type)) {
+ return true;
+ }
+
+ for (String roleName : user.getRoles()) {
+ Role role = userManager.getRole(roleName);
+ if (role.getPermission().isPermissionSet(type)
+ || role.getPermission().isPermissionSet(Permission.Type.ADMIN)) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ public static User getUserFromSessionId(String sessionId, String ip)
+ throws UserManagerException {
+ Session session =
+ AzkabanWebServer.getInstance().getSessionCache().getSession(sessionId);
+ if (session == null) {
+ throw new UserManagerException("Invalid session. Login required");
+ } else if (!session.getIp().equals(ip)) {
+ throw new UserManagerException("Invalid session. Session expired.");
+ }
+
+ return session.getUser();
+ }
}
diff --git a/src/restli/java/azkaban/restli/UserManagerResource.java b/src/restli/java/azkaban/restli/UserManagerResource.java
index 1a22263..a55c630 100644
--- a/src/restli/java/azkaban/restli/UserManagerResource.java
+++ b/src/restli/java/azkaban/restli/UserManagerResource.java
@@ -31,65 +31,70 @@ import com.linkedin.restli.server.annotations.ActionParam;
import com.linkedin.restli.server.annotations.RestLiActions;
import com.linkedin.restli.server.resources.ResourceContextHolder;
-
@RestLiActions(name = "user", namespace = "azkaban.restli")
public class UserManagerResource extends ResourceContextHolder {
- private static final Logger logger = Logger.getLogger(UserManagerResource.class);
-
- public AzkabanWebServer getAzkaban() {
- return AzkabanWebServer.getInstance();
- }
-
- @Action(name = "login")
- public String login(
- @ActionParam("username") String username,
- @ActionParam("password") String password)
- throws UserManagerException, ServletException {
- String ip = (String)this.getContext().getRawRequestContext().getLocalAttr("REMOTE_ADDR");
- logger.info("Attempting to login for " + username + " from ip '" + ip + "'");
-
- Session session = createSession(username, password, ip);
-
- logger.info("Session id " + session.getSessionId() + " created for user '" + username + "' and ip " + ip);
- return session.getSessionId();
- }
-
- @Action(name = "getUserFromSessionId")
- public User getUserFromSessionId(@ActionParam("sessionId") String sessionId) {
- String ip = (String)this.getContext().getRawRequestContext().getLocalAttr("REMOTE_ADDR");
- Session session = getSessionFromSessionId(sessionId, ip);
- azkaban.user.User azUser = session.getUser();
-
- // Fill out the restli object with properties from the Azkaban user
- User user = new User();
- user.setUserId(azUser.getUserId());
- user.setEmail(azUser.getEmail());
- return user;
- }
-
- private Session createSession(String username, String password, String ip)
- throws UserManagerException, ServletException {
- UserManager manager = getAzkaban().getUserManager();
- azkaban.user.User user = manager.getUser(username, password);
-
- String randomUID = UUID.randomUUID().toString();
- Session session = new Session(randomUID, user, ip);
- getAzkaban().getSessionCache().addSession(session);
-
- return session;
- }
-
- private Session getSessionFromSessionId(String sessionId, String remoteIp) {
- if (sessionId == null) {
- return null;
- }
-
- Session session = getAzkaban().getSessionCache().getSession(sessionId);
- // Check if the IP's are equal. If not, we invalidate the sesson.
- if (session == null || !remoteIp.equals(session.getIp())) {
- return null;
- }
-
- return session;
- }
+ private static final Logger logger = Logger
+ .getLogger(UserManagerResource.class);
+
+ public AzkabanWebServer getAzkaban() {
+ return AzkabanWebServer.getInstance();
+ }
+
+ @Action(name = "login")
+ public String login(@ActionParam("username") String username,
+ @ActionParam("password") String password) throws UserManagerException,
+ ServletException {
+ String ip =
+ (String) this.getContext().getRawRequestContext()
+ .getLocalAttr("REMOTE_ADDR");
+ logger
+ .info("Attempting to login for " + username + " from ip '" + ip + "'");
+
+ Session session = createSession(username, password, ip);
+
+ logger.info("Session id " + session.getSessionId() + " created for user '"
+ + username + "' and ip " + ip);
+ return session.getSessionId();
+ }
+
+ @Action(name = "getUserFromSessionId")
+ public User getUserFromSessionId(@ActionParam("sessionId") String sessionId) {
+ String ip =
+ (String) this.getContext().getRawRequestContext()
+ .getLocalAttr("REMOTE_ADDR");
+ Session session = getSessionFromSessionId(sessionId, ip);
+ azkaban.user.User azUser = session.getUser();
+
+ // Fill out the restli object with properties from the Azkaban user
+ User user = new User();
+ user.setUserId(azUser.getUserId());
+ user.setEmail(azUser.getEmail());
+ return user;
+ }
+
+ private Session createSession(String username, String password, String ip)
+ throws UserManagerException, ServletException {
+ UserManager manager = getAzkaban().getUserManager();
+ azkaban.user.User user = manager.getUser(username, password);
+
+ String randomUID = UUID.randomUUID().toString();
+ Session session = new Session(randomUID, user, ip);
+ getAzkaban().getSessionCache().addSession(session);
+
+ return session;
+ }
+
+ private Session getSessionFromSessionId(String sessionId, String remoteIp) {
+ if (sessionId == null) {
+ return null;
+ }
+
+ Session session = getAzkaban().getSessionCache().getSession(sessionId);
+ // Check if the IPs are equal. If not, we invalidate the session.
+ if (session == null || !remoteIp.equals(session.getIp())) {
+ return null;
+ }
+
+ return session;
+ }
}
\ No newline at end of file
src/web/js/azkaban/model/job-log.js 10(+5 -5)
diff --git a/src/web/js/azkaban/model/job-log.js b/src/web/js/azkaban/model/job-log.js
index d295578..681875b 100644
--- a/src/web/js/azkaban/model/job-log.js
+++ b/src/web/js/azkaban/model/job-log.js
@@ -1,12 +1,12 @@
/*
* Copyright 2014 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -23,12 +23,12 @@ azkaban.JobLogModel = Backbone.Model.extend({
},
refresh: function() {
- var requestURL = contextURL + "/executor";
+ var requestURL = contextURL + "/executor";
var finished = false;
var date = new Date();
var startTime = date.getTime();
-
+
while (!finished) {
var requestData = {
"execid": execId,
src/web/js/azkaban/model/svg-graph.js 114(+57 -57)
diff --git a/src/web/js/azkaban/model/svg-graph.js b/src/web/js/azkaban/model/svg-graph.js
index c683997..580b747 100644
--- a/src/web/js/azkaban/model/svg-graph.js
+++ b/src/web/js/azkaban/model/svg-graph.js
@@ -1,12 +1,12 @@
/*
* Copyright 2014 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -17,63 +17,63 @@
$.namespace('azkaban');
azkaban.GraphModel = Backbone.Model.extend({
- initialize: function() {
+ initialize: function() {
- },
+ },
- /*
- * Process and add data from JSON.
- */
- addFlow: function(data) {
- this.processFlowData(data);
- this.set({'data': data});
- },
-
- processFlowData: function(data) {
- var nodes = {};
- var edges = new Array();
+ /*
+ * Process and add data from JSON.
+ */
+ addFlow: function(data) {
+ this.processFlowData(data);
+ this.set({'data': data});
+ },
- // Create a node map
- for (var i = 0; i < data.nodes.length; ++i) {
- var node = data.nodes[i];
- nodes[node.id] = node;
- if (!node.status) {
- node.status = "READY";
- }
- }
+ processFlowData: function(data) {
+ var nodes = {};
+ var edges = new Array();
- // Create each node in and out nodes. Create an edge list.
- for (var i = 0; i < data.nodes.length; ++i) {
- var node = data.nodes[i];
- if (node.in) {
- for (var j = 0; j < node.in.length; ++j) {
- var fromNode = nodes[node.in[j]];
- if (!fromNode.outNodes) {
- fromNode.outNodes = {};
- }
- if (!node.inNodes) {
- node.inNodes = {};
- }
-
- fromNode.outNodes[node.id] = node;
- node.inNodes[fromNode.id] = fromNode;
- edges.push({to: node.id, from: fromNode.id});
- }
- }
- }
+ // Create a node map
+ for (var i = 0; i < data.nodes.length; ++i) {
+ var node = data.nodes[i];
+ nodes[node.id] = node;
+ if (!node.status) {
+ node.status = "READY";
+ }
+ }
- // Iterate over the nodes again. Parse the data if they're embedded flow data.
- // Assign each nodes to the parent flow data.
- for (var key in nodes) {
- var node = nodes[key];
- node.parent = data;
- if (node.type == "flow") {
- this.processFlowData(node);
- }
- }
-
- // Assign the node map and the edge list
- data.nodeMap = nodes;
- data.edges = edges;
- }
+ // Create each node in and out nodes. Create an edge list.
+ for (var i = 0; i < data.nodes.length; ++i) {
+ var node = data.nodes[i];
+ if (node.in) {
+ for (var j = 0; j < node.in.length; ++j) {
+ var fromNode = nodes[node.in[j]];
+ if (!fromNode.outNodes) {
+ fromNode.outNodes = {};
+ }
+ if (!node.inNodes) {
+ node.inNodes = {};
+ }
+
+ fromNode.outNodes[node.id] = node;
+ node.inNodes[fromNode.id] = fromNode;
+ edges.push({to: node.id, from: fromNode.id});
+ }
+ }
+ }
+
+ // Iterate over the nodes again. Parse the data if they're embedded flow data.
+ // Assign each node to the parent flow data.
+ for (var key in nodes) {
+ var node = nodes[key];
+ node.parent = data;
+ if (node.type == "flow") {
+ this.processFlowData(node);
+ }
+ }
+
+ // Assign the node map and the edge list
+ data.nodeMap = nodes;
+ data.edges = edges;
+ }
});
src/web/js/azkaban/util/ajax.js 350(+175 -175)
diff --git a/src/web/js/azkaban/util/ajax.js b/src/web/js/azkaban/util/ajax.js
index 342e2af..1194d85 100644
--- a/src/web/js/azkaban/util/ajax.js
+++ b/src/web/js/azkaban/util/ajax.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -15,148 +15,148 @@
*/
function ajaxCall(requestURL, data, callback) {
- var successHandler = function(data) {
- if (data.error == "session") {
- // We need to relogin.
- var errorDialog = document.getElementById("invalid-session");
- if (errorDialog) {
- $(errorDialog).modal({
- closeHTML: "<a href='#' title='Close' class='modal-close'>x</a>",
- position: ["20%",],
- containerId: 'confirm-container',
- containerCss: {
- 'height': '220px',
- 'width': '565px'
- },
- onClose: function (dialog) {
- window.location.reload();
- }
- });
- }
- }
- else {
- callback.call(this,data);
- }
- };
- $.get(requestURL, data, successHandler, "json");
+ var successHandler = function(data) {
+ if (data.error == "session") {
+ // We need to relogin.
+ var errorDialog = document.getElementById("invalid-session");
+ if (errorDialog) {
+ $(errorDialog).modal({
+ closeHTML: "<a href='#' title='Close' class='modal-close'>x</a>",
+ position: ["20%",],
+ containerId: 'confirm-container',
+ containerCss: {
+ 'height': '220px',
+ 'width': '565px'
+ },
+ onClose: function (dialog) {
+ window.location.reload();
+ }
+ });
+ }
+ }
+ else {
+ callback.call(this,data);
+ }
+ };
+ $.get(requestURL, data, successHandler, "json");
}
function executeFlow(executingData) {
- executeURL = contextURL + "/executor";
- var successHandler = function(data) {
- if (data.error) {
- flowExecuteDialogView.hideExecutionOptionPanel();
- messageDialogView.show("Error Executing Flow", data.error);
- }
- else {
- flowExecuteDialogView.hideExecutionOptionPanel();
- messageDialogView.show("Flow submitted", data.message,
- function() {
- var redirectURL = contextURL + "/executor?execid=" + data.execid;
- window.location.href = redirectURL;
- }
- );
- }
- };
-
- $.get(executeURL, executingData, successHandler, "json");
+ executeURL = contextURL + "/executor";
+ var successHandler = function(data) {
+ if (data.error) {
+ flowExecuteDialogView.hideExecutionOptionPanel();
+ messageDialogView.show("Error Executing Flow", data.error);
+ }
+ else {
+ flowExecuteDialogView.hideExecutionOptionPanel();
+ messageDialogView.show("Flow submitted", data.message,
+ function() {
+ var redirectURL = contextURL + "/executor?execid=" + data.execid;
+ window.location.href = redirectURL;
+ }
+ );
+ }
+ };
+
+ $.get(executeURL, executingData, successHandler, "json");
}
function fetchFlowInfo(model, projectName, flowId, execId) {
- var fetchData = {"project": projectName, "ajax":"flowInfo", "flow":flowId};
- if (execId) {
- fetchData.execid = execId;
- }
-
- var executeURL = contextURL + "/executor";
- var successHandler = function(data) {
- if (data.error) {
- alert(data.error);
- }
- else {
- model.set({
- "successEmails": data.successEmails,
- "failureEmails": data.failureEmails,
- "failureAction": data.failureAction,
- "notifyFailure": {
- "first": data.notifyFailureFirst,
- "last": data.notifyFailureLast
- },
- "flowParams": data.flowParam,
- "isRunning": data.running,
- "nodeStatus": data.nodeStatus,
- "concurrentOption": data.concurrentOptions,
- "pipelineLevel": data.pipelineLevel,
- "pipelineExecution": data.pipelineExecution,
- "queueLevel":data.queueLevel
- });
- }
- model.trigger("change:flowinfo");
- };
-
- $.ajax({
- url: executeURL,
- data: fetchData,
- success: successHandler,
- dataType: "json",
- async: false
- });
+ var fetchData = {"project": projectName, "ajax":"flowInfo", "flow":flowId};
+ if (execId) {
+ fetchData.execid = execId;
+ }
+
+ var executeURL = contextURL + "/executor";
+ var successHandler = function(data) {
+ if (data.error) {
+ alert(data.error);
+ }
+ else {
+ model.set({
+ "successEmails": data.successEmails,
+ "failureEmails": data.failureEmails,
+ "failureAction": data.failureAction,
+ "notifyFailure": {
+ "first": data.notifyFailureFirst,
+ "last": data.notifyFailureLast
+ },
+ "flowParams": data.flowParam,
+ "isRunning": data.running,
+ "nodeStatus": data.nodeStatus,
+ "concurrentOption": data.concurrentOptions,
+ "pipelineLevel": data.pipelineLevel,
+ "pipelineExecution": data.pipelineExecution,
+ "queueLevel":data.queueLevel
+ });
+ }
+ model.trigger("change:flowinfo");
+ };
+
+ $.ajax({
+ url: executeURL,
+ data: fetchData,
+ success: successHandler,
+ dataType: "json",
+ async: false
+ });
}
function fetchFlow(model, projectName, flowId, sync) {
- // Just in case people don't set sync
- sync = sync ? true : false;
- var managerUrl = contextURL + "/manager";
- var fetchData = {
- "ajax" : "fetchflowgraph",
- "project" : projectName,
- "flow" : flowId
- };
- var successHandler = function(data) {
- if (data.error) {
- alert(data.error);
- }
- else {
- var disabled = data.disabled ? data.disabled : {};
- model.set({
- flowId: data.flowId,
- data: data,
- disabled: disabled
- });
-
- var nodeMap = {};
- for (var i = 0; i < data.nodes.length; ++i) {
- var node = data.nodes[i];
- nodeMap[node.id] = node;
- }
-
- for (var i = 0; i < data.edges.length; ++i) {
- var edge = data.edges[i];
-
- if (!nodeMap[edge.target].in) {
- nodeMap[edge.target].in = {};
- }
- var targetInMap = nodeMap[edge.target].in;
- targetInMap[edge.from] = nodeMap[edge.from];
-
- if (!nodeMap[edge.from].out) {
- nodeMap[edge.from].out = {};
- }
- var sourceOutMap = nodeMap[edge.from].out;
- sourceOutMap[edge.target] = nodeMap[edge.target];
- }
-
- model.set({nodeMap: nodeMap});
- }
- };
-
- $.ajax({
- url: managerUrl,
- data: fetchData,
- success: successHandler,
- dataType: "json",
- async: !sync
- });
+ // Just in case people don't set sync
+ sync = sync ? true : false;
+ var managerUrl = contextURL + "/manager";
+ var fetchData = {
+ "ajax" : "fetchflowgraph",
+ "project" : projectName,
+ "flow" : flowId
+ };
+ var successHandler = function(data) {
+ if (data.error) {
+ alert(data.error);
+ }
+ else {
+ var disabled = data.disabled ? data.disabled : {};
+ model.set({
+ flowId: data.flowId,
+ data: data,
+ disabled: disabled
+ });
+
+ var nodeMap = {};
+ for (var i = 0; i < data.nodes.length; ++i) {
+ var node = data.nodes[i];
+ nodeMap[node.id] = node;
+ }
+
+ for (var i = 0; i < data.edges.length; ++i) {
+ var edge = data.edges[i];
+
+ if (!nodeMap[edge.target].in) {
+ nodeMap[edge.target].in = {};
+ }
+ var targetInMap = nodeMap[edge.target].in;
+ targetInMap[edge.from] = nodeMap[edge.from];
+
+ if (!nodeMap[edge.from].out) {
+ nodeMap[edge.from].out = {};
+ }
+ var sourceOutMap = nodeMap[edge.from].out;
+ sourceOutMap[edge.target] = nodeMap[edge.target];
+ }
+
+ model.set({nodeMap: nodeMap});
+ }
+ };
+
+ $.ajax({
+ url: managerUrl,
+ data: fetchData,
+ success: successHandler,
+ dataType: "json",
+ async: !sync
+ });
}
/**
@@ -164,43 +164,43 @@ function fetchFlow(model, projectName, flowId, sync) {
*
*/
function flowExecutingStatus(projectName, flowId) {
- var requestURL = contextURL + "/executor";
-
- var executionIds;
- var successHandler = function(data) {
- if (data.error == "session") {
- // We need to relogin.
- var errorDialog = document.getElementById("invalid-session");
- if (errorDialog) {
- $(errorDialog).modal({
- closeHTML: "<a href='#' title='Close' class='modal-close'>x</a>",
- position: ["20%",],
- containerId: 'confirm-container',
- containerCss: {
- 'height': '220px',
- 'width': '565px'
- },
- onClose: function (dialog) {
- window.location.reload();
- }
- });
- }
- }
- else {
- executionIds = data.execIds;
- }
- };
- $.ajax({
- url: requestURL,
- async: false,
- data: {
- "ajax": "getRunning",
- "project": projectName,
- "flow": flowId
- },
- error: function(data) {},
- success: successHandler
- });
-
- return executionIds;
+ var requestURL = contextURL + "/executor";
+
+ var executionIds;
+ var successHandler = function(data) {
+ if (data.error == "session") {
+ // We need to relogin.
+ var errorDialog = document.getElementById("invalid-session");
+ if (errorDialog) {
+ $(errorDialog).modal({
+ closeHTML: "<a href='#' title='Close' class='modal-close'>x</a>",
+ position: ["20%",],
+ containerId: 'confirm-container',
+ containerCss: {
+ 'height': '220px',
+ 'width': '565px'
+ },
+ onClose: function (dialog) {
+ window.location.reload();
+ }
+ });
+ }
+ }
+ else {
+ executionIds = data.execIds;
+ }
+ };
+ $.ajax({
+ url: requestURL,
+ async: false,
+ data: {
+ "ajax": "getRunning",
+ "project": projectName,
+ "flow": flowId
+ },
+ error: function(data) {},
+ success: successHandler
+ });
+
+ return executionIds;
}
src/web/js/azkaban/util/common.js 34(+17 -17)
diff --git a/src/web/js/azkaban/util/common.js b/src/web/js/azkaban/util/common.js
index 974000c..a295218 100644
--- a/src/web/js/azkaban/util/common.js
+++ b/src/web/js/azkaban/util/common.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -15,26 +15,26 @@
*/
function addClass(el, name) {
- if (!hasClass(el, name)) {
- var classes = el.getAttribute("class");
- classes += classes ? ' ' + name : '' +name;
- el.setAttribute("class", classes);
- }
+ if (!hasClass(el, name)) {
+ var classes = el.getAttribute("class");
+ classes += classes ? ' ' + name : '' +name;
+ el.setAttribute("class", classes);
+ }
}
function removeClass(el, name) {
- if (hasClass(el, name)) {
- var classes = el.getAttribute("class");
- el.setAttribute("class", classes.replace(new RegExp('(\\s|^)'+name+'(\\s|$)'),' ').replace(/^\s+|\s+$/g, ''));
- }
+ if (hasClass(el, name)) {
+ var classes = el.getAttribute("class");
+ el.setAttribute("class", classes.replace(new RegExp('(\\s|^)'+name+'(\\s|$)'),' ').replace(/^\s+|\s+$/g, ''));
+ }
}
function hasClass(el, name) {
- var classes = el.getAttribute("class");
- if (classes == null) {
- return false;
- }
- return new RegExp('(\\s|^)'+name+'(\\s|$)').test(classes);
+ var classes = el.getAttribute("class");
+ if (classes == null) {
+ return false;
+ }
+ return new RegExp('(\\s|^)'+name+'(\\s|$)').test(classes);
}
function sizeStrToBytes(str) {
src/web/js/azkaban/util/date.js 108(+54 -54)
diff --git a/src/web/js/azkaban/util/date.js b/src/web/js/azkaban/util/date.js
index d1c7791..c7dca87 100644
--- a/src/web/js/azkaban/util/date.js
+++ b/src/web/js/azkaban/util/date.js
@@ -17,76 +17,76 @@
var TIMESTAMP_LENGTH = 13;
var getDuration = function(startMs, endMs) {
- if (startMs) {
- if (endMs == null || endMs < startMs) {
- return "-";
- }
+ if (startMs) {
+ if (endMs == null || endMs < startMs) {
+ return "-";
+ }
- var diff = endMs - startMs;
- return formatDuration(diff, false);
- }
+ var diff = endMs - startMs;
+ return formatDuration(diff, false);
+ }
- return "-";
+ return "-";
}
var formatDuration = function(duration, millisecSig) {
- var diff = duration;
- var seconds = Math.floor(diff / 1000);
-
- if (seconds < 60) {
- if (millisecSig) {
- return (diff / 1000).toFixed(millisecSig) + " s";
- }
- else {
- return seconds + " sec";
- }
- }
-
- var mins = Math.floor(seconds / 60);
- seconds = seconds % 60;
- if (mins < 60) {
- return mins + "m " + seconds + "s";
- }
-
- var hours = Math.floor(mins / 60);
- mins = mins % 60;
- if (hours < 24) {
- return hours + "h " + mins + "m " + seconds + "s";
- }
-
- var days = Math.floor(hours / 24);
- hours = hours % 24;
-
- return days + "d " + hours + "h " + mins + "m";
+ var diff = duration;
+ var seconds = Math.floor(diff / 1000);
+
+ if (seconds < 60) {
+ if (millisecSig) {
+ return (diff / 1000).toFixed(millisecSig) + " s";
+ }
+ else {
+ return seconds + " sec";
+ }
+ }
+
+ var mins = Math.floor(seconds / 60);
+ seconds = seconds % 60;
+ if (mins < 60) {
+ return mins + "m " + seconds + "s";
+ }
+
+ var hours = Math.floor(mins / 60);
+ mins = mins % 60;
+ if (hours < 24) {
+ return hours + "h " + mins + "m " + seconds + "s";
+ }
+
+ var days = Math.floor(hours / 24);
+ hours = hours % 24;
+
+ return days + "d " + hours + "h " + mins + "m";
}
var getDateFormat = function(date) {
- var year = date.getFullYear();
- var month = getTwoDigitStr(date.getMonth() + 1);
- var day = getTwoDigitStr(date.getDate());
+ var year = date.getFullYear();
+ var month = getTwoDigitStr(date.getMonth() + 1);
+ var day = getTwoDigitStr(date.getDate());
- var hours = getTwoDigitStr(date.getHours());
- var minutes = getTwoDigitStr(date.getMinutes());
- var second = getTwoDigitStr(date.getSeconds());
+ var hours = getTwoDigitStr(date.getHours());
+ var minutes = getTwoDigitStr(date.getMinutes());
+ var second = getTwoDigitStr(date.getSeconds());
- var datestring = year + "-" + month + "-" + day + " " + hours + ":" +
- minutes + " " + second + "s";
- return datestring;
+ var datestring = year + "-" + month + "-" + day + " " + hours + ":" +
+ minutes + " " + second + "s";
+ return datestring;
}
var getHourMinSec = function(date) {
- var hours = getTwoDigitStr(date.getHours());
- var minutes = getTwoDigitStr(date.getMinutes());
- var second = getTwoDigitStr(date.getSeconds());
+ var hours = getTwoDigitStr(date.getHours());
+ var minutes = getTwoDigitStr(date.getMinutes());
+ var second = getTwoDigitStr(date.getSeconds());
- var timestring = hours + ":" + minutes + " " + second + "s";
- return timestring;
+ var timestring = hours + ":" + minutes + " " + second + "s";
+ return timestring;
}
var getTwoDigitStr = function(value) {
- if (value < 10) {
- return "0" + value;
- }
+ if (value < 10) {
+ return "0" + value;
+ }
- return value;
+ return value;
}
src/web/js/azkaban/util/flow-loader.js 246(+123 -123)
diff --git a/src/web/js/azkaban/util/flow-loader.js b/src/web/js/azkaban/util/flow-loader.js
index 7c9cf29..015f02a 100644
--- a/src/web/js/azkaban/util/flow-loader.js
+++ b/src/web/js/azkaban/util/flow-loader.js
@@ -1,12 +1,12 @@
/*
* Copyright 2014 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -17,28 +17,28 @@
var extendedViewPanels = {};
var extendedDataModels = {};
var openJobDisplayCallback = function(nodeId, flowId, evt) {
- console.log("Open up data");
+ console.log("Open up data");
- /*
- $("#flowInfoBase").before(cloneStuff);
- var requestURL = contextURL + "/manager";
-
- $.get(
+ /*
+ $("#flowInfoBase").before(cloneStuff);
+ var requestURL = contextURL + "/manager";
+
+ $.get(
requestURL,
{"project": projectName, "ajax":"fetchflownodedata", "flow":flowId, "node": nodeId},
function(data) {
- var graphModel = new azkaban.GraphModel();
- graphModel.set({id: data.id, flow: data.flowData, type: data.type, props: data.props});
-
- var flowData = data.flowData;
- if (flowData) {
- createModelFromAjaxCall(flowData, graphModel);
- }
-
- var backboneView = new azkaban.FlowExtendedViewPanel({el:cloneStuff, model: graphModel});
- extendedViewPanels[nodeInfoPanelID] = backboneView;
- extendedDataModels[nodeInfoPanelID] = graphModel;
- backboneView.showExtendedView(evt);
+ var graphModel = new azkaban.GraphModel();
+ graphModel.set({id: data.id, flow: data.flowData, type: data.type, props: data.props});
+
+ var flowData = data.flowData;
+ if (flowData) {
+ createModelFromAjaxCall(flowData, graphModel);
+ }
+
+ var backboneView = new azkaban.FlowExtendedViewPanel({el:cloneStuff, model: graphModel});
+ extendedViewPanels[nodeInfoPanelID] = backboneView;
+ extendedDataModels[nodeInfoPanelID] = graphModel;
+ backboneView.showExtendedView(evt);
},
"json"
);
@@ -46,122 +46,122 @@ var openJobDisplayCallback = function(nodeId, flowId, evt) {
}
var createNewPanel = function(node, model, evt) {
- var parentPath = node.parentPath;
-
- var nodeInfoPanelID = parentPath ? parentPath + ":" + node.id + "-info" : node.id + "-info";
- var cloneStuff = $("#flowInfoBase").clone();
- cloneStuff.data = node;
- $(cloneStuff).attr("id", nodeInfoPanelID);
- $("#flowInfoBase").before(cloneStuff);
-
- var backboneView = new azkaban.FlowExtendedViewPanel({el:cloneStuff, model: model});
- node.panel = backboneView;
- backboneView.showExtendedView(evt);
+ var parentPath = node.parentPath;
+
+ var nodeInfoPanelID = parentPath ? parentPath + ":" + node.id + "-info" : node.id + "-info";
+ var cloneStuff = $("#flowInfoBase").clone();
+ cloneStuff.data = node;
+ $(cloneStuff).attr("id", nodeInfoPanelID);
+ $("#flowInfoBase").before(cloneStuff);
+
+ var backboneView = new azkaban.FlowExtendedViewPanel({el:cloneStuff, model: model});
+ node.panel = backboneView;
+ backboneView.showExtendedView(evt);
}
var closeAllSubDisplays = function() {
- $(".flowExtendedView").hide();
+ $(".flowExtendedView").hide();
}
var nodeClickCallback = function(event, model, node) {
- console.log("Node clicked callback");
-
- var target = event.currentTarget;
- var type = node.type;
- var flowId = node.parent.flow;
- var jobId = node.id;
-
- var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" + flowId + "&job=" + jobId;
- var menu = [];
-
- if (type == "flow") {
- var flowRequestURL = contextURL + "/manager?project=" + projectName + "&flow=" + node.flowId;
- if (node.expanded) {
- menu = [{title: "Collapse Flow...", callback: function() {model.trigger("collapseFlow", node);}}];
- }
- else {
- menu = [{title: "Expand Flow...", callback: function() {model.trigger("expandFlow", node);}}];
- }
-
- $.merge(menu, [
- // {title: "View Properties...", callback: function() {openJobDisplayCallback(jobId, flowId, event)}},
- {break: 1},
- {title: "Open Flow...", callback: function() {window.location.href=flowRequestURL;}},
- {title: "Open Flow in New Window...", callback: function() {window.open(flowRequestURL);}},
- {break: 1},
- {title: "Open Properties...", callback: function() {window.location.href=requestURL;}},
- {title: "Open Properties in New Window...", callback: function() {window.open(requestURL);}},
- {break: 1},
- {title: "Center Flow", callback: function() {model.trigger("centerNode", node);}}
- ]);
- }
- else {
- menu = [
- // {title: "View Properties...", callback: function() {openJobDisplayCallback(jobId, flowId, event)}},
- // {break: 1},
- {title: "Open Job...", callback: function() {window.location.href=requestURL;}},
- {title: "Open Job in New Window...", callback: function() {window.open(requestURL);}},
- {break: 1},
- {title: "Center Job", callback: function() {model.trigger("centerNode", node)}}
- ];
- }
- contextMenuView.show(event, menu);
+ console.log("Node clicked callback");
+
+ var target = event.currentTarget;
+ var type = node.type;
+ var flowId = node.parent.flow;
+ var jobId = node.id;
+
+ var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" + flowId + "&job=" + jobId;
+ var menu = [];
+
+ if (type == "flow") {
+ var flowRequestURL = contextURL + "/manager?project=" + projectName + "&flow=" + node.flowId;
+ if (node.expanded) {
+ menu = [{title: "Collapse Flow...", callback: function() {model.trigger("collapseFlow", node);}}];
+ }
+ else {
+ menu = [{title: "Expand Flow...", callback: function() {model.trigger("expandFlow", node);}}];
+ }
+
+ $.merge(menu, [
+ // {title: "View Properties...", callback: function() {openJobDisplayCallback(jobId, flowId, event)}},
+ {break: 1},
+ {title: "Open Flow...", callback: function() {window.location.href=flowRequestURL;}},
+ {title: "Open Flow in New Window...", callback: function() {window.open(flowRequestURL);}},
+ {break: 1},
+ {title: "Open Properties...", callback: function() {window.location.href=requestURL;}},
+ {title: "Open Properties in New Window...", callback: function() {window.open(requestURL);}},
+ {break: 1},
+ {title: "Center Flow", callback: function() {model.trigger("centerNode", node);}}
+ ]);
+ }
+ else {
+ menu = [
+ // {title: "View Properties...", callback: function() {openJobDisplayCallback(jobId, flowId, event)}},
+ // {break: 1},
+ {title: "Open Job...", callback: function() {window.location.href=requestURL;}},
+ {title: "Open Job in New Window...", callback: function() {window.open(requestURL);}},
+ {break: 1},
+ {title: "Center Job", callback: function() {model.trigger("centerNode", node)}}
+ ];
+ }
+ contextMenuView.show(event, menu);
}
var jobClickCallback = function(event, model, node) {
- console.log("Node clicked callback");
- var target = event.currentTarget;
- var type = node.type;
- var flowId = node.parent.flow;
- var jobId = node.id;
-
- var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" + flowId + "&job=" + node.id;
-
- var menu;
- if (type == "flow") {
- var flowRequestURL = contextURL + "/manager?project=" + projectName + "&flow=" + node.flowId;
- menu = [
- // {title: "View Properties...", callback: function() {openJobDisplayCallback(jobId, flowId, event)}},
- // {break: 1},
- {title: "Open Flow...", callback: function() {window.location.href=flowRequestURL;}},
- {title: "Open Flow in New Window...", callback: function() {window.open(flowRequestURL);}},
- {break: 1},
- {title: "Open Properties...", callback: function() {window.location.href=requestURL;}},
- {title: "Open Properties in New Window...", callback: function() {window.open(requestURL);}},
- {break: 1},
- {title: "Center Flow", callback: function() {model.trigger("centerNode", node)}}
- ];
- }
- else {
- menu = [
- // {title: "View Job...", callback: function() {openJobDisplayCallback(jobId, flowId, event)}},
- // {break: 1},
- {title: "Open Job...", callback: function() {window.location.href=requestURL;}},
- {title: "Open Job in New Window...", callback: function() {window.open(requestURL);}},
- {break: 1},
- {title: "Center Job", callback: function() {graphModel.trigger("centerNode", node)}}
- ];
- }
- contextMenuView.show(event, menu);
+ console.log("Node clicked callback");
+ var target = event.currentTarget;
+ var type = node.type;
+ var flowId = node.parent.flow;
+ var jobId = node.id;
+
+ var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" + flowId + "&job=" + node.id;
+
+ var menu;
+ if (type == "flow") {
+ var flowRequestURL = contextURL + "/manager?project=" + projectName + "&flow=" + node.flowId;
+ menu = [
+ // {title: "View Properties...", callback: function() {openJobDisplayCallback(jobId, flowId, event)}},
+ // {break: 1},
+ {title: "Open Flow...", callback: function() {window.location.href=flowRequestURL;}},
+ {title: "Open Flow in New Window...", callback: function() {window.open(flowRequestURL);}},
+ {break: 1},
+ {title: "Open Properties...", callback: function() {window.location.href=requestURL;}},
+ {title: "Open Properties in New Window...", callback: function() {window.open(requestURL);}},
+ {break: 1},
+ {title: "Center Flow", callback: function() {model.trigger("centerNode", node)}}
+ ];
+ }
+ else {
+ menu = [
+ // {title: "View Job...", callback: function() {openJobDisplayCallback(jobId, flowId, event)}},
+ // {break: 1},
+ {title: "Open Job...", callback: function() {window.location.href=requestURL;}},
+ {title: "Open Job in New Window...", callback: function() {window.open(requestURL);}},
+ {break: 1},
+ {title: "Center Job", callback: function() {graphModel.trigger("centerNode", node)}}
+ ];
+ }
+ contextMenuView.show(event, menu);
}
var edgeClickCallback = function(event, model) {
- console.log("Edge clicked callback");
+ console.log("Edge clicked callback");
}
var graphClickCallback = function(event, model) {
- console.log("Graph clicked callback");
- var data = model.get("data");
- var flowId = data.flow;
- var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" + flowId;
-
- var menu = [
- {title: "Open Flow...", callback: function() {window.location.href=requestURL;}},
- {title: "Open Flow in New Window...", callback: function() {window.open(requestURL);}},
- {break: 1},
- {title: "Center Graph", callback: function() {model.trigger("resetPanZoom");}}
- ];
-
- contextMenuView.show(event, menu);
+ console.log("Graph clicked callback");
+ var data = model.get("data");
+ var flowId = data.flow;
+ var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" + flowId;
+
+ var menu = [
+ {title: "Open Flow...", callback: function() {window.location.href=requestURL;}},
+ {title: "Open Flow in New Window...", callback: function() {window.open(requestURL);}},
+ {break: 1},
+ {title: "Center Graph", callback: function() {model.trigger("resetPanZoom");}}
+ ];
+
+ contextMenuView.show(event, menu);
}
src/web/js/azkaban/util/job-status.js 34(+17 -17)
diff --git a/src/web/js/azkaban/util/job-status.js b/src/web/js/azkaban/util/job-status.js
index deef88e..fd61693 100644
--- a/src/web/js/azkaban/util/job-status.js
+++ b/src/web/js/azkaban/util/job-status.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -16,18 +16,18 @@
var statusList = ["FAILED", "FAILED_FINISHING", "SUCCEEDED", "RUNNING", "WAITING", "KILLED", "DISABLED", "READY", "CANCELLED", "UNKNOWN", "PAUSED", "SKIPPED", "QUEUED"];
var statusStringMap = {
- "QUEUED": "Queued",
- "SKIPPED": "Skipped",
- "PREPARING": "Preparing",
- "FAILED": "Failed",
- "SUCCEEDED": "Success",
- "FAILED_FINISHING": "Running w/Failure",
- "RUNNING": "Running",
- "WAITING": "Waiting",
- "KILLED": "Killed",
- "CANCELLED": "Cancelled",
- "DISABLED": "Disabled",
- "READY": "Ready",
- "UNKNOWN": "Unknown",
- "PAUSED": "Paused"
+ "QUEUED": "Queued",
+ "SKIPPED": "Skipped",
+ "PREPARING": "Preparing",
+ "FAILED": "Failed",
+ "SUCCEEDED": "Success",
+ "FAILED_FINISHING": "Running w/Failure",
+ "RUNNING": "Running",
+ "WAITING": "Waiting",
+ "KILLED": "Killed",
+ "CANCELLED": "Cancelled",
+ "DISABLED": "Disabled",
+ "READY": "Ready",
+ "UNKNOWN": "Unknown",
+ "PAUSED": "Paused"
};
src/web/js/azkaban/util/layout.js 672(+336 -336)
diff --git a/src/web/js/azkaban/util/layout.js b/src/web/js/azkaban/util/layout.js
index 96cdd21..acdb832 100644
--- a/src/web/js/azkaban/util/layout.js
+++ b/src/web/js/azkaban/util/layout.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -21,364 +21,364 @@ var maxHeight = 200;
var cornerGap = 10;
var idSort = function(a, b) {
- if ( a.id < b.id ) {
- return -1;
- }
- else if ( a.id > b.id ) {
- return 1;
- }
- else {
- return 0;
- }
+ if ( a.id < b.id ) {
+ return -1;
+ }
+ else if ( a.id > b.id ) {
+ return 1;
+ }
+ else {
+ return 0;
+ }
}
function prepareLayout(nodes, hmargin, layers, nodeMap) {
- var maxLayer = 0;
- var nodeQueue = new Array();
- // Find start layers first
- for (var i=0; i < nodes.length; ++i) {
- var node = nodes[i];
- if (node.inNodes) {
- // We sort here. Why? To keep the node drawing consistent
- node.in.sort(idSort);
- }
- else {
- // We sort here. Why? To keep it up and running.
- nodeQueue.push(node);
- }
- }
- // Sort here. To keep the node drawing consistent
- nodes.sort(idSort);
-
- // calculate level
- // breath first search the sucker
- var index = 0;
- while(index < nodeQueue.length) {
- var node = nodeQueue[index];
- if (node.inNodes) {
- var level = 0;
- for (var key in node.inNodes) {
- level = Math.max(level, node.inNodes[key].level);
- }
- node.level = level + 1;
- }
- else {
- node.level = 0;
- }
-
- if (node.outNodes) {
- for (var key in node.outNodes) {
- nodeQueue.push(node.outNodes[key]);
- }
- }
- index++;
- }
-
- // Assign to layers
- for (var i = 0; i < nodes.length; ++i) {
- var width = nodes[i].width ? nodes[i].width : nodes[i].label.length * 11.5 + 4;
- var height = nodes[i].height ? nodes[i].height : 1;
- var node = { id: nodes[i].id, node: nodes[i], level: nodes[i].level, in:[], out:[], width: width + hmargin, x:0, height:height };
- nodeMap[nodes[i].id] = node;
- maxLayer = Math.max(node.level, maxLayer);
- if(!layers[node.level]) {
- layers[node.level] = [];
- }
-
- layers[node.level].push(node);
- }
-
- layers.maxLayer = maxLayer;
+ var maxLayer = 0;
+ var nodeQueue = new Array();
+ // Find start layers first
+ for (var i=0; i < nodes.length; ++i) {
+ var node = nodes[i];
+ if (node.inNodes) {
+ // We sort here. Why? To keep the node drawing consistent
+ node.in.sort(idSort);
+ }
+ else {
+ // We sort here. Why? To keep it up and running.
+ nodeQueue.push(node);
+ }
+ }
+ // Sort here. To keep the node drawing consistent
+ nodes.sort(idSort);
+
+ // calculate level
+ // breath first search the sucker
+ var index = 0;
+ while(index < nodeQueue.length) {
+ var node = nodeQueue[index];
+ if (node.inNodes) {
+ var level = 0;
+ for (var key in node.inNodes) {
+ level = Math.max(level, node.inNodes[key].level);
+ }
+ node.level = level + 1;
+ }
+ else {
+ node.level = 0;
+ }
+
+ if (node.outNodes) {
+ for (var key in node.outNodes) {
+ nodeQueue.push(node.outNodes[key]);
+ }
+ }
+ index++;
+ }
+
+ // Assign to layers
+ for (var i = 0; i < nodes.length; ++i) {
+ var width = nodes[i].width ? nodes[i].width : nodes[i].label.length * 11.5 + 4;
+ var height = nodes[i].height ? nodes[i].height : 1;
+ var node = { id: nodes[i].id, node: nodes[i], level: nodes[i].level, in:[], out:[], width: width + hmargin, x:0, height:height };
+ nodeMap[nodes[i].id] = node;
+ maxLayer = Math.max(node.level, maxLayer);
+ if(!layers[node.level]) {
+ layers[node.level] = [];
+ }
+
+ layers[node.level].push(node);
+ }
+
+ layers.maxLayer = maxLayer;
}
function respaceGraph(nodes, edges) {
-
+
}
function layoutGraph(nodes, edges, hmargin) {
- var startLayer = [];
-
- var nodeMap = {};
- var layers = {};
-
- if (!hmargin) {
- hmargin = 8;
- }
-
- prepareLayout(nodes, hmargin, layers, nodeMap);
- var maxLayer = layers.maxLayer;
-
- // Create dummy nodes
- var edgeDummies = {};
- for (var i=0; i < edges.length; ++i ) {
- var edge = edges[i];
- var src = edges[i].from;
- var dest = edges[i].to;
-
- var edgeId = src + ">>" + dest;
-
- var srcNode = nodeMap[src];
- var destNode = nodeMap[dest];
-
- var lastNode = srcNode;
-
- var guides = [];
-
- for (var j = srcNode.level + 1; j < destNode.level; ++j) {
- var dummyNode = {level: j, in: [], x: lastNode.x, out: [], realSrc: srcNode, realDest: destNode, width: 10, height: 10};
- layers[j].push(dummyNode);
- dummyNode.in.push(lastNode);
- lastNode.out.push(dummyNode);
- lastNode = dummyNode;
-
- guides.push(dummyNode);
- }
-
- destNode.in.push(lastNode);
- lastNode.out.push(destNode);
-
- if (edgeDummies.length != 0) {
- edgeDummies[edgeId] = guides;
- }
- }
-
- spreadLayerSmart(layers[maxLayer]);
- sort(layers[maxLayer]);
- for (var i=maxLayer - 1; i >=0; --i) {
- uncrossWithOut(layers[i]);
- sort(layers[i]);
-
- spreadLayerSmart(layers[i]);
- }
-
- // The top level can get out of alignment, so we do this kick back
- // manouver before we seriously get started sorting.
- if (maxLayer > 1) {
- uncrossWithIn(layers[1]);
- sort(layers[1]);
- spreadLayerSmart(layers[1]);
-
- uncrossWithOut(layers[0]);
- sort(layers[0]);
- spreadLayerSmart(layers[0]);
- }
-
- // Uncross down
- for (var i=1; i <= maxLayer; ++i) {
- uncrossWithIn(layers[i]);
- sort(layers[i]);
- spreadLayerSmart(layers[i]);
- }
-
- // Space it vertically
- spaceVertically(layers, maxLayer);
-
- // Assign points to nodes
- for (var i = 0; i < nodes.length; ++i) {
- var node = nodes[i];
- var layerNode = nodeMap[node.id];
- node.x = layerNode.x;
- node.y = layerNode.y;
- }
-
- // Dummy node for more points.
- for (var i = 0; i < edges.length; ++i) {
- var edge = edges[i];
- var src = edges[i].from;
- var dest = edges[i].to;
-
- var edgeId = src + ">>" + dest;
- if (edgeDummies[edgeId] && edgeDummies[edgeId].length > 0) {
- var prevX = nodeMap[src].x;
- var destX = nodeMap[dest].x;
-
- var guides = [];
- var dummies = edgeDummies[edgeId];
- for (var j=0; j< dummies.length; ++j) {
- var point = {x: dummies[j].x, y: dummies[j].y};
- guides.push(point);
-
- var nextX = j == dummies.length - 1 ? destX: dummies[j + 1].x;
- if (point.x != prevX && point.x != nextX) {
- // Add gap
- if ((point.x > prevX) == (point.x > nextX)) {
- guides.push({x: point.x, y:point.y + cornerGap});
- }
- }
- prevX = point.x;
- }
-
- edge.guides = guides;
- }
- else {
- edge.guides = null;
- }
- }
+ var startLayer = [];
+
+ var nodeMap = {};
+ var layers = {};
+
+ if (!hmargin) {
+ hmargin = 8;
+ }
+
+ prepareLayout(nodes, hmargin, layers, nodeMap);
+ var maxLayer = layers.maxLayer;
+
+ // Create dummy nodes
+ var edgeDummies = {};
+ for (var i=0; i < edges.length; ++i ) {
+ var edge = edges[i];
+ var src = edges[i].from;
+ var dest = edges[i].to;
+
+ var edgeId = src + ">>" + dest;
+
+ var srcNode = nodeMap[src];
+ var destNode = nodeMap[dest];
+
+ var lastNode = srcNode;
+
+ var guides = [];
+
+ for (var j = srcNode.level + 1; j < destNode.level; ++j) {
+ var dummyNode = {level: j, in: [], x: lastNode.x, out: [], realSrc: srcNode, realDest: destNode, width: 10, height: 10};
+ layers[j].push(dummyNode);
+ dummyNode.in.push(lastNode);
+ lastNode.out.push(dummyNode);
+ lastNode = dummyNode;
+
+ guides.push(dummyNode);
+ }
+
+ destNode.in.push(lastNode);
+ lastNode.out.push(destNode);
+
+ if (edgeDummies.length != 0) {
+ edgeDummies[edgeId] = guides;
+ }
+ }
+
+ spreadLayerSmart(layers[maxLayer]);
+ sort(layers[maxLayer]);
+ for (var i=maxLayer - 1; i >=0; --i) {
+ uncrossWithOut(layers[i]);
+ sort(layers[i]);
+
+ spreadLayerSmart(layers[i]);
+ }
+
+ // The top level can get out of alignment, so we do this kick back
+ // manouver before we seriously get started sorting.
+ if (maxLayer > 1) {
+ uncrossWithIn(layers[1]);
+ sort(layers[1]);
+ spreadLayerSmart(layers[1]);
+
+ uncrossWithOut(layers[0]);
+ sort(layers[0]);
+ spreadLayerSmart(layers[0]);
+ }
+
+ // Uncross down
+ for (var i=1; i <= maxLayer; ++i) {
+ uncrossWithIn(layers[i]);
+ sort(layers[i]);
+ spreadLayerSmart(layers[i]);
+ }
+
+ // Space it vertically
+ spaceVertically(layers, maxLayer);
+
+ // Assign points to nodes
+ for (var i = 0; i < nodes.length; ++i) {
+ var node = nodes[i];
+ var layerNode = nodeMap[node.id];
+ node.x = layerNode.x;
+ node.y = layerNode.y;
+ }
+
+ // Dummy node for more points.
+ for (var i = 0; i < edges.length; ++i) {
+ var edge = edges[i];
+ var src = edges[i].from;
+ var dest = edges[i].to;
+
+ var edgeId = src + ">>" + dest;
+ if (edgeDummies[edgeId] && edgeDummies[edgeId].length > 0) {
+ var prevX = nodeMap[src].x;
+ var destX = nodeMap[dest].x;
+
+ var guides = [];
+ var dummies = edgeDummies[edgeId];
+ for (var j=0; j< dummies.length; ++j) {
+ var point = {x: dummies[j].x, y: dummies[j].y};
+ guides.push(point);
+
+ var nextX = j == dummies.length - 1 ? destX: dummies[j + 1].x;
+ if (point.x != prevX && point.x != nextX) {
+ // Add gap
+ if ((point.x > prevX) == (point.x > nextX)) {
+ guides.push({x: point.x, y:point.y + cornerGap});
+ }
+ }
+ prevX = point.x;
+ }
+
+ edge.guides = guides;
+ }
+ else {
+ edge.guides = null;
+ }
+ }
}
function spreadLayerSmart(layer) {
- var ranges = [];
- ranges.push({
- start: 0,
- end: 0,
- width: layer[0].width,
- x: layer[0].x,
- index: 0
- });
- var largestRangeIndex = -1;
-
- var totalX = layer[0].x;
- var totalWidth = layer[0].width;
- var count = 1;
-
- for (var i = 1; i < layer.length; ++i ) {
- var prevRange = ranges[ranges.length - 1];
- var delta = layer[i].x - prevRange.x;
-
- if (delta == 0) {
- prevRange.end = i;
- prevRange.width += layer[i].width;
- totalWidth += layer[i].width;
- }
- else {
- totalWidth += Math.max(layer[i].width, delta);
- ranges.push({
- start: i,
- end: i,
- width: layer[i].width,
- x: layer[i].x,
- index: ranges.length
- });
- }
-
- totalX += layer[i].x;
- count++;
- }
-
- // Space the ranges, but place the left and right most last
- var startIndex = 0;
- var endIndex = 0;
- if (ranges.length == 1) {
- startIndex = -1;
- endIndex = 1;
- }
- else if ((ranges.length % 2) == 1) {
- var index = Math.ceil(ranges.length/2);
- startIndex = index - 1;
- endIndex = index + 1;
- }
- else {
- var e = ranges.length/2;
- var s = e - 1;
-
- var crossPointS = ranges[s].x + ranges[s].width/2;
- var crossPointE = ranges[e].x - ranges[e].width/2;
-
- if (crossPointS > crossPointE) {
- var midPoint = (ranges[s].x + ranges[e].x)/2;
- ranges[s].x = midPoint - ranges[s].width/2;
- ranges[e].x = midPoint + ranges[e].width/2;
- }
-
- startIndex = s - 1;
- endIndex = e + 1;
- }
-
- for (var i = startIndex; i >= 0; --i) {
- var range = ranges[i];
- var crossPointS = range.x + range.width/2;
- var crossPointE = ranges[i + 1].x - ranges[i + 1].width/2;
- if (crossPointE < crossPointS) {
- range.x -= crossPointS - crossPointE;
- }
- }
-
- for (var i = endIndex; i < ranges.length; ++i) {
- var range = ranges[i];
- var crossPointE = range.x - range.width/2;
- var crossPointS = ranges[i - 1].x + ranges[i - 1].width/2;
- if (crossPointE < crossPointS) {
- range.x += crossPointS - crossPointE;
- }
- }
-
- for (var i = 0; i < ranges.length; ++i) {
- var range = ranges[i];
- if (range.start == range.end) {
- layer[range.start].x = range.x;
- }
- else {
- var start = range.x - range.width/2;
- for (var j=range.start;j <=range.end; ++j) {
- layer[j].x = start + layer[j].width/2;
- start += layer[j].width;
- }
- }
- }
+ var ranges = [];
+ ranges.push({
+ start: 0,
+ end: 0,
+ width: layer[0].width,
+ x: layer[0].x,
+ index: 0
+ });
+ var largestRangeIndex = -1;
+
+ var totalX = layer[0].x;
+ var totalWidth = layer[0].width;
+ var count = 1;
+
+ for (var i = 1; i < layer.length; ++i ) {
+ var prevRange = ranges[ranges.length - 1];
+ var delta = layer[i].x - prevRange.x;
+
+ if (delta == 0) {
+ prevRange.end = i;
+ prevRange.width += layer[i].width;
+ totalWidth += layer[i].width;
+ }
+ else {
+ totalWidth += Math.max(layer[i].width, delta);
+ ranges.push({
+ start: i,
+ end: i,
+ width: layer[i].width,
+ x: layer[i].x,
+ index: ranges.length
+ });
+ }
+
+ totalX += layer[i].x;
+ count++;
+ }
+
+ // Space the ranges, but place the left and right most last
+ var startIndex = 0;
+ var endIndex = 0;
+ if (ranges.length == 1) {
+ startIndex = -1;
+ endIndex = 1;
+ }
+ else if ((ranges.length % 2) == 1) {
+ var index = Math.ceil(ranges.length/2);
+ startIndex = index - 1;
+ endIndex = index + 1;
+ }
+ else {
+ var e = ranges.length/2;
+ var s = e - 1;
+
+ var crossPointS = ranges[s].x + ranges[s].width/2;
+ var crossPointE = ranges[e].x - ranges[e].width/2;
+
+ if (crossPointS > crossPointE) {
+ var midPoint = (ranges[s].x + ranges[e].x)/2;
+ ranges[s].x = midPoint - ranges[s].width/2;
+ ranges[e].x = midPoint + ranges[e].width/2;
+ }
+
+ startIndex = s - 1;
+ endIndex = e + 1;
+ }
+
+ for (var i = startIndex; i >= 0; --i) {
+ var range = ranges[i];
+ var crossPointS = range.x + range.width/2;
+ var crossPointE = ranges[i + 1].x - ranges[i + 1].width/2;
+ if (crossPointE < crossPointS) {
+ range.x -= crossPointS - crossPointE;
+ }
+ }
+
+ for (var i = endIndex; i < ranges.length; ++i) {
+ var range = ranges[i];
+ var crossPointE = range.x - range.width/2;
+ var crossPointS = ranges[i - 1].x + ranges[i - 1].width/2;
+ if (crossPointE < crossPointS) {
+ range.x += crossPointS - crossPointE;
+ }
+ }
+
+ for (var i = 0; i < ranges.length; ++i) {
+ var range = ranges[i];
+ if (range.start == range.end) {
+ layer[range.start].x = range.x;
+ }
+ else {
+ var start = range.x - range.width/2;
+ for (var j=range.start;j <=range.end; ++j) {
+ layer[j].x = start + layer[j].width/2;
+ start += layer[j].width;
+ }
+ }
+ }
}
function spaceVertically(layers, maxLayer) {
- var startY = 0;
- var startLayer = layers[0];
- var startMaxHeight = 1;
- for (var i=0; i < startLayer.length; ++i) {
- startLayer[i].y = startY;
- startMaxHeight = Math.max(startMaxHeight, startLayer[i].height);
- }
-
- var minHeight = 40;
- for (var a=1; a <= maxLayer; ++a) {
- var maxDelta = 0;
- var layer = layers[a];
-
- var layerMaxHeight = 1;
- for (var i=0; i < layer.length; ++i) {
- layerMaxHeight = Math.max(layerMaxHeight, layer[i].height);
-
- for (var j=0; j < layer[i].in.length; ++j) {
- var upper = layer[i].in[j];
- var delta = Math.abs(upper.x - layer[i].x);
- maxDelta = Math.max(maxDelta, delta);
- }
- }
-
- console.log("Max " + maxDelta);
- var calcHeight = maxDelta*degreeRatio;
-
- var newMinHeight = minHeight + startMaxHeight/2 + layerMaxHeight / 2;
- startMaxHeight = layerMaxHeight;
-
- startY += Math.max(calcHeight, newMinHeight);
- for (var i=0; i < layer.length; ++i) {
- layer[i].y=startY;
- }
- }
+ var startY = 0;
+ var startLayer = layers[0];
+ var startMaxHeight = 1;
+ for (var i=0; i < startLayer.length; ++i) {
+ startLayer[i].y = startY;
+ startMaxHeight = Math.max(startMaxHeight, startLayer[i].height);
+ }
+
+ var minHeight = 40;
+ for (var a=1; a <= maxLayer; ++a) {
+ var maxDelta = 0;
+ var layer = layers[a];
+
+ var layerMaxHeight = 1;
+ for (var i=0; i < layer.length; ++i) {
+ layerMaxHeight = Math.max(layerMaxHeight, layer[i].height);
+
+ for (var j=0; j < layer[i].in.length; ++j) {
+ var upper = layer[i].in[j];
+ var delta = Math.abs(upper.x - layer[i].x);
+ maxDelta = Math.max(maxDelta, delta);
+ }
+ }
+
+ console.log("Max " + maxDelta);
+ var calcHeight = maxDelta*degreeRatio;
+
+ var newMinHeight = minHeight + startMaxHeight/2 + layerMaxHeight / 2;
+ startMaxHeight = layerMaxHeight;
+
+ startY += Math.max(calcHeight, newMinHeight);
+ for (var i=0; i < layer.length; ++i) {
+ layer[i].y=startY;
+ }
+ }
}
function uncrossWithIn(layer) {
- for (var i = 0; i < layer.length; ++i) {
- var pos = findAverage(layer[i].in);
- layer[i].x = pos;
- }
+ for (var i = 0; i < layer.length; ++i) {
+ var pos = findAverage(layer[i].in);
+ layer[i].x = pos;
+ }
}
function findAverage(nodes) {
- var sum = 0;
- for (var i = 0; i < nodes.length; ++i) {
- sum += nodes[i].x;
- }
- return sum/nodes.length;
+ var sum = 0;
+ for (var i = 0; i < nodes.length; ++i) {
+ sum += nodes[i].x;
+ }
+ return sum/nodes.length;
}
function uncrossWithOut(layer) {
- for (var i = 0; i < layer.length; ++i) {
- var pos = findAverage(layer[i].out);
- layer[i].x = pos;
- }
+ for (var i = 0; i < layer.length; ++i) {
+ var pos = findAverage(layer[i].out);
+ layer[i].x = pos;
+ }
}
function sort(layer) {
- layer.sort(function(a, b) {
- return a.x - b.x;
- });
+ layer.sort(function(a, b) {
+ return a.x - b.x;
+ });
}
src/web/js/azkaban/util/schedule.js 66(+33 -33)
diff --git a/src/web/js/azkaban/util/schedule.js b/src/web/js/azkaban/util/schedule.js
index 77f6954..de3da39 100644
--- a/src/web/js/azkaban/util/schedule.js
+++ b/src/web/js/azkaban/util/schedule.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -15,37 +15,37 @@
*/
function removeSched(scheduleId) {
- var scheduleURL = contextURL + "/schedule"
- var redirectURL = contextURL + "/schedule"
- var requestData = {
- "action": "removeSched",
- "scheduleId":scheduleId
- };
- var successHandler = function(data) {
- if (data.error) {
- $('#errorMsg').text(data.error);
- }
- else {
- window.location = redirectURL;
- }
- };
- $.post(scheduleURL, requestData, successHandler, "json");
+ var scheduleURL = contextURL + "/schedule"
+ var redirectURL = contextURL + "/schedule"
+ var requestData = {
+ "action": "removeSched",
+ "scheduleId":scheduleId
+ };
+ var successHandler = function(data) {
+ if (data.error) {
+ $('#errorMsg').text(data.error);
+ }
+ else {
+ window.location = redirectURL;
+ }
+ };
+ $.post(scheduleURL, requestData, successHandler, "json");
}
function removeSla(scheduleId) {
- var scheduleURL = contextURL + "/schedule"
- var redirectURL = contextURL + "/schedule"
- var requestData = {
- "action": "removeSla",
- "scheduleId": scheduleId
- };
- var successHandler = function(data) {
- if (data.error) {
- $('#errorMsg').text(data.error)
- }
- else {
- window.location = redirectURL
- }
- };
- $.post(scheduleURL, requestData, successHandler, "json");
+ var scheduleURL = contextURL + "/schedule"
+ var redirectURL = contextURL + "/schedule"
+ var requestData = {
+ "action": "removeSla",
+ "scheduleId": scheduleId
+ };
+ var successHandler = function(data) {
+ if (data.error) {
+ $('#errorMsg').text(data.error)
+ }
+ else {
+ window.location = redirectURL
+ }
+ };
+ $.post(scheduleURL, requestData, successHandler, "json");
}
src/web/js/azkaban/util/svg-navigate.js 784(+392 -392)
diff --git a/src/web/js/azkaban/util/svg-navigate.js b/src/web/js/azkaban/util/svg-navigate.js
index 0cae23e..0d11d6e 100644
--- a/src/web/js/azkaban/util/svg-navigate.js
+++ b/src/web/js/azkaban/util/svg-navigate.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -15,393 +15,393 @@
*/
(function($) {
- var mouseUp = function(evt) {
- if (evt.button > 1) {
- return;
- }
- var target = evt.target;
- target.mx = evt.clientX;
- target.my = evt.clientY;
- target.mDown = false;
- }
-
- var mouseDown = function(evt) {
- if (evt.button > 1) {
- return;
- }
-
- var target = evt.target;
- target.mx = evt.clientX;
- target.my = evt.clientY;
- target.mDown = true;
- }
-
- var mouseOut = function(evt) {
- var target = evt.target;
- target.mx = evt.clientX;
- target.my = evt.clientY;
- target.mDown = false;
- }
-
- var mouseMove = function(evt) {
- var target = evt.target;
- if (target.mDown) {
- var dx = evt.clientX - target.mx;
- var dy = evt.clientY - target.my;
-
- evt.dragX = dx;
- evt.dragY = dy;
- mouseDrag(evt);
- }
-
- target.mx = evt.clientX;
- target.my = evt.clientY;
- }
-
- var mouseDrag = function(evt) {
- translateDeltaGraph(evt.target, evt.dragX, evt.dragY);
- }
-
- var mouseScrolled = function(evt) {
- if (!evt) {
- evt = window.event;
- }
- var target = evt.currentTarget;
-
- var leftOffset = 0;
- var topOffset = 0;
- if (!target.marker) {
- while (!target.farthestViewportElement) {
- target = target.parentNode;
- }
-
- target = target.farthestViewportElement;
- }
-
- // Trackball/trackpad vs wheel. Need to accommodate
- var delta = 0;
- if (evt.wheelDelta) {
- if (evt.wheelDelta > 0) {
- delta = Math.ceil(evt.wheelDelta / 120);
- }
- else {
- delta = Math.floor(evt.wheelDelta / 120);
- }
- }
- else if (evt.detail) {
- if (evt.detail > 0) {
- delta = -Math.ceil(evt.detail / 3);
- }
- else {
- delta = -Math.floor(evt.detail / 3);
- }
- }
-
- var zoomLevel = boundZoomLevel(target, target.zoomIndex + delta);
- target.zoomIndex = zoomLevel;
- var scale = target.zoomLevels[zoomLevel];
-
- var x = evt.offsetX;
- var y = evt.offsetY;
- if (!x) {
- var position = $(target.parentElement).position();
- x = evt.layerX - position.left;
- y = evt.layerY - position.top;
- }
-
- evt.stopPropagation();
- evt.preventDefault();
-
- scaleGraph(target, scale, x, y);
- }
-
- this.boundZoomLevel = function(target, level) {
- if (level >= target.settings.zoomNumLevels) {
- return target.settings.zoomNumLevels - 1;
- }
- else if (level <= 0) {
- return 0;
- }
-
- return level;
- }
-
- this.scaleGraph = function(target, scale, x, y) {
- var sfactor = scale / target.scale;
- target.scale = scale;
-
- target.translateX = sfactor * target.translateX + x - sfactor * x;
- target.translateY = sfactor * target.translateY + y - sfactor * y;
-
- if (target.model) {
- target.model.trigger("scaled");
- }
- retransform(target);
- }
-
- this.translateDeltaGraph = function(target, x, y) {
- target.translateX += x;
- target.translateY += y;
- if (target.model) {
- target.model.trigger("panned");
- }
- retransform(target);
- }
-
- this.retransform = function(target) {
- var gs = target.childNodes;
-
- var transformString = "translate(" + target.translateX + "," + target.translateY +
- ") scale(" + target.scale + ")";
-
- for (var i = 0; i < gs.length; ++i) {
- var g = gs[i];
- if (g.nodeName == 'g') {
- g.setAttribute("transform", transformString);
- }
- }
-
- if (target.model) {
- var obj = target.model.get("transform");
- if (obj) {
- obj.scale = target.scale;
- obj.height = target.parentNode.clientHeight;
- obj.width = target.parentNode.clientWidth;
-
- obj.x1 = target.translateX;
- obj.y1 = target.translateY;
- obj.x2 = obj.x1 + obj.width * obj.scale;
- obj.y2 = obj.y1 + obj.height * obj.scale;
- }
- }
- }
-
- this.resetTransform = function(target) {
- var settings = target.settings;
- target.translateX = settings.x;
- target.translateY = settings.y;
-
- if (settings.x < settings.x2) {
- var factor = 0.90;
-
- // Reset scale and stuff.
- var divHeight = target.parentNode.clientHeight;
- var divWidth = target.parentNode.clientWidth;
-
- var width = settings.x2 - settings.x;
- var height = settings.y2 - settings.y;
- var aspectRatioGraph = height / width;
- var aspectRatioDiv = divHeight / divWidth;
-
- var scale = aspectRatioGraph > aspectRatioDiv
- ? (divHeight / height) * factor
- : (divWidth / width) * factor;
- target.scale = scale;
- }
- else {
- target.zoomIndex = boundZoomLevel(target, settings.zoomIndex);
- target.scale = target.zoomLevels[target.zoomIndex];
- }
- }
-
- this.animateTransform = function(target, scale, x, y, duration) {
- var zoomLevel = calculateZoomLevel(scale, target.zoomLevels);
- target.fromScaleLevel = target.zoomIndex;
- target.toScaleLevel = zoomLevel;
- target.fromX = target.translateX;
- target.fromY = target.translateY;
- target.fromScale = target.scale;
- target.toScale = target.zoomLevels[zoomLevel];
- target.toX = x;
- target.toY = y;
- target.startTime = new Date().getTime();
- target.endTime = target.startTime + duration;
-
- this.animateTick(target);
- }
-
- this.animateTick = function(target) {
- var time = new Date().getTime();
- if (time < target.endTime) {
- var timeDiff = time - target.startTime;
- var progress = timeDiff / (target.endTime - target.startTime);
-
- target.scale = (target.toScale - target.fromScale) * progress + target.fromScale;
- target.translateX = (target.toX - target.fromX) * progress + target.fromX;
- target.translateY = (target.toY - target.fromY) * progress + target.fromY;
- retransform(target);
- setTimeout(function() {
- this.animateTick(target)
- }, 1);
- }
- else {
- target.zoomIndex = target.toScaleLevel;
- target.scale = target.zoomLevels[target.zoomIndex];
- target.translateX = target.toX;
- target.translateY = target.toY;
- retransform(target);
- }
- }
-
- this.calculateZoomScale = function(scaleLevel, numLevels, points) {
- if (scaleLevel <= 0) {
- return points[0];
- }
- else if (scaleLevel >= numLevels) {
- return points[points.length - 1];
- }
- var factor = (scaleLevel / numLevels) * (points.length - 1);
- var floorIdx = Math.floor(factor);
- var ceilingIdx = Math.ceil(factor);
-
- var b = factor - floorIdx;
-
- return b * (points[ceilingIdx] - points[floorIdx]) + points[floorIdx];
- }
-
- this.calculateZoomLevel = function(scale, zoomLevels) {
- if (scale >= zoomLevels[zoomLevels.length - 1]) {
- return zoomLevels.length - 1;
- }
- else if (scale <= zoomLevels[0]) {
- return 0;
- }
-
- var i = 0;
- // Plain old linear scan
- for (; i < zoomLevels.length; ++i) {
- if (scale < zoomLevels[i]) {
- i--;
- break;
- }
- }
-
- if (i < 0) {
- return 0;
- }
-
- return i;
- }
-
- var methods = {
- init : function(options) {
- var settings = {
- x : 0,
- y : 0,
- x2 : 0,
- y2 : 0,
- minX : -1000,
- minY : -1000,
- maxX : 1000,
- maxY : 1000,
- zoomIndex : 24,
- zoomPoints : [ 0.1, 0.14, 0.2, 0.4, 0.8, 1, 1.6, 2.4, 4, 8, 16 ],
- zoomNumLevels : 48
- };
- if (options) {
- $.extend(settings, options);
- }
- return this.each(function() {
- var $this = $(this);
- this.settings = settings;
- this.marker = true;
-
- if (window.addEventListener) {
- this.addEventListener('DOMMouseScroll', mouseScrolled,false);
- }
- this.onmousewheel = mouseScrolled;
- this.onmousedown = mouseDown;
- this.onmouseup = mouseUp;
- this.onmousemove = mouseMove;
- this.onmouseout = mouseOut;
-
- this.zoomLevels = new Array(settings.zoomNumLevels);
- for ( var i = 0; i < settings.zoomNumLevels; ++i) {
- var scale = calculateZoomScale(i, settings.zoomNumLevels, settings.zoomPoints);
- this.zoomLevels[i] = scale;
- }
- resetTransform(this);
- });
- },
- transformToBox : function(arguments) {
- var $this = $(this);
- var target = ($this)[0];
- var x = arguments.x;
- var y = arguments.y;
- var factor = 0.9;
- var duration = arguments.duration;
-
- var width = arguments.width ? arguments.width : 1;
- var height = arguments.height ? arguments.height : 1;
-
- var divHeight = target.parentNode.clientHeight;
- var divWidth = target.parentNode.clientWidth;
-
- var aspectRatioGraph = height / width;
- var aspectRatioDiv = divHeight / divWidth;
-
- var scale = aspectRatioGraph > aspectRatioDiv
- ? (divHeight / height) * factor
- : (divWidth / width) * factor;
-
- if (arguments.maxScale) {
- if (scale > arguments.maxScale) {
- scale = arguments.maxScale;
- }
- }
- if (arguments.minScale) {
- if (scale < arguments.minScale) {
- scale = arguments.minScale;
- }
- }
-
- // Center
- var scaledWidth = width * scale;
- var scaledHeight = height * scale;
-
- var sx = (divWidth - scaledWidth) / 2 - scale * x;
- var sy = (divHeight - scaledHeight) / 2 - scale * y;
- console.log("sx,sy:" + sx + "," + sy);
-
- if (duration != 0 && !duration) {
- duration = 500;
- }
-
- animateTransform(target, scale, sx, sy, duration);
- },
- attachNavigateModel : function(arguments) {
- var $this = $(this);
- var target = ($this)[0];
- target.model = arguments;
-
- if (target.model) {
- var obj = {};
- obj.scale = target.scale;
- obj.height = target.parentNode.clientHeight;
- obj.width = target.parentNode.clientWidth;
-
- obj.x1 = target.translateX;
- obj.y1 = target.translateY;
- obj.x2 = obj.x1 + obj.height * obj.scale;
- obj.y2 = obj.y1 + obj.width * obj.scale;
-
- target.model.set({
- transform : obj
- });
- }
- }
- };
-
- // Main Constructor
- $.fn.svgNavigate = function(method) {
- if (methods[method]) {
- return methods[method].apply(this, Array.prototype.slice.call(arguments, 1));
- }
- else if (typeof method === 'object' || !method) {
- return methods.init.apply(this, arguments);
- }
- else {
- $.error('Method ' + method + ' does not exist on svgNavigate');
- }
- };
+ var mouseUp = function(evt) {
+ if (evt.button > 1) {
+ return;
+ }
+ var target = evt.target;
+ target.mx = evt.clientX;
+ target.my = evt.clientY;
+ target.mDown = false;
+ }
+
+ var mouseDown = function(evt) {
+ if (evt.button > 1) {
+ return;
+ }
+
+ var target = evt.target;
+ target.mx = evt.clientX;
+ target.my = evt.clientY;
+ target.mDown = true;
+ }
+
+ var mouseOut = function(evt) {
+ var target = evt.target;
+ target.mx = evt.clientX;
+ target.my = evt.clientY;
+ target.mDown = false;
+ }
+
+ var mouseMove = function(evt) {
+ var target = evt.target;
+ if (target.mDown) {
+ var dx = evt.clientX - target.mx;
+ var dy = evt.clientY - target.my;
+
+ evt.dragX = dx;
+ evt.dragY = dy;
+ mouseDrag(evt);
+ }
+
+ target.mx = evt.clientX;
+ target.my = evt.clientY;
+ }
+
+ var mouseDrag = function(evt) {
+ translateDeltaGraph(evt.target, evt.dragX, evt.dragY);
+ }
+
+ var mouseScrolled = function(evt) {
+ if (!evt) {
+ evt = window.event;
+ }
+ var target = evt.currentTarget;
+
+ var leftOffset = 0;
+ var topOffset = 0;
+ if (!target.marker) {
+ while (!target.farthestViewportElement) {
+ target = target.parentNode;
+ }
+
+ target = target.farthestViewportElement;
+ }
+
+ // Trackball/trackpad vs wheel. Need to accommodate
+ var delta = 0;
+ if (evt.wheelDelta) {
+ if (evt.wheelDelta > 0) {
+ delta = Math.ceil(evt.wheelDelta / 120);
+ }
+ else {
+ delta = Math.floor(evt.wheelDelta / 120);
+ }
+ }
+ else if (evt.detail) {
+ if (evt.detail > 0) {
+ delta = -Math.ceil(evt.detail / 3);
+ }
+ else {
+ delta = -Math.floor(evt.detail / 3);
+ }
+ }
+
+ var zoomLevel = boundZoomLevel(target, target.zoomIndex + delta);
+ target.zoomIndex = zoomLevel;
+ var scale = target.zoomLevels[zoomLevel];
+
+ var x = evt.offsetX;
+ var y = evt.offsetY;
+ if (!x) {
+ var position = $(target.parentElement).position();
+ x = evt.layerX - position.left;
+ y = evt.layerY - position.top;
+ }
+
+ evt.stopPropagation();
+ evt.preventDefault();
+
+ scaleGraph(target, scale, x, y);
+ }
+
+ this.boundZoomLevel = function(target, level) {
+ if (level >= target.settings.zoomNumLevels) {
+ return target.settings.zoomNumLevels - 1;
+ }
+ else if (level <= 0) {
+ return 0;
+ }
+
+ return level;
+ }
+
+ this.scaleGraph = function(target, scale, x, y) {
+ var sfactor = scale / target.scale;
+ target.scale = scale;
+
+ target.translateX = sfactor * target.translateX + x - sfactor * x;
+ target.translateY = sfactor * target.translateY + y - sfactor * y;
+
+ if (target.model) {
+ target.model.trigger("scaled");
+ }
+ retransform(target);
+ }
+
+ this.translateDeltaGraph = function(target, x, y) {
+ target.translateX += x;
+ target.translateY += y;
+ if (target.model) {
+ target.model.trigger("panned");
+ }
+ retransform(target);
+ }
+
+ this.retransform = function(target) {
+ var gs = target.childNodes;
+
+ var transformString = "translate(" + target.translateX + "," + target.translateY +
+ ") scale(" + target.scale + ")";
+
+ for (var i = 0; i < gs.length; ++i) {
+ var g = gs[i];
+ if (g.nodeName == 'g') {
+ g.setAttribute("transform", transformString);
+ }
+ }
+
+ if (target.model) {
+ var obj = target.model.get("transform");
+ if (obj) {
+ obj.scale = target.scale;
+ obj.height = target.parentNode.clientHeight;
+ obj.width = target.parentNode.clientWidth;
+
+ obj.x1 = target.translateX;
+ obj.y1 = target.translateY;
+ obj.x2 = obj.x1 + obj.width * obj.scale;
+ obj.y2 = obj.y1 + obj.height * obj.scale;
+ }
+ }
+ }
+
+ this.resetTransform = function(target) {
+ var settings = target.settings;
+ target.translateX = settings.x;
+ target.translateY = settings.y;
+
+ if (settings.x < settings.x2) {
+ var factor = 0.90;
+
+ // Reset scale and stuff.
+ var divHeight = target.parentNode.clientHeight;
+ var divWidth = target.parentNode.clientWidth;
+
+ var width = settings.x2 - settings.x;
+ var height = settings.y2 - settings.y;
+ var aspectRatioGraph = height / width;
+ var aspectRatioDiv = divHeight / divWidth;
+
+ var scale = aspectRatioGraph > aspectRatioDiv
+ ? (divHeight / height) * factor
+ : (divWidth / width) * factor;
+ target.scale = scale;
+ }
+ else {
+ target.zoomIndex = boundZoomLevel(target, settings.zoomIndex);
+ target.scale = target.zoomLevels[target.zoomIndex];
+ }
+ }
+
+ this.animateTransform = function(target, scale, x, y, duration) {
+ var zoomLevel = calculateZoomLevel(scale, target.zoomLevels);
+ target.fromScaleLevel = target.zoomIndex;
+ target.toScaleLevel = zoomLevel;
+ target.fromX = target.translateX;
+ target.fromY = target.translateY;
+ target.fromScale = target.scale;
+ target.toScale = target.zoomLevels[zoomLevel];
+ target.toX = x;
+ target.toY = y;
+ target.startTime = new Date().getTime();
+ target.endTime = target.startTime + duration;
+
+ this.animateTick(target);
+ }
+
+ this.animateTick = function(target) {
+ var time = new Date().getTime();
+ if (time < target.endTime) {
+ var timeDiff = time - target.startTime;
+ var progress = timeDiff / (target.endTime - target.startTime);
+
+ target.scale = (target.toScale - target.fromScale) * progress + target.fromScale;
+ target.translateX = (target.toX - target.fromX) * progress + target.fromX;
+ target.translateY = (target.toY - target.fromY) * progress + target.fromY;
+ retransform(target);
+ setTimeout(function() {
+ this.animateTick(target)
+ }, 1);
+ }
+ else {
+ target.zoomIndex = target.toScaleLevel;
+ target.scale = target.zoomLevels[target.zoomIndex];
+ target.translateX = target.toX;
+ target.translateY = target.toY;
+ retransform(target);
+ }
+ }
+
+ this.calculateZoomScale = function(scaleLevel, numLevels, points) {
+ if (scaleLevel <= 0) {
+ return points[0];
+ }
+ else if (scaleLevel >= numLevels) {
+ return points[points.length - 1];
+ }
+ var factor = (scaleLevel / numLevels) * (points.length - 1);
+ var floorIdx = Math.floor(factor);
+ var ceilingIdx = Math.ceil(factor);
+
+ var b = factor - floorIdx;
+
+ return b * (points[ceilingIdx] - points[floorIdx]) + points[floorIdx];
+ }
+
+ this.calculateZoomLevel = function(scale, zoomLevels) {
+ if (scale >= zoomLevels[zoomLevels.length - 1]) {
+ return zoomLevels.length - 1;
+ }
+ else if (scale <= zoomLevels[0]) {
+ return 0;
+ }
+
+ var i = 0;
+ // Plain old linear scan
+ for (; i < zoomLevels.length; ++i) {
+ if (scale < zoomLevels[i]) {
+ i--;
+ break;
+ }
+ }
+
+ if (i < 0) {
+ return 0;
+ }
+
+ return i;
+ }
+
+ var methods = {
+ init : function(options) {
+ var settings = {
+ x : 0,
+ y : 0,
+ x2 : 0,
+ y2 : 0,
+ minX : -1000,
+ minY : -1000,
+ maxX : 1000,
+ maxY : 1000,
+ zoomIndex : 24,
+ zoomPoints : [ 0.1, 0.14, 0.2, 0.4, 0.8, 1, 1.6, 2.4, 4, 8, 16 ],
+ zoomNumLevels : 48
+ };
+ if (options) {
+ $.extend(settings, options);
+ }
+ return this.each(function() {
+ var $this = $(this);
+ this.settings = settings;
+ this.marker = true;
+
+ if (window.addEventListener) {
+ this.addEventListener('DOMMouseScroll', mouseScrolled,false);
+ }
+ this.onmousewheel = mouseScrolled;
+ this.onmousedown = mouseDown;
+ this.onmouseup = mouseUp;
+ this.onmousemove = mouseMove;
+ this.onmouseout = mouseOut;
+
+ this.zoomLevels = new Array(settings.zoomNumLevels);
+ for ( var i = 0; i < settings.zoomNumLevels; ++i) {
+ var scale = calculateZoomScale(i, settings.zoomNumLevels, settings.zoomPoints);
+ this.zoomLevels[i] = scale;
+ }
+ resetTransform(this);
+ });
+ },
+ transformToBox : function(arguments) {
+ var $this = $(this);
+ var target = ($this)[0];
+ var x = arguments.x;
+ var y = arguments.y;
+ var factor = 0.9;
+ var duration = arguments.duration;
+
+ var width = arguments.width ? arguments.width : 1;
+ var height = arguments.height ? arguments.height : 1;
+
+ var divHeight = target.parentNode.clientHeight;
+ var divWidth = target.parentNode.clientWidth;
+
+ var aspectRatioGraph = height / width;
+ var aspectRatioDiv = divHeight / divWidth;
+
+ var scale = aspectRatioGraph > aspectRatioDiv
+ ? (divHeight / height) * factor
+ : (divWidth / width) * factor;
+
+ if (arguments.maxScale) {
+ if (scale > arguments.maxScale) {
+ scale = arguments.maxScale;
+ }
+ }
+ if (arguments.minScale) {
+ if (scale < arguments.minScale) {
+ scale = arguments.minScale;
+ }
+ }
+
+ // Center
+ var scaledWidth = width * scale;
+ var scaledHeight = height * scale;
+
+ var sx = (divWidth - scaledWidth) / 2 - scale * x;
+ var sy = (divHeight - scaledHeight) / 2 - scale * y;
+ console.log("sx,sy:" + sx + "," + sy);
+
+ if (duration != 0 && !duration) {
+ duration = 500;
+ }
+
+ animateTransform(target, scale, sx, sy, duration);
+ },
+ attachNavigateModel : function(arguments) {
+ var $this = $(this);
+ var target = ($this)[0];
+ target.model = arguments;
+
+ if (target.model) {
+ var obj = {};
+ obj.scale = target.scale;
+ obj.height = target.parentNode.clientHeight;
+ obj.width = target.parentNode.clientWidth;
+
+ obj.x1 = target.translateX;
+ obj.y1 = target.translateY;
+ obj.x2 = obj.x1 + obj.height * obj.scale;
+ obj.y2 = obj.y1 + obj.width * obj.scale;
+
+ target.model.set({
+ transform : obj
+ });
+ }
+ }
+ };
+
+ // Main Constructor
+ $.fn.svgNavigate = function(method) {
+ if (methods[method]) {
+ return methods[method].apply(this, Array.prototype.slice.call(arguments, 1));
+ }
+ else if (typeof method === 'object' || !method) {
+ return methods.init.apply(this, arguments);
+ }
+ else {
+ $.error('Method ' + method + ' does not exist on svgNavigate');
+ }
+ };
})(jQuery);
src/web/js/azkaban/util/svgutils.js 111(+54 -57)
diff --git a/src/web/js/azkaban/util/svgutils.js b/src/web/js/azkaban/util/svgutils.js
index 33dea78..3802f86 100644
--- a/src/web/js/azkaban/util/svgutils.js
+++ b/src/web/js/azkaban/util/svgutils.js
@@ -1,70 +1,67 @@
-function hasClass(el, name)
-{
- var classes = el.getAttribute("class");
- if (classes == null) {
- return false;
- }
- return new RegExp('(\\s|^)'+name+'(\\s|$)').test(classes);
+function hasClass(el, name) {
+ var classes = el.getAttribute("class");
+ if (classes == null) {
+ return false;
+ }
+ return new RegExp('(\\s|^)'+name+'(\\s|$)').test(classes);
}
-function addClass(el, name)
-{
- if (!hasClass(el, name)) {
- var classes = el.getAttribute("class");
- if (classes) {
- classes += ' ' + name;
- }
- else {
- classes = name;
- }
- el.setAttribute("class", classes);
- }
+function addClass(el, name) {
+ if (!hasClass(el, name)) {
+ var classes = el.getAttribute("class");
+ if (classes) {
+ classes += ' ' + name;
+ }
+ else {
+ classes = name;
+ }
+ el.setAttribute("class", classes);
+ }
}
-function removeClass(el, name)
-{
- if (hasClass(el, name)) {
- var classes = el.getAttribute("class");
- el.setAttribute("class", classes.replace(new RegExp('(\\s|^)'+name+'(\\s|$)'),' ').replace(/^\s+|\s+$/g, ''));
- }
+function removeClass(el, name) {
+ if (hasClass(el, name)) {
+ var classes = el.getAttribute("class");
+ el.setAttribute("class", classes.replace(new RegExp('(\\s|^)'+name+'(\\s|$)'),' ').replace(/^\s+|\s+$/g, ''));
+ }
}
function translateStr(x, y) {
- return "translate(" + x + "," + y + ")";
+ return "translate(" + x + "," + y + ")";
}
function animatePolylineEdge(svg, edge, newPoints, time) {
- var oldEdgeGuides = edge.oldpoints;
-
- var interval = 10;
- var numsteps = time/interval;
-
- var deltaEdges = new Array();
- for (var i=0; i < oldEdgeGuides.length; ++i) {
- var startPoint = oldEdgeGuides[i];
- var endPoint = newPoints[i];
-
- var deltaX = (endPoint[0] - startPoint[0])/numsteps;
- var deltaY = (endPoint[1] - startPoint[1])/numsteps;
- deltaEdges.push([deltaX, deltaY]);
- }
-
- animatePolyLineLoop(svg, edge, oldEdgeGuides, deltaEdges, numsteps, 25);
+ var oldEdgeGuides = edge.oldpoints;
+
+ var interval = 10;
+ var numsteps = time/interval;
+
+ var deltaEdges = new Array();
+ for (var i=0; i < oldEdgeGuides.length; ++i) {
+ var startPoint = oldEdgeGuides[i];
+ var endPoint = newPoints[i];
+
+ var deltaX = (endPoint[0] - startPoint[0])/numsteps;
+ var deltaY = (endPoint[1] - startPoint[1])/numsteps;
+ deltaEdges.push([deltaX, deltaY]);
+ }
+
+ animatePolyLineLoop(svg, edge, oldEdgeGuides, deltaEdges, numsteps, 25);
}
function animatePolyLineLoop(svg, edge, lastPoints, deltaEdges, step, time) {
- for (var i=0; i < deltaEdges.length; ++i) {
- lastPoints[i][0] += deltaEdges[i][0];
- lastPoints[i][1] += deltaEdges[i][1];
- }
-
- svg.change(edge.line, {points: lastPoints});
- if (step > 0) {
- setTimeout(
- function(){
- animatePolyLineLoop(svg, edge, lastPoints, deltaEdges, step - 1);
- },
- time
- );
- }
-}
\ No newline at end of file
+ for (var i=0; i < deltaEdges.length; ++i) {
+ lastPoints[i][0] += deltaEdges[i][0];
+ lastPoints[i][1] += deltaEdges[i][1];
+ }
+
+ svg.change(edge.line, {points: lastPoints});
+ if (step > 0) {
+ setTimeout(
+ function(){
+ animatePolyLineLoop(svg, edge, lastPoints, deltaEdges, step - 1);
+ },
+ time
+ );
+ }
+}
src/web/js/azkaban/view/admin-setup.js 220(+113 -107)
diff --git a/src/web/js/azkaban/view/admin-setup.js b/src/web/js/azkaban/view/admin-setup.js
index 632f5d0..c3730da 100644
--- a/src/web/js/azkaban/view/admin-setup.js
+++ b/src/web/js/azkaban/view/admin-setup.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -18,116 +18,122 @@ $.namespace('azkaban');
var dbUploadPanel;
azkaban.DBUploadPanel= Backbone.View.extend({
- events : {
- "click #upload-jar-btn" : "handleUpload"
- },
- initialize : function(settings) {
- },
- render: function() {
- },
- handleUpload: function(){
- var filename = $("#file").val();
- if (filename.length > 4) {
- var lastIndexOf = filename.lastIndexOf('.');
- var lastIndexOfForwardSlash = filename.lastIndexOf('\\');
- var lastIndexOfBackwardSlash = filename.lastIndexOf('/');
-
- var startIndex = Math.max(lastIndexOfForwardSlash, lastIndexOfBackwardSlash);
- startIndex += 1;
-
- var subfilename = filename.substring(startIndex, filename.length);
- var end = filename.substring(lastIndexOf, filename.length);
- if (end != ".jar") {
- alert("File "+ subfilename + " doesn't appear to be a jar. Looking for mysql-connector*.jar");
- return;
- }
- else if (subfilename.substr(0, "mysql-connector".length) != "mysql-connector") {
- alert("File "+ subfilename + " doesn't appear to be a mysql connector jar. Looking for mysql-connector*.jar");
- return;
- }
-
- console.log("Looks valid, uploading.");
- var uploadForm = document.getElementById("upload-form");
- var formData = new FormData(uploadForm);
- var contextUrl = contextURL;
-
- var xhr = new XMLHttpRequest();
- xhr.onreadystatechange=function() {
- if (xhr.readyState==4) {
- var data = JSON.parse(xhr.responseText);
- if (data.error) {
- alert(data.error);
- }
- else {
- $("#installed").html("Uploaded <span class=bold>" + data.jarname + "</span>");
- }
- }
- }
- xhr.open("POST", "uploadServlet");
- xhr.send(formData);
+ events : {
+ "click #upload-jar-btn" : "handleUpload"
+ },
+ initialize : function(settings) {
+ },
+ render: function() {
+ },
+ handleUpload: function(){
+ var filename = $("#file").val();
+ if (filename.length > 4) {
+ var lastIndexOf = filename.lastIndexOf('.');
+ var lastIndexOfForwardSlash = filename.lastIndexOf('\\');
+ var lastIndexOfBackwardSlash = filename.lastIndexOf('/');
- console.log("Finished.");
-
- }
- else {
- alert("File doesn't appear to be valid.");
- }
- }
+ var startIndex = Math.max(lastIndexOfForwardSlash, lastIndexOfBackwardSlash);
+ startIndex += 1;
+
+ var subfilename = filename.substring(startIndex, filename.length);
+ var end = filename.substring(lastIndexOf, filename.length);
+ if (end != ".jar") {
+ alert("File "+ subfilename + " doesn't appear to be a jar. Looking for mysql-connector*.jar");
+ return;
+ }
+ else if (subfilename.substr(0, "mysql-connector".length) != "mysql-connector") {
+ alert("File "+ subfilename + " doesn't appear to be a mysql connector jar. Looking for mysql-connector*.jar");
+ return;
+ }
+
+ console.log("Looks valid, uploading.");
+ var uploadForm = document.getElementById("upload-form");
+ var formData = new FormData(uploadForm);
+ var contextUrl = contextURL;
+
+ var xhr = new XMLHttpRequest();
+ xhr.onreadystatechange=function() {
+ if (xhr.readyState==4) {
+ var data = JSON.parse(xhr.responseText);
+ if (data.error) {
+ alert(data.error);
+ }
+ else {
+ $("#installed").html("Uploaded <span class=bold>" + data.jarname + "</span>");
+ }
+ }
+ }
+ xhr.open("POST", "uploadServlet");
+ xhr.send(formData);
+
+ console.log("Finished.");
+ }
+ else {
+ alert("File doesn't appear to be valid.");
+ }
+ }
});
var dbConnectionsPanel;
azkaban.DBConnectionPanel= Backbone.View.extend({
- events : {
- "click #save-connection-button" : "handleSaveConnection"
- },
- initialize : function(settings) {
- if (verified) {
- $("#save-results").text(message);
- $("#save-results").css("color", "#00CC00");
- } else {
- $("#save-results").hide();
- }
- },
- render: function() {
- },
- handleSaveConnection: function(){
- var host = $("#host").val();
- var port = $("#port").val();
- var database = $("#database").val();
- var username = $("#username").val();
- var password = $("#password").val();
-
- var contextUrl = contextURL;
- $.post(
- contextUrl,
- {ajax:"saveDbConnection", host: host, port:port, database: database, username: username, password: password},
- function(data) {
- if (data.error) {
- verified = false;
- $("#save-results").text(data.error);
- $("#save-results").css("color", "#FF0000");
- }
- else if (data.success) {
- verified = true;
- $("#save-results").text(data.success);
- $("#save-results").css("color", "#00CC00");
- }
- $("#save-results").show();
- }
- );
- }
+ events : {
+ "click #save-connection-button" : "handleSaveConnection"
+ },
+ initialize : function(settings) {
+ if (verified) {
+ $("#save-results").text(message);
+ $("#save-results").css("color", "#00CC00");
+ } else {
+ $("#save-results").hide();
+ }
+ },
+ render: function() {
+ },
+ handleSaveConnection: function(){
+ var host = $("#host").val();
+ var port = $("#port").val();
+ var database = $("#database").val();
+ var username = $("#username").val();
+ var password = $("#password").val();
+
+ var contextUrl = contextURL;
+ $.post(
+ contextUrl,
+ {
+ ajax: "saveDbConnection",
+ host: host,
+ port: port,
+ database: database,
+ username: username,
+ password: password
+ },
+ function(data) {
+ if (data.error) {
+ verified = false;
+ $("#save-results").text(data.error);
+ $("#save-results").css("color", "#FF0000");
+ }
+ else if (data.success) {
+ verified = true;
+ $("#save-results").text(data.success);
+ $("#save-results").css("color", "#00CC00");
+ }
+ $("#save-results").show();
+ }
+ );
+ }
});
$(function() {
- dbUploadPanel = new azkaban.DBUploadPanel({el:$( '#dbuploadpanel')});
- dbConnectionPanel = new azkaban.DBConnectionPanel({el:$( '#dbsettingspanel')});
-
- $("#saveAndContinue").click(function(data) {
- if (!verified) {
- alert("The database connection hasn't been verified.");
- }
- else {
- window.location="/?usersetup";
- }
- });
+ dbUploadPanel = new azkaban.DBUploadPanel({el:$( '#dbuploadpanel')});
+ dbConnectionPanel = new azkaban.DBConnectionPanel({el:$( '#dbsettingspanel')});
+
+ $("#saveAndContinue").click(function(data) {
+ if (!verified) {
+ alert("The database connection hasn't been verified.");
+ }
+ else {
+ window.location="/?usersetup";
+ }
+ });
});
src/web/js/azkaban/view/context-menu.js 102(+51 -51)
diff --git a/src/web/js/azkaban/view/context-menu.js b/src/web/js/azkaban/view/context-menu.js
index 0e92dd2..486817e 100644
--- a/src/web/js/azkaban/view/context-menu.js
+++ b/src/web/js/azkaban/view/context-menu.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -17,72 +17,72 @@
$.namespace('azkaban');
azkaban.ContextMenuView = Backbone.View.extend({
- events: {
- },
-
+ events: {
+ },
+
initialize: function(settings) {
- var div = this.el;
- $('body').click(function(e) {
- $(".contextMenu").remove();
- });
- $('body').bind("contextmenu", function(e) {$(".contextMenu").remove()});
- },
-
+ var div = this.el;
+ $('body').click(function(e) {
+ $(".contextMenu").remove();
+ });
+ $('body').bind("contextmenu", function(e) {$(".contextMenu").remove()});
+ },
+
show: function(evt, menu) {
- console.log("Show context menu");
- $(".contextMenu").remove();
- var x = evt.pageX;
- var y = evt.pageY;
+ console.log("Show context menu");
+ $(".contextMenu").remove();
+ var x = evt.pageX;
+ var y = evt.pageY;
+
+ var contextMenu = this.setupMenu(menu);
+ $(contextMenu).css({top: y, left: x});
+ $(this.el).after(contextMenu);
+ },
- var contextMenu = this.setupMenu(menu);
- $(contextMenu).css({top: y, left: x});
- $(this.el).after(contextMenu);
- },
-
hide: function(evt) {
- console.log("Hide context menu");
- $(".contextMenu").remove();
- },
-
+ console.log("Hide context menu");
+ $(".contextMenu").remove();
+ },
+
handleClick: function(evt) {
- console.log("handling click");
- },
-
+ console.log("handling click");
+ },
+
setupMenu: function(menu) {
- var contextMenu = document.createElement("div");
- $(contextMenu).addClass("contextMenu");
- var ul = document.createElement("ul");
- $(contextMenu).append(ul);
+ var contextMenu = document.createElement("div");
+ $(contextMenu).addClass("contextMenu");
+ var ul = document.createElement("ul");
+ $(contextMenu).append(ul);
- for (var i = 0; i < menu.length; ++i) {
- var menuItem = document.createElement("li");
- if (menu[i].break) {
- $(menuItem).addClass("break");
- $(ul).append(menuItem);
+ for (var i = 0; i < menu.length; ++i) {
+ var menuItem = document.createElement("li");
+ if (menu[i].break) {
+ $(menuItem).addClass("break");
+ $(ul).append(menuItem);
continue;
- }
+ }
var title = menu[i].title;
var callback = menu[i].callback;
$(menuItem).addClass("menuitem");
$(menuItem).text(title);
menuItem.callback = callback;
- $(menuItem).click(function() {
- $(contextMenu).hide();
+ $(menuItem).click(function() {
+ $(contextMenu).hide();
this.callback.call();
});
-
+
if (menu[i].submenu) {
var expandSymbol = document.createElement("div");
$(expandSymbol).addClass("expandSymbol");
$(menuItem).append(expandSymbol);
-
+
var subMenu = this.setupMenu(menu[i].submenu);
$(subMenu).addClass("subMenu");
subMenu.parent = contextMenu;
menuItem.subMenu = subMenu;
$(subMenu).hide();
$(this.el).after(subMenu);
-
+
$(menuItem).mouseenter(function() {
$(".subMenu").hide();
var menuItem = this;
@@ -94,7 +94,7 @@ azkaban.ContextMenuView = Backbone.View.extend({
var top = offset.top;
var width = $(menuItem).width();
var subMenu = menuItem.subMenu;
-
+
var newLeft = left + width - 5;
$(subMenu).css({left: newLeft, top: top});
$(subMenu).show();
@@ -103,15 +103,15 @@ azkaban.ContextMenuView = Backbone.View.extend({
});
$(menuItem).mouseleave(function() {this.selected = false;});
}
- $(ul).append(menuItem);
- }
+ $(ul).append(menuItem);
+ }
- return contextMenu;
- }
+ return contextMenu;
+ }
});
var contextMenuView;
$(function() {
- contextMenuView = new azkaban.ContextMenuView({el:$('#contextMenu')});
- contextMenuView.hide();
+ contextMenuView = new azkaban.ContextMenuView({el:$('#contextMenu')});
+ contextMenuView.hide();
});
diff --git a/src/web/js/azkaban/view/executions.js b/src/web/js/azkaban/view/executions.js
index 58b75c4..2724d7c 100644
--- a/src/web/js/azkaban/view/executions.js
+++ b/src/web/js/azkaban/view/executions.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
src/web/js/azkaban/view/flow.js 830(+415 -415)
diff --git a/src/web/js/azkaban/view/flow.js b/src/web/js/azkaban/view/flow.js
index 6ed8d89..8feca89 100644
--- a/src/web/js/azkaban/view/flow.js
+++ b/src/web/js/azkaban/view/flow.js
@@ -17,69 +17,69 @@
$.namespace('azkaban');
var handleJobMenuClick = function(action, el, pos) {
- var jobid = el[0].jobid;
- var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" +
- flowId + "&job=" + jobid;
- if (action == "open") {
- window.location.href = requestURL;
- }
- else if (action == "openwindow") {
- window.open(requestURL);
- }
+ var jobid = el[0].jobid;
+ var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" +
+ flowId + "&job=" + jobid;
+ if (action == "open") {
+ window.location.href = requestURL;
+ }
+ else if (action == "openwindow") {
+ window.open(requestURL);
+ }
}
var flowTabView;
azkaban.FlowTabView = Backbone.View.extend({
- events: {
- "click #graphViewLink": "handleGraphLinkClick",
- "click #executionsViewLink": "handleExecutionLinkClick",
- "click #summaryViewLink": "handleSummaryLinkClick"
- },
-
- initialize: function(settings) {
- var selectedView = settings.selectedView;
- if (selectedView == "executions") {
- this.handleExecutionLinkClick();
- }
- else {
- this.handleGraphLinkClick();
- }
- },
-
- render: function() {
- console.log("render graph");
- },
-
- handleGraphLinkClick: function(){
- $("#executionsViewLink").removeClass("active");
- $("#graphViewLink").addClass("active");
- $('#summaryViewLink').removeClass('active');
-
- $("#executionsView").hide();
- $("#graphView").show();
- $('#summaryView').hide();
- },
-
- handleExecutionLinkClick: function() {
- $("#graphViewLink").removeClass("active");
- $("#executionsViewLink").addClass("active");
- $('#summaryViewLink').removeClass('active');
-
- $("#graphView").hide();
- $("#executionsView").show();
- $('#summaryView').hide();
- executionModel.trigger("change:view");
- },
+ events: {
+ "click #graphViewLink": "handleGraphLinkClick",
+ "click #executionsViewLink": "handleExecutionLinkClick",
+ "click #summaryViewLink": "handleSummaryLinkClick"
+ },
+
+ initialize: function(settings) {
+ var selectedView = settings.selectedView;
+ if (selectedView == "executions") {
+ this.handleExecutionLinkClick();
+ }
+ else {
+ this.handleGraphLinkClick();
+ }
+ },
+
+ render: function() {
+ console.log("render graph");
+ },
+
+ handleGraphLinkClick: function(){
+ $("#executionsViewLink").removeClass("active");
+ $("#graphViewLink").addClass("active");
+ $('#summaryViewLink').removeClass('active');
+
+ $("#executionsView").hide();
+ $("#graphView").show();
+ $('#summaryView').hide();
+ },
+
+ handleExecutionLinkClick: function() {
+ $("#graphViewLink").removeClass("active");
+ $("#executionsViewLink").addClass("active");
+ $('#summaryViewLink').removeClass('active');
+
+ $("#graphView").hide();
+ $("#executionsView").show();
+ $('#summaryView').hide();
+ executionModel.trigger("change:view");
+ },
handleSummaryLinkClick: function() {
- $('#graphViewLink').removeClass('active');
- $('#executionsViewLink').removeClass('active');
- $('#summaryViewLink').addClass('active');
-
- $('#graphView').hide();
- $('#executionsView').hide();
- $('#summaryView').show();
- },
+ $('#graphViewLink').removeClass('active');
+ $('#executionsViewLink').removeClass('active');
+ $('#summaryViewLink').addClass('active');
+
+ $('#graphView').hide();
+ $('#executionsView').hide();
+ $('#summaryView').show();
+ },
});
var jobListView;
@@ -87,254 +87,254 @@ var svgGraphView;
var executionsView;
azkaban.ExecutionsView = Backbone.View.extend({
- events: {
- "click #pageSelection li": "handleChangePageSelection"
- },
-
- initialize: function(settings) {
- this.model.bind('change:view', this.handleChangeView, this);
- this.model.bind('render', this.render, this);
- this.model.set({page: 1, pageSize: 16});
- this.model.bind('change:page', this.handlePageChange, this);
- },
-
- render: function(evt) {
- console.log("render");
- // Render page selections
- var tbody = $("#execTableBody");
- tbody.empty();
-
- var executions = this.model.get("executions");
- for (var i = 0; i < executions.length; ++i) {
- var row = document.createElement("tr");
-
- var tdId = document.createElement("td");
- var execA = document.createElement("a");
- $(execA).attr("href", contextURL + "/executor?execid=" + executions[i].execId);
- $(execA).text(executions[i].execId);
- tdId.appendChild(execA);
- row.appendChild(tdId);
-
- var tdUser = document.createElement("td");
- $(tdUser).text(executions[i].submitUser);
- row.appendChild(tdUser);
-
- var startTime = "-";
- if (executions[i].startTime != -1) {
- var startDateTime = new Date(executions[i].startTime);
- startTime = getDateFormat(startDateTime);
- }
-
- var tdStartTime = document.createElement("td");
- $(tdStartTime).text(startTime);
- row.appendChild(tdStartTime);
-
- var endTime = "-";
- var lastTime = executions[i].endTime;
- if (executions[i].endTime != -1) {
- var endDateTime = new Date(executions[i].endTime);
- endTime = getDateFormat(endDateTime);
- }
- else {
- lastTime = (new Date()).getTime();
- }
-
- var tdEndTime = document.createElement("td");
- $(tdEndTime).text(endTime);
- row.appendChild(tdEndTime);
-
- var tdElapsed = document.createElement("td");
- $(tdElapsed).text( getDuration(executions[i].startTime, lastTime));
- row.appendChild(tdElapsed);
-
- var tdStatus = document.createElement("td");
- var status = document.createElement("div");
- $(status).addClass("status");
- $(status).addClass(executions[i].status);
- $(status).text(statusStringMap[executions[i].status]);
- tdStatus.appendChild(status);
- row.appendChild(tdStatus);
-
- var tdAction = document.createElement("td");
- row.appendChild(tdAction);
-
- tbody.append(row);
- }
-
- this.renderPagination(evt);
- },
-
- renderPagination: function(evt) {
- var total = this.model.get("total");
- total = total? total : 1;
- var pageSize = this.model.get("pageSize");
- var numPages = Math.ceil(total / pageSize);
-
- this.model.set({"numPages": numPages});
- var page = this.model.get("page");
-
- //Start it off
- $("#pageSelection .active").removeClass("active");
-
- // Disable if less than 5
- console.log("Num pages " + numPages)
- var i = 1;
- for (; i <= numPages && i <= 5; ++i) {
- $("#page" + i).removeClass("disabled");
- }
- for (; i <= 5; ++i) {
- $("#page" + i).addClass("disabled");
- }
-
- // Disable prev/next if necessary.
- if (page > 1) {
- $("#previous").removeClass("disabled");
- $("#previous")[0].page = page - 1;
- $("#previous a").attr("href", "#page" + (page - 1));
- }
- else {
- $("#previous").addClass("disabled");
- }
-
- if (page < numPages) {
- $("#next")[0].page = page + 1;
- $("#next").removeClass("disabled");
- $("#next a").attr("href", "#page" + (page + 1));
- }
- else {
- $("#next")[0].page = page + 1;
- $("#next").addClass("disabled");
- }
-
- // Selection is always in middle unless at barrier.
- var startPage = 0;
- var selectionPosition = 0;
- if (page < 3) {
- selectionPosition = page;
- startPage = 1;
- }
- else if (page == numPages) {
- selectionPosition = 5;
- startPage = numPages - 4;
- }
- else if (page == numPages - 1) {
- selectionPosition = 4;
- startPage = numPages - 4;
- }
- else {
- selectionPosition = 3;
- startPage = page - 2;
- }
-
- $("#page"+selectionPosition).addClass("active");
- $("#page"+selectionPosition)[0].page = page;
- var selecta = $("#page" + selectionPosition + " a");
- selecta.text(page);
- selecta.attr("href", "#page" + page);
-
- for (var j = 0; j < 5; ++j) {
- var realPage = startPage + j;
- var elementId = "#page" + (j+1);
-
- $(elementId)[0].page = realPage;
- var a = $(elementId + " a");
- a.text(realPage);
- a.attr("href", "#page" + realPage);
- }
- },
-
- handleChangePageSelection: function(evt) {
- if ($(evt.currentTarget).hasClass("disabled")) {
- return;
- }
- var page = evt.currentTarget.page;
- this.model.set({"page": page});
- },
-
- handleChangeView: function(evt) {
- if (this.init) {
- return;
- }
- console.log("init");
- this.handlePageChange(evt);
- this.init = true;
- },
-
- handlePageChange: function(evt) {
- var page = this.model.get("page") - 1;
- var pageSize = this.model.get("pageSize");
- var requestURL = contextURL + "/manager";
-
- var model = this.model;
- var requestData = {
- "project": projectName,
- "flow": flowId,
- "ajax": "fetchFlowExecutions",
- "start": page * pageSize,
- "length": pageSize
- };
- var successHandler = function(data) {
- model.set({
- "executions": data.executions,
- "total": data.total
- });
- model.trigger("render");
- };
- $.get(requestURL, requestData, successHandler, "json");
- }
+ events: {
+ "click #pageSelection li": "handleChangePageSelection"
+ },
+
+ initialize: function(settings) {
+ this.model.bind('change:view', this.handleChangeView, this);
+ this.model.bind('render', this.render, this);
+ this.model.set({page: 1, pageSize: 16});
+ this.model.bind('change:page', this.handlePageChange, this);
+ },
+
+ render: function(evt) {
+ console.log("render");
+ // Render page selections
+ var tbody = $("#execTableBody");
+ tbody.empty();
+
+ var executions = this.model.get("executions");
+ for (var i = 0; i < executions.length; ++i) {
+ var row = document.createElement("tr");
+
+ var tdId = document.createElement("td");
+ var execA = document.createElement("a");
+ $(execA).attr("href", contextURL + "/executor?execid=" + executions[i].execId);
+ $(execA).text(executions[i].execId);
+ tdId.appendChild(execA);
+ row.appendChild(tdId);
+
+ var tdUser = document.createElement("td");
+ $(tdUser).text(executions[i].submitUser);
+ row.appendChild(tdUser);
+
+ var startTime = "-";
+ if (executions[i].startTime != -1) {
+ var startDateTime = new Date(executions[i].startTime);
+ startTime = getDateFormat(startDateTime);
+ }
+
+ var tdStartTime = document.createElement("td");
+ $(tdStartTime).text(startTime);
+ row.appendChild(tdStartTime);
+
+ var endTime = "-";
+ var lastTime = executions[i].endTime;
+ if (executions[i].endTime != -1) {
+ var endDateTime = new Date(executions[i].endTime);
+ endTime = getDateFormat(endDateTime);
+ }
+ else {
+ lastTime = (new Date()).getTime();
+ }
+
+ var tdEndTime = document.createElement("td");
+ $(tdEndTime).text(endTime);
+ row.appendChild(tdEndTime);
+
+ var tdElapsed = document.createElement("td");
+ $(tdElapsed).text( getDuration(executions[i].startTime, lastTime));
+ row.appendChild(tdElapsed);
+
+ var tdStatus = document.createElement("td");
+ var status = document.createElement("div");
+ $(status).addClass("status");
+ $(status).addClass(executions[i].status);
+ $(status).text(statusStringMap[executions[i].status]);
+ tdStatus.appendChild(status);
+ row.appendChild(tdStatus);
+
+ var tdAction = document.createElement("td");
+ row.appendChild(tdAction);
+
+ tbody.append(row);
+ }
+
+ this.renderPagination(evt);
+ },
+
+ renderPagination: function(evt) {
+ var total = this.model.get("total");
+ total = total? total : 1;
+ var pageSize = this.model.get("pageSize");
+ var numPages = Math.ceil(total / pageSize);
+
+ this.model.set({"numPages": numPages});
+ var page = this.model.get("page");
+
+ //Start it off
+ $("#pageSelection .active").removeClass("active");
+
+ // Disable if less than 5
+    console.log("Num pages " + numPages);
+ var i = 1;
+ for (; i <= numPages && i <= 5; ++i) {
+ $("#page" + i).removeClass("disabled");
+ }
+ for (; i <= 5; ++i) {
+ $("#page" + i).addClass("disabled");
+ }
+
+ // Disable prev/next if necessary.
+ if (page > 1) {
+ $("#previous").removeClass("disabled");
+ $("#previous")[0].page = page - 1;
+ $("#previous a").attr("href", "#page" + (page - 1));
+ }
+ else {
+ $("#previous").addClass("disabled");
+ }
+
+ if (page < numPages) {
+ $("#next")[0].page = page + 1;
+ $("#next").removeClass("disabled");
+ $("#next a").attr("href", "#page" + (page + 1));
+ }
+ else {
+ $("#next")[0].page = page + 1;
+ $("#next").addClass("disabled");
+ }
+
+ // Selection is always in middle unless at barrier.
+ var startPage = 0;
+ var selectionPosition = 0;
+ if (page < 3) {
+ selectionPosition = page;
+ startPage = 1;
+ }
+ else if (page == numPages) {
+ selectionPosition = 5;
+ startPage = numPages - 4;
+ }
+ else if (page == numPages - 1) {
+ selectionPosition = 4;
+ startPage = numPages - 4;
+ }
+ else {
+ selectionPosition = 3;
+ startPage = page - 2;
+ }
+
+ $("#page"+selectionPosition).addClass("active");
+ $("#page"+selectionPosition)[0].page = page;
+ var selecta = $("#page" + selectionPosition + " a");
+ selecta.text(page);
+ selecta.attr("href", "#page" + page);
+
+ for (var j = 0; j < 5; ++j) {
+ var realPage = startPage + j;
+ var elementId = "#page" + (j+1);
+
+ $(elementId)[0].page = realPage;
+ var a = $(elementId + " a");
+ a.text(realPage);
+ a.attr("href", "#page" + realPage);
+ }
+ },
+
+ handleChangePageSelection: function(evt) {
+ if ($(evt.currentTarget).hasClass("disabled")) {
+ return;
+ }
+ var page = evt.currentTarget.page;
+ this.model.set({"page": page});
+ },
+
+ handleChangeView: function(evt) {
+ if (this.init) {
+ return;
+ }
+ console.log("init");
+ this.handlePageChange(evt);
+ this.init = true;
+ },
+
+ handlePageChange: function(evt) {
+ var page = this.model.get("page") - 1;
+ var pageSize = this.model.get("pageSize");
+ var requestURL = contextURL + "/manager";
+
+ var model = this.model;
+ var requestData = {
+ "project": projectName,
+ "flow": flowId,
+ "ajax": "fetchFlowExecutions",
+ "start": page * pageSize,
+ "length": pageSize
+ };
+ var successHandler = function(data) {
+ model.set({
+ "executions": data.executions,
+ "total": data.total
+ });
+ model.trigger("render");
+ };
+ $.get(requestURL, requestData, successHandler, "json");
+ }
});
var summaryView;
azkaban.SummaryView = Backbone.View.extend({
- events: {
+ events: {
'click #analyze-btn': 'fetchLastRun'
- },
-
- initialize: function(settings) {
- this.model.bind('change:view', this.handleChangeView, this);
- this.model.bind('render', this.render, this);
-
- this.fetchDetails();
- this.fetchSchedule();
- this.model.trigger('render');
- },
-
- fetchDetails: function() {
- var requestURL = contextURL + "/manager";
- var requestData = {
- 'ajax': 'fetchflowdetails',
- 'project': projectName,
- 'flow': flowId
- };
-
- var model = this.model;
-
- var successHandler = function(data) {
- console.log(data);
- model.set({
- 'jobTypes': data.jobTypes
- });
- model.trigger('render');
- };
- $.get(requestURL, requestData, successHandler, 'json');
- },
+ },
+
+ initialize: function(settings) {
+ this.model.bind('change:view', this.handleChangeView, this);
+ this.model.bind('render', this.render, this);
+
+ this.fetchDetails();
+ this.fetchSchedule();
+ this.model.trigger('render');
+ },
+
+ fetchDetails: function() {
+ var requestURL = contextURL + "/manager";
+ var requestData = {
+ 'ajax': 'fetchflowdetails',
+ 'project': projectName,
+ 'flow': flowId
+ };
+
+ var model = this.model;
+
+ var successHandler = function(data) {
+ console.log(data);
+ model.set({
+ 'jobTypes': data.jobTypes
+ });
+ model.trigger('render');
+ };
+ $.get(requestURL, requestData, successHandler, 'json');
+ },
fetchSchedule: function() {
- var requestURL = contextURL + "/schedule"
- var requestData = {
- 'ajax': 'fetchSchedule',
- 'projectId': projectId,
- 'flowId': flowId
- };
- var model = this.model;
+    var requestURL = contextURL + "/schedule";
+ var requestData = {
+ 'ajax': 'fetchSchedule',
+ 'projectId': projectId,
+ 'flowId': flowId
+ };
+ var model = this.model;
var view = this;
- var successHandler = function(data) {
- model.set({'schedule': data.schedule});
- model.trigger('render');
+ var successHandler = function(data) {
+ model.set({'schedule': data.schedule});
+ model.trigger('render');
view.fetchSla();
- };
- $.get(requestURL, requestData, successHandler, 'json');
- },
+ };
+ $.get(requestURL, requestData, successHandler, 'json');
+ },
fetchSla: function() {
var schedule = this.model.get('schedule');
@@ -342,25 +342,25 @@ azkaban.SummaryView = Backbone.View.extend({
return;
}
- var requestURL = contextURL + "/schedule"
- var requestData = {
- "scheduleId": schedule.scheduleId,
- "ajax": "slaInfo"
- };
- var model = this.model;
- var successHandler = function(data) {
+    var requestURL = contextURL + "/schedule";
+ var requestData = {
+ "scheduleId": schedule.scheduleId,
+ "ajax": "slaInfo"
+ };
+ var model = this.model;
+ var successHandler = function(data) {
if (data == null || data.settings == null || data.settings.length == 0) {
return;
}
schedule.slaOptions = true;
model.set({'schedule': schedule});
- model.trigger('render');
- };
- $.get(requestURL, requestData, successHandler, 'json');
- },
+ model.trigger('render');
+ };
+ $.get(requestURL, requestData, successHandler, 'json');
+ },
- fetchLastRun: function() {
- var requestURL = contextURL + "/manager";
+ fetchLastRun: function() {
+ var requestURL = contextURL + "/manager";
var requestData = {
'ajax': 'fetchLastSuccessfulFlowExecution',
'project': projectName,
@@ -377,22 +377,22 @@ azkaban.SummaryView = Backbone.View.extend({
flowStatsView.show(data.execId);
};
$.get(requestURL, requestData, successHandler, 'json');
- },
-
- handleChangeView: function(evt) {
- },
-
- render: function(evt) {
- var data = {
- projectName: projectName,
- flowName: flowId,
- jobTypes: this.model.get('jobTypes'),
- schedule: this.model.get('schedule'),
- };
- dust.render("flowsummary", data, function(err, out) {
- $('#summary-view-content').html(out);
- });
- },
+ },
+
+ handleChangeView: function(evt) {
+ },
+
+ render: function(evt) {
+ var data = {
+ projectName: projectName,
+ flowName: flowId,
+ jobTypes: this.model.get('jobTypes'),
+      schedule: this.model.get('schedule')
+ };
+ dust.render("flowsummary", data, function(err, out) {
+ $('#summary-view-content').html(out);
+ });
+  }
});
var graphModel;
@@ -411,105 +411,105 @@ var executionsTimeGraphView;
var slaView;
$(function() {
- var selected;
- // Execution model has to be created before the window switches the tabs.
- executionModel = new azkaban.ExecutionModel();
- executionsView = new azkaban.ExecutionsView({
- el: $('#executionsView'),
- model: executionModel
- });
+ var selected;
+ // Execution model has to be created before the window switches the tabs.
+ executionModel = new azkaban.ExecutionModel();
+ executionsView = new azkaban.ExecutionsView({
+ el: $('#executionsView'),
+ model: executionModel
+ });
summaryModel = new azkaban.SummaryModel();
- summaryView = new azkaban.SummaryView({
- el: $('#summaryView'),
- model: summaryModel
- });
+ summaryView = new azkaban.SummaryView({
+ el: $('#summaryView'),
+ model: summaryModel
+ });
flowStatsModel = new azkaban.FlowStatsModel();
- flowStatsView = new azkaban.FlowStatsView({
- el: $('#flow-stats-container'),
- model: flowStatsModel
- });
+ flowStatsView = new azkaban.FlowStatsView({
+ el: $('#flow-stats-container'),
+ model: flowStatsModel
+ });
flowTabView = new azkaban.FlowTabView({
- el: $('#headertabs'),
- selectedView: selected
- });
-
- graphModel = new azkaban.GraphModel();
- mainSvgGraphView = new azkaban.SvgGraphView({
- el: $('#svgDiv'),
- model: graphModel,
- rightClick: {
- "node": nodeClickCallback,
- "edge": edgeClickCallback,
- "graph": graphClickCallback
- }
- });
+ el: $('#headertabs'),
+ selectedView: selected
+ });
+
+ graphModel = new azkaban.GraphModel();
+ mainSvgGraphView = new azkaban.SvgGraphView({
+ el: $('#svgDiv'),
+ model: graphModel,
+ rightClick: {
+ "node": nodeClickCallback,
+ "edge": edgeClickCallback,
+ "graph": graphClickCallback
+ }
+ });
jobsListView = new azkaban.JobListView({
- el: $('#joblist-panel'),
- model: graphModel,
- contextMenuCallback: jobClickCallback
- });
+ el: $('#joblist-panel'),
+ model: graphModel,
+ contextMenuCallback: jobClickCallback
+ });
executionsTimeGraphView = new azkaban.TimeGraphView({
- el: $('#timeGraph'),
- model: executionModel,
+ el: $('#timeGraph'),
+ model: executionModel,
modelField: 'executions'
- });
-
- slaView = new azkaban.ChangeSlaView({el:$('#sla-options')});
-
- var requestURL = contextURL + "/manager";
- // Set up the Flow options view. Create a new one every time :p
- $('#executebtn').click(function() {
- var data = graphModel.get("data");
- var nodes = data.nodes;
- var executingData = {
- project: projectName,
- ajax: "executeFlow",
- flow: flowId
- };
-
- flowExecuteDialogView.show(executingData);
- });
-
- var requestData = {
- "project": projectName,
- "ajax": "fetchflowgraph",
- "flow": flowId
- };
- var successHandler = function(data) {
- console.log("data fetched");
- graphModel.addFlow(data);
- graphModel.trigger("change:graph");
-
- // Handle the hash changes here so the graph finishes rendering first.
- if (window.location.hash) {
- var hash = window.location.hash;
- if (hash == "#executions") {
- flowTabView.handleExecutionLinkClick();
- }
- if (hash == "#summary") {
- flowTabView.handleSummaryLinkClick();
- }
- else if (hash == "#graph") {
- // Redundant, but we may want to change the default.
- selected = "graph";
- }
- else {
- if ("#page" == hash.substring(0, "#page".length)) {
- var page = hash.substring("#page".length, hash.length);
- console.log("page " + page);
- flowTabView.handleExecutionLinkClick();
- executionModel.set({"page": parseInt(page)});
- }
- else {
- selected = "graph";
- }
- }
- }
- };
- $.get(requestURL, requestData, successHandler, "json");
+ });
+
+ slaView = new azkaban.ChangeSlaView({el:$('#sla-options')});
+
+ var requestURL = contextURL + "/manager";
+ // Set up the Flow options view. Create a new one every time :p
+ $('#executebtn').click(function() {
+ var data = graphModel.get("data");
+ var nodes = data.nodes;
+ var executingData = {
+ project: projectName,
+ ajax: "executeFlow",
+ flow: flowId
+ };
+
+ flowExecuteDialogView.show(executingData);
+ });
+
+ var requestData = {
+ "project": projectName,
+ "ajax": "fetchflowgraph",
+ "flow": flowId
+ };
+ var successHandler = function(data) {
+ console.log("data fetched");
+ graphModel.addFlow(data);
+ graphModel.trigger("change:graph");
+
+ // Handle the hash changes here so the graph finishes rendering first.
+ if (window.location.hash) {
+ var hash = window.location.hash;
+ if (hash == "#executions") {
+ flowTabView.handleExecutionLinkClick();
+ }
+      else if (hash == "#summary") {
+ flowTabView.handleSummaryLinkClick();
+ }
+ else if (hash == "#graph") {
+ // Redundant, but we may want to change the default.
+ selected = "graph";
+ }
+ else {
+ if ("#page" == hash.substring(0, "#page".length)) {
+ var page = hash.substring("#page".length, hash.length);
+ console.log("page " + page);
+ flowTabView.handleExecutionLinkClick();
+ executionModel.set({"page": parseInt(page)});
+ }
+ else {
+ selected = "graph";
+ }
+ }
+ }
+ };
+ $.get(requestURL, requestData, successHandler, "json");
});
src/web/js/azkaban/view/flow-execute-dialog.js 1238(+619 -619)
diff --git a/src/web/js/azkaban/view/flow-execute-dialog.js b/src/web/js/azkaban/view/flow-execute-dialog.js
index ad39ac4..132b8eb 100644
--- a/src/web/js/azkaban/view/flow-execute-dialog.js
+++ b/src/web/js/azkaban/view/flow-execute-dialog.js
@@ -18,469 +18,469 @@ $.namespace('azkaban');
var flowExecuteDialogView;
azkaban.FlowExecuteDialogView = Backbone.View.extend({
- events: {
- "click .closeExecPanel": "hideExecutionOptionPanel",
- "click #schedule-btn": "scheduleClick",
- "click #execute-btn": "handleExecuteFlow"
- },
-
- initialize: function(settings) {
- this.model.bind('change:flowinfo', this.changeFlowInfo, this);
- $("#override-success-emails").click(function(evt) {
- if ($(this).is(':checked')) {
- $('#success-emails').attr('disabled', null);
- }
- else {
- $('#success-emails').attr('disabled', "disabled");
- }
- });
-
- $("#override-failure-emails").click(function(evt) {
- if ($(this).is(':checked')) {
- $('#failure-emails').attr('disabled', null);
- }
- else {
- $('#failure-emails').attr('disabled', "disabled");
- }
- });
- },
-
- render: function() {
- },
-
- getExecutionOptionData: function() {
- var failureAction = $('#failure-action').val();
- var failureEmails = $('#failure-emails').val();
- var successEmails = $('#success-emails').val();
- var notifyFailureFirst = $('#notify-failure-first').is(':checked');
- var notifyFailureLast = $('#notify-failure-last').is(':checked');
- var failureEmailsOverride = $("#override-failure-emails").is(':checked');
- var successEmailsOverride = $("#override-success-emails").is(':checked');
-
- var flowOverride = {};
- var editRows = $(".editRow");
- for (var i = 0; i < editRows.length; ++i) {
- var row = editRows[i];
- var td = $(row).find('span');
- var key = $(td[0]).text();
- var val = $(td[1]).text();
-
- if (key && key.length > 0) {
- flowOverride[key] = val;
- }
- }
-
- var data = this.model.get("data");
- var disabledList = gatherDisabledNodes(data);
-
- var executingData = {
- projectId: projectId,
- project: this.projectName,
- ajax: "executeFlow",
- flow: this.flowId,
- disabled: JSON.stringify(disabledList),
- failureEmailsOverride: failureEmailsOverride,
- successEmailsOverride: successEmailsOverride,
- failureAction: failureAction,
- failureEmails: failureEmails,
- successEmails: successEmails,
- notifyFailureFirst: notifyFailureFirst,
- notifyFailureLast: notifyFailureLast,
- flowOverride: flowOverride
- };
-
- // Set concurrency option, default is skip
-
- var concurrentOption = $('input[name=concurrent]:checked').val();
- executingData.concurrentOption = concurrentOption;
- if (concurrentOption == "pipeline") {
- var pipelineLevel = $("#pipeline-level").val();
- executingData.pipelineLevel = pipelineLevel;
- }
- else if (concurrentOption == "queue") {
- executingData.queueLevel = $("#queueLevel").val();
- }
-
- return executingData;
- },
-
- changeFlowInfo: function() {
- var successEmails = this.model.get("successEmails");
- var failureEmails = this.model.get("failureEmails");
- var failureActions = this.model.get("failureAction");
- var notifyFailure = this.model.get("notifyFailure");
- var flowParams = this.model.get("flowParams");
- var isRunning = this.model.get("isRunning");
- var concurrentOption = this.model.get("concurrentOption");
- var pipelineLevel = this.model.get("pipelineLevel");
- var pipelineExecutionId = this.model.get("pipelineExecution");
- var queueLevel = this.model.get("queueLevel");
- var nodeStatus = this.model.get("nodeStatus");
- var overrideSuccessEmails = this.model.get("failureEmailsOverride");
- var overrideFailureEmails = this.model.get("successEmailsOverride");
-
- if (overrideSuccessEmails) {
- $('#override-success-emails').attr('checked', true);
- }
- else {
- $('#success-emails').attr('disabled','disabled');
- }
- if (overrideFailureEmails) {
- $('#override-failure-emails').attr('checked', true);
- }
- else {
- $('#failure-emails').attr('disabled','disabled');
- }
-
- if (successEmails) {
- $('#success-emails').val(successEmails.join());
- }
- if (failureEmails) {
- $('#failure-emails').val(failureEmails.join());
- }
- if (failureActions) {
- $('#failure-action').val(failureActions);
- }
-
- if (notifyFailure.first) {
- $('#notify-failure-first').attr('checked', true);
- $('#notify-failure-first').parent('.btn').addClass('active');
- }
- if (notifyFailure.last) {
- $('#notify-failure-last').attr('checked', true);
- $('#notify-failure-last').parent('.btn').addClass('active');
- }
-
- if (concurrentOption) {
- $('input[value='+concurrentOption+'][name="concurrent"]').attr('checked', true);
- }
- if (pipelineLevel) {
- $('#pipeline-level').val(pipelineLevel);
- }
- if (queueLevel) {
- $('#queueLevel').val(queueLevel);
- }
-
- if (flowParams) {
- for (var key in flowParams) {
- editTableView.handleAddRow({
- paramkey: key,
- paramvalue: flowParams[key]
- });
- }
- }
- },
-
- show: function(data) {
- var projectName = data.project;
- var flowId = data.flow;
- var jobId = data.job;
-
- // ExecId is optional
- var execId = data.execid;
- var exgraph = data.exgraph;
-
- this.projectName = projectName;
- this.flowId = flowId;
-
- var self = this;
- var loadCallback = function() {
- if (jobId) {
- self.showExecuteJob(projectName, flowId, jobId, data.withDep);
- }
- else {
- self.showExecuteFlow(projectName, flowId);
- }
- }
-
- var loadedId = executableGraphModel.get("flowId");
- this.loadGraph(projectName, flowId, exgraph, loadCallback);
- this.loadFlowInfo(projectName, flowId, execId);
- },
-
- showExecuteFlow: function(projectName, flowId) {
- $("#execute-flow-panel-title").text("Execute Flow " + flowId);
- this.showExecutionOptionPanel();
-
- // Triggers a render
- this.model.trigger("change:graph");
- },
-
- showExecuteJob: function(projectName, flowId, jobId, withDep) {
- sideMenuDialogView.menuSelect($("#flow-option"));
- $("#execute-flow-panel-title").text("Execute Flow " + flowId);
-
- var data = this.model.get("data");
- var disabled = this.model.get("disabled");
-
- // Disable all, then re-enable those you want.
- disableAll();
-
- var jobNode = data.nodeMap[jobId];
- touchNode(jobNode, false);
-
- if (withDep) {
- recurseAllAncestors(jobNode, false);
- }
-
- this.showExecutionOptionPanel();
- this.model.trigger("change:graph");
- },
-
- showExecutionOptionPanel: function() {
- sideMenuDialogView.menuSelect($("#flow-option"));
- $('#execute-flow-panel').modal();
- },
-
- hideExecutionOptionPanel: function() {
- $('#execute-flow-panel').modal("hide");
- },
-
- scheduleClick: function() {
- console.log("click schedule button.");
- this.hideExecutionOptionPanel();
- schedulePanelView.showSchedulePanel();
- },
-
- loadFlowInfo: function(projectName, flowId, execId) {
- console.log("Loading flow " + flowId);
- fetchFlowInfo(this.model, projectName, flowId, execId);
- },
-
- loadGraph: function(projectName, flowId, exgraph, callback) {
- console.log("Loading flow " + flowId);
- var requestURL = contextURL + "/manager";
-
- var graphModel = executableGraphModel;
- // fetchFlow(this.model, projectName, flowId, true);
- var requestData = {
- "project": projectName,
- "ajax": "fetchflowgraph",
- "flow": flowId
- };
- var self = this;
- var successHandler = function(data) {
- console.log("data fetched");
- graphModel.addFlow(data);
-
- if (exgraph) {
- self.assignInitialStatus(data, exgraph);
- }
-
- // Auto disable jobs that are finished.
- disableFinishedJobs(data);
- executingSvgGraphView = new azkaban.SvgGraphView({
- el: $('#flow-executing-graph'),
- model: graphModel,
- render: false,
- rightClick: {
- "node": expanelNodeClickCallback,
- "edge": expanelEdgeClickCallback,
- "graph": expanelGraphClickCallback
- },
- tooltipcontainer: "#svg-div-custom"
- });
-
- if (callback) {
- callback.call(this);
- }
- };
- $.get(requestURL, requestData, successHandler, "json");
- },
-
- assignInitialStatus: function(data, statusData) {
- // Copies statuses over from the previous execution if it exists.
- var statusNodeMap = statusData.nodeMap;
- var nodes = data.nodes;
- for (var i = 0; i<nodes.length; ++i) {
- var node = nodes[i];
- var statusNode = statusNodeMap[node.id];
- if (statusNode) {
- node.status = statusNode.status;
- if (node.type == "flow" && statusNode.type == "flow") {
- this.assignInitialStatus(node, statusNode);
- }
- }
- }
- },
-
- handleExecuteFlow: function(evt) {
- console.log("click schedule button.");
- var executeURL = contextURL + "/executor";
- var executingData = this.getExecutionOptionData();
- executeFlow(executingData);
- }
+ events: {
+ "click .closeExecPanel": "hideExecutionOptionPanel",
+ "click #schedule-btn": "scheduleClick",
+ "click #execute-btn": "handleExecuteFlow"
+ },
+
+ initialize: function(settings) {
+ this.model.bind('change:flowinfo', this.changeFlowInfo, this);
+ $("#override-success-emails").click(function(evt) {
+ if ($(this).is(':checked')) {
+ $('#success-emails').attr('disabled', null);
+ }
+ else {
+ $('#success-emails').attr('disabled', "disabled");
+ }
+ });
+
+ $("#override-failure-emails").click(function(evt) {
+ if ($(this).is(':checked')) {
+ $('#failure-emails').attr('disabled', null);
+ }
+ else {
+ $('#failure-emails').attr('disabled', "disabled");
+ }
+ });
+ },
+
+ render: function() {
+ },
+
+ getExecutionOptionData: function() {
+ var failureAction = $('#failure-action').val();
+ var failureEmails = $('#failure-emails').val();
+ var successEmails = $('#success-emails').val();
+ var notifyFailureFirst = $('#notify-failure-first').is(':checked');
+ var notifyFailureLast = $('#notify-failure-last').is(':checked');
+ var failureEmailsOverride = $("#override-failure-emails").is(':checked');
+ var successEmailsOverride = $("#override-success-emails").is(':checked');
+
+ var flowOverride = {};
+ var editRows = $(".editRow");
+ for (var i = 0; i < editRows.length; ++i) {
+ var row = editRows[i];
+ var td = $(row).find('span');
+ var key = $(td[0]).text();
+ var val = $(td[1]).text();
+
+ if (key && key.length > 0) {
+ flowOverride[key] = val;
+ }
+ }
+
+ var data = this.model.get("data");
+ var disabledList = gatherDisabledNodes(data);
+
+ var executingData = {
+ projectId: projectId,
+ project: this.projectName,
+ ajax: "executeFlow",
+ flow: this.flowId,
+ disabled: JSON.stringify(disabledList),
+ failureEmailsOverride: failureEmailsOverride,
+ successEmailsOverride: successEmailsOverride,
+ failureAction: failureAction,
+ failureEmails: failureEmails,
+ successEmails: successEmails,
+ notifyFailureFirst: notifyFailureFirst,
+ notifyFailureLast: notifyFailureLast,
+ flowOverride: flowOverride
+ };
+
+ // Set concurrency option, default is skip
+
+ var concurrentOption = $('input[name=concurrent]:checked').val();
+ executingData.concurrentOption = concurrentOption;
+ if (concurrentOption == "pipeline") {
+ var pipelineLevel = $("#pipeline-level").val();
+ executingData.pipelineLevel = pipelineLevel;
+ }
+ else if (concurrentOption == "queue") {
+ executingData.queueLevel = $("#queueLevel").val();
+ }
+
+ return executingData;
+ },
+
+ changeFlowInfo: function() {
+ var successEmails = this.model.get("successEmails");
+ var failureEmails = this.model.get("failureEmails");
+ var failureActions = this.model.get("failureAction");
+ var notifyFailure = this.model.get("notifyFailure");
+ var flowParams = this.model.get("flowParams");
+ var isRunning = this.model.get("isRunning");
+ var concurrentOption = this.model.get("concurrentOption");
+ var pipelineLevel = this.model.get("pipelineLevel");
+ var pipelineExecutionId = this.model.get("pipelineExecution");
+ var queueLevel = this.model.get("queueLevel");
+ var nodeStatus = this.model.get("nodeStatus");
+ var overrideSuccessEmails = this.model.get("failureEmailsOverride");
+ var overrideFailureEmails = this.model.get("successEmailsOverride");
+
+ if (overrideSuccessEmails) {
+ $('#override-success-emails').attr('checked', true);
+ }
+ else {
+ $('#success-emails').attr('disabled','disabled');
+ }
+ if (overrideFailureEmails) {
+ $('#override-failure-emails').attr('checked', true);
+ }
+ else {
+ $('#failure-emails').attr('disabled','disabled');
+ }
+
+ if (successEmails) {
+ $('#success-emails').val(successEmails.join());
+ }
+ if (failureEmails) {
+ $('#failure-emails').val(failureEmails.join());
+ }
+ if (failureActions) {
+ $('#failure-action').val(failureActions);
+ }
+
+ if (notifyFailure.first) {
+ $('#notify-failure-first').attr('checked', true);
+ $('#notify-failure-first').parent('.btn').addClass('active');
+ }
+ if (notifyFailure.last) {
+ $('#notify-failure-last').attr('checked', true);
+ $('#notify-failure-last').parent('.btn').addClass('active');
+ }
+
+ if (concurrentOption) {
+ $('input[value='+concurrentOption+'][name="concurrent"]').attr('checked', true);
+ }
+ if (pipelineLevel) {
+ $('#pipeline-level').val(pipelineLevel);
+ }
+ if (queueLevel) {
+ $('#queueLevel').val(queueLevel);
+ }
+
+ if (flowParams) {
+ for (var key in flowParams) {
+ editTableView.handleAddRow({
+ paramkey: key,
+ paramvalue: flowParams[key]
+ });
+ }
+ }
+ },
+
+ show: function(data) {
+ var projectName = data.project;
+ var flowId = data.flow;
+ var jobId = data.job;
+
+ // ExecId is optional
+ var execId = data.execid;
+ var exgraph = data.exgraph;
+
+ this.projectName = projectName;
+ this.flowId = flowId;
+
+ var self = this;
+ var loadCallback = function() {
+ if (jobId) {
+ self.showExecuteJob(projectName, flowId, jobId, data.withDep);
+ }
+ else {
+ self.showExecuteFlow(projectName, flowId);
+ }
+    };
+
+ var loadedId = executableGraphModel.get("flowId");
+ this.loadGraph(projectName, flowId, exgraph, loadCallback);
+ this.loadFlowInfo(projectName, flowId, execId);
+ },
+
+ showExecuteFlow: function(projectName, flowId) {
+ $("#execute-flow-panel-title").text("Execute Flow " + flowId);
+ this.showExecutionOptionPanel();
+
+ // Triggers a render
+ this.model.trigger("change:graph");
+ },
+
+ showExecuteJob: function(projectName, flowId, jobId, withDep) {
+ sideMenuDialogView.menuSelect($("#flow-option"));
+ $("#execute-flow-panel-title").text("Execute Flow " + flowId);
+
+ var data = this.model.get("data");
+ var disabled = this.model.get("disabled");
+
+ // Disable all, then re-enable those you want.
+ disableAll();
+
+ var jobNode = data.nodeMap[jobId];
+ touchNode(jobNode, false);
+
+ if (withDep) {
+ recurseAllAncestors(jobNode, false);
+ }
+
+ this.showExecutionOptionPanel();
+ this.model.trigger("change:graph");
+ },
+
+ showExecutionOptionPanel: function() {
+ sideMenuDialogView.menuSelect($("#flow-option"));
+ $('#execute-flow-panel').modal();
+ },
+
+ hideExecutionOptionPanel: function() {
+ $('#execute-flow-panel').modal("hide");
+ },
+
+ scheduleClick: function() {
+ console.log("click schedule button.");
+ this.hideExecutionOptionPanel();
+ schedulePanelView.showSchedulePanel();
+ },
+
+ loadFlowInfo: function(projectName, flowId, execId) {
+ console.log("Loading flow " + flowId);
+ fetchFlowInfo(this.model, projectName, flowId, execId);
+ },
+
+ loadGraph: function(projectName, flowId, exgraph, callback) {
+ console.log("Loading flow " + flowId);
+ var requestURL = contextURL + "/manager";
+
+ var graphModel = executableGraphModel;
+ // fetchFlow(this.model, projectName, flowId, true);
+ var requestData = {
+ "project": projectName,
+ "ajax": "fetchflowgraph",
+ "flow": flowId
+ };
+ var self = this;
+ var successHandler = function(data) {
+ console.log("data fetched");
+ graphModel.addFlow(data);
+
+ if (exgraph) {
+ self.assignInitialStatus(data, exgraph);
+ }
+
+ // Auto disable jobs that are finished.
+ disableFinishedJobs(data);
+ executingSvgGraphView = new azkaban.SvgGraphView({
+ el: $('#flow-executing-graph'),
+ model: graphModel,
+ render: false,
+ rightClick: {
+ "node": expanelNodeClickCallback,
+ "edge": expanelEdgeClickCallback,
+ "graph": expanelGraphClickCallback
+ },
+ tooltipcontainer: "#svg-div-custom"
+ });
+
+ if (callback) {
+ callback.call(this);
+ }
+ };
+ $.get(requestURL, requestData, successHandler, "json");
+ },
+
+ assignInitialStatus: function(data, statusData) {
+ // Copies statuses over from the previous execution if it exists.
+ var statusNodeMap = statusData.nodeMap;
+ var nodes = data.nodes;
+ for (var i = 0; i<nodes.length; ++i) {
+ var node = nodes[i];
+ var statusNode = statusNodeMap[node.id];
+ if (statusNode) {
+ node.status = statusNode.status;
+ if (node.type == "flow" && statusNode.type == "flow") {
+ this.assignInitialStatus(node, statusNode);
+ }
+ }
+ }
+ },
+
+ handleExecuteFlow: function(evt) {
+ console.log("click schedule button.");
+ var executeURL = contextURL + "/executor";
+ var executingData = this.getExecutionOptionData();
+ executeFlow(executingData);
+ }
});
var editTableView;
azkaban.EditTableView = Backbone.View.extend({
  events: {
    "click table #add-btn": "handleAddRow",
    "click table .editable": "handleEditColumn",
    "click table .remove-btn": "handleRemoveColumn"
  },

  initialize: function(setting) {
  },

  // Builds the right-floated "Delete" button wrapper used by value cells.
  // Extracted: this DOM construction was duplicated verbatim in
  // handleAddRow and closeEditingTarget.
  createRemoveButton: function() {
    var remove = document.createElement("div");
    $(remove).addClass("pull-right").addClass('remove-btn');
    var removeBtn = document.createElement("button");
    $(removeBtn).attr('type', 'button');
    $(removeBtn).addClass('btn').addClass('btn-xs').addClass('btn-danger');
    $(removeBtn).text('Delete');
    $(remove).append(removeBtn);
    return remove;
  },

  // Appends a key/value row before the .addRow row. `data` may carry
  // initial paramkey/paramvalue text. Returns the created <tr>.
  handleAddRow: function(data) {
    var name = data.paramkey ? data.paramkey : "";
    var value = data.paramvalue ? data.paramvalue : "";

    var tr = document.createElement("tr");
    var tdName = document.createElement("td");
    $(tdName).addClass('property-key');
    var tdValue = document.createElement("td");

    var nameData = document.createElement("span");
    $(nameData).addClass("spanValue");
    $(nameData).text(name);
    var valueData = document.createElement("span");
    $(valueData).addClass("spanValue");
    $(valueData).text(value);

    $(tdName).append(nameData);
    $(tdName).addClass("editable");

    $(tdValue).append(valueData);
    $(tdValue).append(this.createRemoveButton());
    $(tdValue).addClass("editable").addClass('value');

    $(tr).addClass("editRow");
    $(tr).append(tdName);
    $(tr).append(tdValue);

    $(tr).insertBefore(".addRow");
    return tr;
  },

  // Swaps a cell's text for an inline <input>; commits on blur or Enter.
  handleEditColumn: function(evt) {
    var curTarget = evt.currentTarget;

    var text = $(curTarget).children(".spanValue").text();
    $(curTarget).empty();

    var input = document.createElement("input");
    $(input).attr("type", "text");
    $(input).addClass('form-control').addClass('input-sm');
    $(input).css("width", "100%");
    $(input).val(text);
    $(curTarget).addClass("editing");
    $(curTarget).append(input);
    $(input).focus();

    var obj = this;
    $(input).focusout(function(evt) {
      obj.closeEditingTarget(evt);
    });

    $(input).keypress(function(evt) {
      if (evt.which == 13) { // Enter commits the edit.
        obj.closeEditingTarget(evt);
      }
    });
  },

  // Deletes the table row containing the clicked remove button.
  handleRemoveColumn: function(evt) {
    var curTarget = evt.currentTarget;
    // Should be the table
    var row = curTarget.parentElement.parentElement;
    $(row).remove();
  },

  // Commits an inline edit: replaces the <input> with a text span, and
  // restores the delete button on value cells.
  closeEditingTarget: function(evt) {
    var input = evt.currentTarget;
    var text = $(input).val();
    var parent = $(input).parent();
    $(parent).empty();

    var valueData = document.createElement("span");
    $(valueData).addClass("spanValue");
    $(valueData).text(text);

    if ($(parent).hasClass("value")) {
      $(parent).append(this.createRemoveButton());
    }

    $(parent).removeClass("editing");
    $(parent).append(valueData);
  }
});
var sideMenuDialogView;
azkaban.SideMenuDialogView = Backbone.View.extend({
  events: {
    "click .menu-header": "menuClick"
  },

  initialize: function(settings) {
    // Tag every top-level child as a collapsible menu header, hide its
    // caption, then open the default "flow-option" section.
    $(this.el).children().each(function() {
      $(this).addClass("menu-header");
      $(this).find(".menu-caption").hide();
    });
    this.menuSelect($("#flow-option"));
  },

  menuClick: function(evt) {
    this.menuSelect(evt.currentTarget);
  },

  // Activates a header: collapses all sections and panels, then expands the
  // selected header and shows the panel named in its "viewpanel" attribute.
  menuSelect: function(target) {
    if ($(target).hasClass("active")) {
      return; // Already selected — nothing to do.
    }

    $(".side-panel").each(function() {
      $(this).hide();
    });

    $(".menu-header").each(function() {
      $(this).find(".menu-caption").slideUp("fast");
      $(this).removeClass("active");
    });

    $(target).addClass("active");
    $(target).find(".menu-caption").slideDown("fast");
    var panelName = $(target).attr("viewpanel");
    $("#" + panelName).show();
  }
});
var handleJobMenuClick = function(action, el, pos) {
  // Job context-menu dispatch: open the job's manager page either in the
  // current window ("open") or a new one ("openwindow").
  var jobid = el[0].jobid;
  var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" + flowName + "&job=" + jobid;

  if (action == "open") {
    window.location.href = requestURL;
  }
  else if (action == "openwindow") {
    window.open(requestURL);
  }
}
var executableGraphModel;
@@ -489,226 +489,226 @@ var executableGraphModel;
* Disable jobs that need to be disabled
*/
var disableFinishedJobs = function(data) {
  // Pre-disables nodes based on their status from a prior run: disabled or
  // skipped nodes become READY but stay disabled; succeeded/running nodes
  // are disabled as-is; cancelled nodes are reset to READY and enabled;
  // everything else is enabled, recursing into embedded flows.
  data.nodes.forEach(function(node) {
    switch (node.status) {
      case "DISABLED":
      case "SKIPPED":
        node.status = "READY";
        node.disabled = true;
        break;
      case "SUCCEEDED":
      case "RUNNING":
        node.disabled = true;
        break;
      case "CANCELLED":
        node.disabled = false;
        node.status = "READY";
        break;
      default:
        node.disabled = false;
        if (node.type == "flow") {
          disableFinishedJobs(node);
        }
    }
  });
}
/**
 * Enable all jobs. Note: recurseTree is invoked here with recurse=false,
 * so embedded sub-flow nodes are not descended into.
 */
var enableAll = function() {
  // Clear the disabled flag on every top-level node, then notify listeners.
  recurseTree(executableGraphModel.get("data"), false, false);
  executableGraphModel.trigger("change:disabled");
}
var disableAll = function() {
  // Set the disabled flag on every top-level node, then notify listeners.
  recurseTree(executableGraphModel.get("data"), true, false);
  executableGraphModel.trigger("change:disabled");
}
var recurseTree = function(data, disabled, recurse) {
  // Sets the disabled flag on every node in data.nodes. When `recurse` is
  // true, descends into embedded flow nodes at every depth.
  for (var i = 0; i < data.nodes.length; ++i) {
    var node = data.nodes[i];
    node.disabled = disabled;

    if (node.type == "flow" && recurse) {
      // Bug fix: the recursive call previously dropped the `recurse`
      // argument, so descent stopped after a single level of sub-flows.
      recurseTree(node, disabled, recurse);
    }
  }
}
var touchNode = function(node, disable) {
  // Flip one node's disabled flag and notify listeners.
  node.disabled = disable;
  executableGraphModel.trigger("change:disabled");
}
var touchParents = function(node, disable) {
  // Apply the disabled flag to every direct parent of `node`.
  var parents = node.inNodes;
  if (parents) {
    for (var key in parents) {
      parents[key].disabled = disable;
    }
  }
  executableGraphModel.trigger("change:disabled");
}
var touchChildren = function(node, disable) {
  // Apply the disabled flag to every direct child of `node`.
  var children = node.outNodes;
  if (children) {
    for (var key in children) {
      children[key].disabled = disable;
    }
  }
  executableGraphModel.trigger("change:disabled");
}
var touchAncestors = function(node, disable) {
  // Apply the disabled flag to node's entire ancestor chain, then notify.
  recurseAllAncestors(node, disable);
  executableGraphModel.trigger("change:disabled");
}
var touchDescendents = function(node, disable) {
  // Apply the disabled flag to node's entire descendant chain, then notify.
  recurseAllDescendents(node, disable);
  executableGraphModel.trigger("change:disabled");
}
var gatherDisabledNodes = function(data) {
  // Collects the ids of disabled nodes. A disabled node is recorded as its
  // bare id; an enabled sub-flow containing disabled descendants is
  // recorded as {id, children: [...]}.
  var disabled = [];

  data.nodes.forEach(function(node) {
    if (node.disabled) {
      disabled.push(node.id);
    }
    else if (node.type == "flow") {
      var children = gatherDisabledNodes(node);
      if (children && children.length > 0) {
        disabled.push({id: node.id, children: children});
      }
    }
  });

  return disabled;
}
function recurseAllAncestors(node, disable) {
  // Walks inNodes transitively, setting the disabled flag on every ancestor.
  var parents = node.inNodes;
  if (!parents) {
    return;
  }
  for (var key in parents) {
    parents[key].disabled = disable;
    recurseAllAncestors(parents[key], disable);
  }
}
function recurseAllDescendents(node, disable) {
  // Walks outNodes transitively, setting the disabled flag on every
  // descendant.
  var children = node.outNodes;
  if (!children) {
    return;
  }
  for (var key in children) {
    children[key].disabled = disable;
    recurseAllDescendents(children[key], disable);
  }
}
var expanelNodeClickCallback = function(event, model, node) {
  // Builds and shows the right-click context menu for a graph node. Flow
  // nodes get expand/collapse + open entries; plain jobs get an open-job
  // entry. Both get the shared enable/disable submenu.
  console.log("Node clicked callback");
  var jobId = node.id;
  var flowId = executableGraphModel.get("flowId");

  var menu;
  if (node.type == "flow") {
    var flowRequestURL = contextURL + "/manager?project=" + projectName + "&flow=" + node.flowId;
    var toggleItem = node.expanded
      ? {title: "Collapse Flow...", callback: function() {model.trigger("collapseFlow", node);}}
      : {title: "Expand Flow...", callback: function() {model.trigger("expandFlow", node);}};
    menu = [
      toggleItem,
      {title: "Open Flow in New Window...", callback: function() {window.open(flowRequestURL);}}
    ];
  }
  else {
    var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" + flowId + "&job=" + jobId;
    menu = [
      {title: "Open Job in New Window...", callback: function() {window.open(requestURL);}}
    ];
  }

  // Enable/disable entries common to every node type.
  $.merge(menu, [
    {break: 1},
    {title: "Enable", callback: function() {touchNode(node, false);}, submenu: [
      {title: "Parents", callback: function(){touchParents(node, false);}},
      {title: "Ancestors", callback: function(){touchAncestors(node, false);}},
      {title: "Children", callback: function(){touchChildren(node, false);}},
      {title: "Descendents", callback: function(){touchDescendents(node, false);}},
      {title: "Enable All", callback: function(){enableAll();}}
    ]},
    {title: "Disable", callback: function() {touchNode(node, true)}, submenu: [
      {title: "Parents", callback: function(){touchParents(node, true);}},
      {title: "Ancestors", callback: function(){touchAncestors(node, true);}},
      {title: "Children", callback: function(){touchChildren(node, true);}},
      {title: "Descendents", callback: function(){touchDescendents(node, true);}},
      {title: "Disable All", callback: function(){disableAll();}}
    ]},
    {title: "Center Job", callback: function() {model.trigger("centerNode", node);}}
  ]);

  contextMenuView.show(event, menu);
}
var expanelEdgeClickCallback = function(event) {
  // Edges expose no context-menu actions; just trace the click.
  console.log("Edge clicked callback");
}
var expanelGraphClickCallback = function(event) {
  // Context menu for clicks on the graph background: open the flow page,
  // bulk enable/disable, or re-center the pan/zoom.
  console.log("Graph clicked callback");
  var flowId = executableGraphModel.get("flowId");
  var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" + flowId;

  contextMenuView.show(event, [
    {title: "Open Flow in New Window...", callback: function() {window.open(requestURL);}},
    {break: 1},
    {title: "Enable All", callback: function() {enableAll();}},
    {title: "Disable All", callback: function() {disableAll();}},
    {break: 1},
    {title: "Center Graph", callback: function() {executableGraphModel.trigger("resetPanZoom");}}
  ]);
}
var contextMenuView;
$(function() {
  // DOM-ready wiring: create the shared graph model and attach each view
  // to its container element.
  executableGraphModel = new azkaban.GraphModel();

  flowExecuteDialogView = new azkaban.FlowExecuteDialogView({
    el: $('#execute-flow-panel'),
    model: executableGraphModel
  });

  sideMenuDialogView = new azkaban.SideMenuDialogView({el: $('#graph-options')});
  editTableView = new azkaban.EditTableView({el: $('#editTable')});
  contextMenuView = new azkaban.ContextMenuView({el: $('#contextMenu')});
});
src/web/js/azkaban/view/flow-execution-list.js 736(+368 -368)
diff --git a/src/web/js/azkaban/view/flow-execution-list.js b/src/web/js/azkaban/view/flow-execution-list.js
index f99fc61..dff99b4 100644
--- a/src/web/js/azkaban/view/flow-execution-list.js
+++ b/src/web/js/azkaban/view/flow-execution-list.js
@@ -1,12 +1,12 @@
/*
* Copyright 2014 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -20,375 +20,375 @@
var executionListView;
azkaban.ExecutionListView = Backbone.View.extend({
- events: {
- //"contextmenu .flow-progress-bar": "handleProgressBoxClick"
- },
-
- initialize: function(settings) {
- this.model.bind('change:graph', this.renderJobs, this);
- this.model.bind('change:update', this.updateJobs, this);
-
- // This is for tabbing. Blah, hacky
- var executingBody = $("#executableBody")[0];
- executingBody.level = 0;
- },
-
- renderJobs: function(evt) {
- var data = this.model.get("data");
- var lastTime = data.endTime == -1 ? (new Date()).getTime() : data.endTime;
- var executingBody = $("#executableBody");
- this.updateJobRow(data.nodes, executingBody);
-
- var flowLastTime = data.endTime == -1 ? (new Date()).getTime() : data.endTime;
- var flowStartTime = data.startTime;
- this.updateProgressBar(data, flowStartTime, flowLastTime);
- },
+ events: {
+ //"contextmenu .flow-progress-bar": "handleProgressBoxClick"
+ },
+ initialize: function(settings) {
+ this.model.bind('change:graph', this.renderJobs, this);
+ this.model.bind('change:update', this.updateJobs, this);
+
+ // This is for tabbing. Blah, hacky
+ var executingBody = $("#executableBody")[0];
+ executingBody.level = 0;
+ },
+
+ renderJobs: function(evt) {
+ var data = this.model.get("data");
+ var lastTime = data.endTime == -1 ? (new Date()).getTime() : data.endTime;
+ var executingBody = $("#executableBody");
+ this.updateJobRow(data.nodes, executingBody);
+
+ var flowLastTime = data.endTime == -1 ? (new Date()).getTime() : data.endTime;
+ var flowStartTime = data.startTime;
+ this.updateProgressBar(data, flowStartTime, flowLastTime);
+ },
+
+//
+// handleProgressBoxClick: function(evt) {
+// var target = evt.currentTarget;
+// var job = target.job;
+// var attempt = target.attempt;
+//
+// var data = this.model.get("data");
+// var node = data.nodes[job];
+//
+// var jobId = event.currentTarget.jobid;
+// var requestURL = contextURL + "/manager?project=" + projectName + "&execid=" + execId + "&job=" + job + "&attempt=" + attempt;
+//
+// var menu = [
+// {title: "Open Job...", callback: function() {window.location.href=requestURL;}},
+// {title: "Open Job in New Window...", callback: function() {window.open(requestURL);}}
+// ];
//
-// handleProgressBoxClick: function(evt) {
-// var target = evt.currentTarget;
-// var job = target.job;
-// var attempt = target.attempt;
-//
-// var data = this.model.get("data");
-// var node = data.nodes[job];
-//
-// var jobId = event.currentTarget.jobid;
-// var requestURL = contextURL + "/manager?project=" + projectName + "&execid=" + execId + "&job=" + job + "&attempt=" + attempt;
-//
-// var menu = [
-// {title: "Open Job...", callback: function() {window.location.href=requestURL;}},
-// {title: "Open Job in New Window...", callback: function() {window.open(requestURL);}}
-// ];
-//
-// contextMenuView.show(evt, menu);
-// },
-
- updateJobs: function(evt) {
- var update = this.model.get("update");
- var lastTime = update.endTime == -1
- ? (new Date()).getTime()
- : update.endTime;
- var executingBody = $("#executableBody");
-
- if (update.nodes) {
- this.updateJobRow(update.nodes, executingBody);
- }
-
- var data = this.model.get("data");
- var flowLastTime = data.endTime == -1
- ? (new Date()).getTime()
- : data.endTime;
- var flowStartTime = data.startTime;
- this.updateProgressBar(data, flowStartTime, flowLastTime);
- },
-
- updateJobRow: function(nodes, body) {
- if (!nodes) {
- return;
- }
-
- nodes.sort(function(a,b) { return a.startTime - b.startTime; });
- for (var i = 0; i < nodes.length; ++i) {
- var node = nodes[i].changedNode ? nodes[i].changedNode : nodes[i];
-
- if (node.startTime < 0) {
- continue;
- }
- //var nodeId = node.id.replace(".", "\\\\.");
- var row = node.joblistrow;
- if (!row) {
- this.addNodeRow(node, body);
- }
-
- row = node.joblistrow;
- var statusDiv = $(row).find("> td.statustd > .status");
- statusDiv.text(statusStringMap[node.status]);
- $(statusDiv).attr("class", "status " + node.status);
-
- var startTimeTd = $(row).find("> td.startTime");
- var startdate = new Date(node.startTime);
- $(startTimeTd).text(getDateFormat(startdate));
-
- var endTimeTd = $(row).find("> td.endTime");
- if (node.endTime == -1) {
- $(endTimeTd).text("-");
- }
- else {
- var enddate = new Date(node.endTime);
- $(endTimeTd).text(getDateFormat(enddate));
- }
-
- var progressBar = $(row).find("> td.timeline > .flow-progress > .main-progress");
- if (!progressBar.hasClass(node.status)) {
- for (var j = 0; j < statusList.length; ++j) {
- var status = statusList[j];
- progressBar.removeClass(status);
- }
- progressBar.addClass(node.status);
- }
-
- // Create past attempts
- if (node.pastAttempts) {
- for (var a = 0; a < node.pastAttempts.length; ++a) {
- var attempt = node.pastAttempts[a];
- var attemptBox = attempt.attemptBox;
-
- if (!attemptBox) {
- var attemptBox = document.createElement("div");
- attempt.attemptBox = attemptBox;
-
- $(attemptBox).addClass("flow-progress-bar");
- $(attemptBox).addClass("attempt");
-
- $(attemptBox).css("float","left");
- $(attemptBox).bind("contextmenu", attemptRightClick);
-
- $(progressBar).before(attemptBox);
- attemptBox.job = node.id;
- attemptBox.attempt = a;
- }
- }
- }
-
- var elapsedTime = $(row).find("> td.elapsedTime");
- if (node.endTime == -1) {
- $(elapsedTime).text(getDuration(node.startTime, (new Date()).getTime()));
- }
- else {
- $(elapsedTime).text(getDuration(node.startTime, node.endTime));
- }
-
- if (node.nodes) {
- var subtableBody = $(row.subflowrow).find("> td > table");
- subtableBody[0].level = $(body)[0].level + 1;
- this.updateJobRow(node.nodes, subtableBody);
- }
- }
- },
-
- updateProgressBar: function(data, flowStartTime, flowLastTime) {
- if (data.startTime == -1) {
- return;
- }
-
- var outerWidth = $(".flow-progress").css("width");
- if (outerWidth) {
- if (outerWidth.substring(outerWidth.length - 2, outerWidth.length) == "px") {
- outerWidth = outerWidth.substring(0, outerWidth.length - 2);
- }
- outerWidth = parseInt(outerWidth);
- }
-
- var parentLastTime = data.endTime == -1 ? (new Date()).getTime() : data.endTime;
- var parentStartTime = data.startTime;
-
- var factor = outerWidth / (flowLastTime - flowStartTime);
- var outerProgressBarWidth = factor * (parentLastTime - parentStartTime);
- var outerLeftMargin = factor * (parentStartTime - flowStartTime);
-
- var nodes = data.nodes;
- for (var i = 0; i < nodes.length; ++i) {
- var node = nodes[i];
-
- // calculate the progress
- var tr = node.joblistrow;
- var outerProgressBar = $(tr).find("> td.timeline > .flow-progress");
- var progressBar = $(tr).find("> td.timeline > .flow-progress > .main-progress");
- var offsetLeft = 0;
- var minOffset = 0;
- progressBar.attempt = 0;
-
- // Shift the outer progress
- $(outerProgressBar).css("width", outerProgressBarWidth)
- $(outerProgressBar).css("margin-left", outerLeftMargin);
-
- // Add all the attempts
- if (node.pastAttempts) {
- var logURL = contextURL + "/executor?execid=" + execId + "&job=" + node.id + "&attempt=" + node.pastAttempts.length;
- var anchor = $(tr).find("> td.details > a");
- if (anchor.length != 0) {
- $(anchor).attr("href", logURL);
- progressBar.attempt = node.pastAttempts.length;
- }
-
- // Calculate the node attempt bars
- for (var p = 0; p < node.pastAttempts.length; ++p) {
- var pastAttempt = node.pastAttempts[p];
- var pastAttemptBox = pastAttempt.attemptBox;
-
- var left = (pastAttempt.startTime - flowStartTime)*factor;
- var width = Math.max((pastAttempt.endTime - pastAttempt.startTime)*factor, 3);
-
- var margin = left - offsetLeft;
- $(pastAttemptBox).css("margin-left", left - offsetLeft);
- $(pastAttemptBox).css("width", width);
-
- $(pastAttemptBox).attr("title", "attempt:" + p + " start:" + getHourMinSec(new Date(pastAttempt.startTime)) + " end:" + getHourMinSec(new Date(pastAttempt.endTime)));
- offsetLeft += width + margin;
- }
- }
-
- var nodeLastTime = node.endTime == -1 ? (new Date()).getTime() : node.endTime;
- var left = Math.max((node.startTime-parentStartTime)*factor, minOffset);
- var margin = left - offsetLeft;
- var width = Math.max((nodeLastTime - node.startTime)*factor, 3);
- width = Math.min(width, outerWidth);
-
- progressBar.css("margin-left", left)
- progressBar.css("width", width);
- progressBar.attr("title", "attempt:" + progressBar.attempt + " start:" + getHourMinSec(new Date(node.startTime)) + " end:" + getHourMinSec(new Date(node.endTime)));
-
- if (node.nodes) {
- this.updateProgressBar(node, flowStartTime, flowLastTime);
- }
- }
- },
-
- toggleExpandFlow: function(flow) {
- console.log("Toggle Expand");
- var tr = flow.joblistrow;
- var subFlowRow = tr.subflowrow;
- var expandIcon = $(tr).find("> td > .listExpand");
- if (tr.expanded) {
- tr.expanded = false;
- $(expandIcon).removeClass("glyphicon-chevron-up");
- $(expandIcon).addClass("glyphicon-chevron-down");
-
- $(tr).removeClass("expanded");
- $(subFlowRow).hide();
- }
- else {
- tr.expanded = true;
- $(expandIcon).addClass("glyphicon-chevron-up");
- $(expandIcon).removeClass("glyphicon-chevron-down");
- $(tr).addClass("expanded");
- $(subFlowRow).show();
- }
- },
-
- addNodeRow: function(node, body) {
- var self = this;
- var tr = document.createElement("tr");
- var tdName = document.createElement("td");
- var tdType = document.createElement("td");
- var tdTimeline = document.createElement("td");
- var tdStart = document.createElement("td");
- var tdEnd = document.createElement("td");
- var tdElapse = document.createElement("td");
- var tdStatus = document.createElement("td");
- var tdDetails = document.createElement("td");
- node.joblistrow = tr;
- tr.node = node;
- var padding = 15*$(body)[0].level;
-
- $(tr).append(tdName);
- $(tr).append(tdType);
- $(tr).append(tdTimeline);
- $(tr).append(tdStart);
- $(tr).append(tdEnd);
- $(tr).append(tdElapse);
- $(tr).append(tdStatus);
- $(tr).append(tdDetails);
- $(tr).addClass("jobListRow");
-
- $(tdName).addClass("jobname");
- $(tdType).addClass("jobtype");
- if (padding) {
- $(tdName).css("padding-left", padding);
- }
- $(tdTimeline).addClass("timeline");
- $(tdStart).addClass("startTime");
- $(tdEnd).addClass("endTime");
- $(tdElapse).addClass("elapsedTime");
- $(tdStatus).addClass("statustd");
- $(tdDetails).addClass("details");
-
- $(tdType).text(node.type);
-
- var outerProgressBar = document.createElement("div");
- //$(outerProgressBar).attr("id", node.id + "-outerprogressbar");
- $(outerProgressBar).addClass("flow-progress");
-
- var progressBox = document.createElement("div");
- progressBox.job = node.id;
- //$(progressBox).attr("id", node.id + "-progressbar");
- $(progressBox).addClass("flow-progress-bar");
- $(progressBox).addClass("main-progress");
- $(outerProgressBar).append(progressBox);
- $(tdTimeline).append(outerProgressBar);
-
- var requestURL = contextURL + "/manager?project=" + projectName + "&job=" + node.id + "&history";
- var a = document.createElement("a");
- $(a).attr("href", requestURL);
- $(a).text(node.id);
- $(tdName).append(a);
- if (node.type=="flow") {
- var expandIcon = document.createElement("div");
- $(expandIcon).addClass("listExpand");
- $(tdName).append(expandIcon);
- $(expandIcon).addClass("expandarrow glyphicon glyphicon-chevron-down");
- $(expandIcon).click(function(evt) {
- var parent = $(evt.currentTarget).parents("tr")[0];
- self.toggleExpandFlow(parent.node);
- });
- }
-
- var status = document.createElement("div");
- $(status).addClass("status");
- //$(status).attr("id", node.id + "-status-div");
- tdStatus.appendChild(status);
-
- var logURL = contextURL + "/executor?execid=" + execId + "&job=" + node.nestedId;
- if (node.attempt) {
- logURL += "&attempt=" + node.attempt;
- }
-
- if (node.type != 'flow' && node.status != 'SKIPPED') {
- var a = document.createElement("a");
- $(a).attr("href", logURL);
- //$(a).attr("id", node.id + "-log-link");
- $(a).text("Details");
- $(tdDetails).append(a);
- }
-
- $(body).append(tr);
- if (node.type == "flow") {
- var subFlowRow = document.createElement("tr");
- var subFlowCell = document.createElement("td");
- $(subFlowCell).addClass("subflowrow");
-
- var numColumn = $(tr).children("td").length;
- $(subFlowCell).attr("colspan", numColumn);
- tr.subflowrow = subFlowRow;
-
- $(subFlowRow).append(subFlowCell);
- $(body).append(subFlowRow);
- $(subFlowRow).hide();
- var subtable = document.createElement("table");
- var parentClasses = $(body).closest("table").attr("class");
-
- $(subtable).attr("class", parentClasses);
- $(subtable).addClass("subtable");
- $(subFlowCell).append(subtable);
- }
- }
+// contextMenuView.show(evt, menu);
+// },
+
+ updateJobs: function(evt) {
+ var update = this.model.get("update");
+ var lastTime = update.endTime == -1
+ ? (new Date()).getTime()
+ : update.endTime;
+ var executingBody = $("#executableBody");
+
+ if (update.nodes) {
+ this.updateJobRow(update.nodes, executingBody);
+ }
+
+ var data = this.model.get("data");
+ var flowLastTime = data.endTime == -1
+ ? (new Date()).getTime()
+ : data.endTime;
+ var flowStartTime = data.startTime;
+ this.updateProgressBar(data, flowStartTime, flowLastTime);
+ },
+
+ updateJobRow: function(nodes, body) {
+ if (!nodes) {
+ return;
+ }
+
+ nodes.sort(function(a,b) { return a.startTime - b.startTime; });
+ for (var i = 0; i < nodes.length; ++i) {
+ var node = nodes[i].changedNode ? nodes[i].changedNode : nodes[i];
+
+ if (node.startTime < 0) {
+ continue;
+ }
+ //var nodeId = node.id.replace(".", "\\\\.");
+ var row = node.joblistrow;
+ if (!row) {
+ this.addNodeRow(node, body);
+ }
+
+ row = node.joblistrow;
+ var statusDiv = $(row).find("> td.statustd > .status");
+ statusDiv.text(statusStringMap[node.status]);
+ $(statusDiv).attr("class", "status " + node.status);
+
+ var startTimeTd = $(row).find("> td.startTime");
+ var startdate = new Date(node.startTime);
+ $(startTimeTd).text(getDateFormat(startdate));
+
+ var endTimeTd = $(row).find("> td.endTime");
+ if (node.endTime == -1) {
+ $(endTimeTd).text("-");
+ }
+ else {
+ var enddate = new Date(node.endTime);
+ $(endTimeTd).text(getDateFormat(enddate));
+ }
+
+ var progressBar = $(row).find("> td.timeline > .flow-progress > .main-progress");
+ if (!progressBar.hasClass(node.status)) {
+ for (var j = 0; j < statusList.length; ++j) {
+ var status = statusList[j];
+ progressBar.removeClass(status);
+ }
+ progressBar.addClass(node.status);
+ }
+
+ // Create past attempts
+ if (node.pastAttempts) {
+ for (var a = 0; a < node.pastAttempts.length; ++a) {
+ var attempt = node.pastAttempts[a];
+ var attemptBox = attempt.attemptBox;
+
+ if (!attemptBox) {
+ attemptBox = document.createElement("div");
+ attempt.attemptBox = attemptBox;
+
+ $(attemptBox).addClass("flow-progress-bar");
+ $(attemptBox).addClass("attempt");
+
+ $(attemptBox).css("float","left");
+ $(attemptBox).bind("contextmenu", attemptRightClick);
+
+ $(progressBar).before(attemptBox);
+ attemptBox.job = node.id;
+ attemptBox.attempt = a;
+ }
+ }
+ }
+
+ var elapsedTime = $(row).find("> td.elapsedTime");
+ if (node.endTime == -1) {
+ $(elapsedTime).text(getDuration(node.startTime, (new Date()).getTime()));
+ }
+ else {
+ $(elapsedTime).text(getDuration(node.startTime, node.endTime));
+ }
+
+ if (node.nodes) {
+ var subtableBody = $(row.subflowrow).find("> td > table");
+ subtableBody[0].level = $(body)[0].level + 1;
+ this.updateJobRow(node.nodes, subtableBody);
+ }
+ }
+ },
+
+ updateProgressBar: function(data, flowStartTime, flowLastTime) {
+ if (data.startTime == -1) {
+ return;
+ }
+
+ var outerWidth = $(".flow-progress").css("width");
+ if (outerWidth) {
+ if (outerWidth.substring(outerWidth.length - 2, outerWidth.length) == "px") {
+ outerWidth = outerWidth.substring(0, outerWidth.length - 2);
+ }
+ outerWidth = parseInt(outerWidth);
+ }
+
+ var parentLastTime = data.endTime == -1 ? (new Date()).getTime() : data.endTime;
+ var parentStartTime = data.startTime;
+
+ var factor = outerWidth / (flowLastTime - flowStartTime);
+ var outerProgressBarWidth = factor * (parentLastTime - parentStartTime);
+ var outerLeftMargin = factor * (parentStartTime - flowStartTime);
+
+ var nodes = data.nodes;
+ for (var i = 0; i < nodes.length; ++i) {
+ var node = nodes[i];
+
+ // calculate the progress
+ var tr = node.joblistrow;
+ var outerProgressBar = $(tr).find("> td.timeline > .flow-progress");
+ var progressBar = $(tr).find("> td.timeline > .flow-progress > .main-progress");
+ var offsetLeft = 0;
+ var minOffset = 0;
+ progressBar.attempt = 0;
+
+ // Shift the outer progress
+ $(outerProgressBar).css("width", outerProgressBarWidth);
+ $(outerProgressBar).css("margin-left", outerLeftMargin);
+
+ // Add all the attempts
+ if (node.pastAttempts) {
+ var logURL = contextURL + "/executor?execid=" + execId + "&job=" + node.id + "&attempt=" + node.pastAttempts.length;
+ var anchor = $(tr).find("> td.details > a");
+ if (anchor.length != 0) {
+ $(anchor).attr("href", logURL);
+ progressBar.attempt = node.pastAttempts.length;
+ }
+
+ // Calculate the node attempt bars
+ for (var p = 0; p < node.pastAttempts.length; ++p) {
+ var pastAttempt = node.pastAttempts[p];
+ var pastAttemptBox = pastAttempt.attemptBox;
+
+ var left = (pastAttempt.startTime - flowStartTime)*factor;
+ var width = Math.max((pastAttempt.endTime - pastAttempt.startTime)*factor, 3);
+
+ var margin = left - offsetLeft;
+ $(pastAttemptBox).css("margin-left", left - offsetLeft);
+ $(pastAttemptBox).css("width", width);
+
+ $(pastAttemptBox).attr("title", "attempt:" + p + " start:" + getHourMinSec(new Date(pastAttempt.startTime)) + " end:" + getHourMinSec(new Date(pastAttempt.endTime)));
+ offsetLeft += width + margin;
+ }
+ }
+
+ var nodeLastTime = node.endTime == -1 ? (new Date()).getTime() : node.endTime;
+ var left = Math.max((node.startTime-parentStartTime)*factor, minOffset);
+ var margin = left - offsetLeft;
+ var width = Math.max((nodeLastTime - node.startTime)*factor, 3);
+ width = Math.min(width, outerWidth);
+
+ progressBar.css("margin-left", left);
+ progressBar.css("width", width);
+ progressBar.attr("title", "attempt:" + progressBar.attempt + " start:" + getHourMinSec(new Date(node.startTime)) + " end:" + getHourMinSec(new Date(node.endTime)));
+
+ if (node.nodes) {
+ this.updateProgressBar(node, flowStartTime, flowLastTime);
+ }
+ }
+ },
+
+ toggleExpandFlow: function(flow) {
+ console.log("Toggle Expand");
+ var tr = flow.joblistrow;
+ var subFlowRow = tr.subflowrow;
+ var expandIcon = $(tr).find("> td > .listExpand");
+ if (tr.expanded) {
+ tr.expanded = false;
+ $(expandIcon).removeClass("glyphicon-chevron-up");
+ $(expandIcon).addClass("glyphicon-chevron-down");
+
+ $(tr).removeClass("expanded");
+ $(subFlowRow).hide();
+ }
+ else {
+ tr.expanded = true;
+ $(expandIcon).addClass("glyphicon-chevron-up");
+ $(expandIcon).removeClass("glyphicon-chevron-down");
+ $(tr).addClass("expanded");
+ $(subFlowRow).show();
+ }
+ },
+
+ addNodeRow: function(node, body) {
+ var self = this;
+ var tr = document.createElement("tr");
+ var tdName = document.createElement("td");
+ var tdType = document.createElement("td");
+ var tdTimeline = document.createElement("td");
+ var tdStart = document.createElement("td");
+ var tdEnd = document.createElement("td");
+ var tdElapse = document.createElement("td");
+ var tdStatus = document.createElement("td");
+ var tdDetails = document.createElement("td");
+ node.joblistrow = tr;
+ tr.node = node;
+ var padding = 15*$(body)[0].level;
+
+ $(tr).append(tdName);
+ $(tr).append(tdType);
+ $(tr).append(tdTimeline);
+ $(tr).append(tdStart);
+ $(tr).append(tdEnd);
+ $(tr).append(tdElapse);
+ $(tr).append(tdStatus);
+ $(tr).append(tdDetails);
+ $(tr).addClass("jobListRow");
+
+ $(tdName).addClass("jobname");
+ $(tdType).addClass("jobtype");
+ if (padding) {
+ $(tdName).css("padding-left", padding);
+ }
+ $(tdTimeline).addClass("timeline");
+ $(tdStart).addClass("startTime");
+ $(tdEnd).addClass("endTime");
+ $(tdElapse).addClass("elapsedTime");
+ $(tdStatus).addClass("statustd");
+ $(tdDetails).addClass("details");
+
+ $(tdType).text(node.type);
+
+ var outerProgressBar = document.createElement("div");
+ //$(outerProgressBar).attr("id", node.id + "-outerprogressbar");
+ $(outerProgressBar).addClass("flow-progress");
+
+ var progressBox = document.createElement("div");
+ progressBox.job = node.id;
+ //$(progressBox).attr("id", node.id + "-progressbar");
+ $(progressBox).addClass("flow-progress-bar");
+ $(progressBox).addClass("main-progress");
+ $(outerProgressBar).append(progressBox);
+ $(tdTimeline).append(outerProgressBar);
+
+ var requestURL = contextURL + "/manager?project=" + projectName + "&job=" + node.id + "&history";
+ var a = document.createElement("a");
+ $(a).attr("href", requestURL);
+ $(a).text(node.id);
+ $(tdName).append(a);
+ if (node.type=="flow") {
+ var expandIcon = document.createElement("div");
+ $(expandIcon).addClass("listExpand");
+ $(tdName).append(expandIcon);
+ $(expandIcon).addClass("expandarrow glyphicon glyphicon-chevron-down");
+ $(expandIcon).click(function(evt) {
+ var parent = $(evt.currentTarget).parents("tr")[0];
+ self.toggleExpandFlow(parent.node);
+ });
+ }
+
+ var status = document.createElement("div");
+ $(status).addClass("status");
+ //$(status).attr("id", node.id + "-status-div");
+ tdStatus.appendChild(status);
+
+ var logURL = contextURL + "/executor?execid=" + execId + "&job=" + node.nestedId;
+ if (node.attempt) {
+ logURL += "&attempt=" + node.attempt;
+ }
+
+ if (node.type != 'flow' && node.status != 'SKIPPED') {
+ var a = document.createElement("a");
+ $(a).attr("href", logURL);
+ //$(a).attr("id", node.id + "-log-link");
+ $(a).text("Details");
+ $(tdDetails).append(a);
+ }
+
+ $(body).append(tr);
+ if (node.type == "flow") {
+ var subFlowRow = document.createElement("tr");
+ var subFlowCell = document.createElement("td");
+ $(subFlowCell).addClass("subflowrow");
+
+ var numColumn = $(tr).children("td").length;
+ $(subFlowCell).attr("colspan", numColumn);
+ tr.subflowrow = subFlowRow;
+
+ $(subFlowRow).append(subFlowCell);
+ $(body).append(subFlowRow);
+ $(subFlowRow).hide();
+ var subtable = document.createElement("table");
+ var parentClasses = $(body).closest("table").attr("class");
+
+ $(subtable).attr("class", parentClasses);
+ $(subtable).addClass("subtable");
+ $(subFlowCell).append(subtable);
+ }
+ }
});
var attemptRightClick = function(event) {
- var target = event.currentTarget;
- var job = target.job;
- var attempt = target.attempt;
-
- var jobId = event.currentTarget.jobid;
- var requestURL = contextURL + "/executor?project=" + projectName + "&execid=" + execId + "&job=" + job + "&attempt=" + attempt;
-
- var menu = [
- {title: "Open Attempt Log...", callback: function() {window.location.href=requestURL;}},
- {title: "Open Attempt Log in New Window...", callback: function() {window.open(requestURL);}}
- ];
-
- contextMenuView.show(event, menu);
- return false;
+ var target = event.currentTarget;
+ var job = target.job;
+ var attempt = target.attempt;
+
+ var jobId = event.currentTarget.jobid;
+ var requestURL = contextURL + "/executor?project=" + projectName + "&execid=" + execId + "&job=" + job + "&attempt=" + attempt;
+
+ var menu = [
+ {title: "Open Attempt Log...", callback: function() {window.location.href=requestURL;}},
+ {title: "Open Attempt Log in New Window...", callback: function() {window.open(requestURL);}}
+ ];
+
+ contextMenuView.show(event, menu);
+ return false;
}
src/web/js/azkaban/view/flow-extended.js 122(+61 -61)
diff --git a/src/web/js/azkaban/view/flow-extended.js b/src/web/js/azkaban/view/flow-extended.js
index aa38d34..686d0a1 100644
--- a/src/web/js/azkaban/view/flow-extended.js
+++ b/src/web/js/azkaban/view/flow-extended.js
@@ -1,62 +1,62 @@
azkaban.FlowExtendedViewPanel = Backbone.View.extend({
- events: {
- "click .closeInfoPanel" : "handleClosePanel"
- },
- initialize: function(settings) {
- //this.model.bind('change:flowinfo', this.changeFlowInfo, this);
- $(this.el).show();
- $(this.el).draggable({cancel: ".dataContent", containment: "document"});
-
- this.render();
- $(this.el).hide();
- },
- showExtendedView: function(evt) {
- var event = evt;
-
- $(this.el).css({top: evt.pageY, left: evt.pageX});
- $(this.el).show();
- },
- render: function(self) {
- console.log("Changing title");
- $(this.el).find(".nodeId").text(this.model.get("id"));
- $(this.el).find(".nodeType").text(this.model.get("type"));
-
- var props = this.model.get("props");
- var tableBody = $(this.el).find(".dataPropertiesBody");
-
- for (var key in props) {
- var tr = document.createElement("tr");
- var tdKey = document.createElement("td");
- var tdValue = document.createElement("td");
-
- $(tdKey).text(key);
- $(tdValue).text(props[key]);
-
- $(tr).append(tdKey);
- $(tr).append(tdValue);
-
- $(tableBody).append(tr);
-
- var propsTable = $(this.el).find(".dataJobProperties");
- $(propsTable).resizable({handler: "s"});
- }
-
- if (this.model.get("type") == "flow") {
- var svgns = "http://www.w3.org/2000/svg";
- var svgDataFlow = $(this.el).find(".dataFlow");
-
- var svgGraph = document.createElementNS(svgns, "svg");
- $(svgGraph).attr("class", "svgTiny");
- $(svgDataFlow).append(svgGraph);
- $(svgDataFlow).resizable();
-
- this.graphView = new azkaban.SvgGraphView({el: svgDataFlow, model: this.model, render: true, rightClick: { "node": nodeClickCallback, "graph": graphClickCallback }})
- }
- else {
- $(this.el).find(".dataFlow").hide();
- }
- },
- handleClosePanel: function(self) {
- $(this.el).hide();
- }
-});
\ No newline at end of file
+ events: {
+ "click .closeInfoPanel" : "handleClosePanel"
+ },
+ initialize: function(settings) {
+ //this.model.bind('change:flowinfo', this.changeFlowInfo, this);
+ $(this.el).show();
+ $(this.el).draggable({cancel: ".dataContent", containment: "document"});
+
+ this.render();
+ $(this.el).hide();
+ },
+ showExtendedView: function(evt) {
+ var event = evt;
+
+ $(this.el).css({top: evt.pageY, left: evt.pageX});
+ $(this.el).show();
+ },
+ render: function(self) {
+ console.log("Changing title");
+ $(this.el).find(".nodeId").text(this.model.get("id"));
+ $(this.el).find(".nodeType").text(this.model.get("type"));
+
+ var props = this.model.get("props");
+ var tableBody = $(this.el).find(".dataPropertiesBody");
+
+ for (var key in props) {
+ var tr = document.createElement("tr");
+ var tdKey = document.createElement("td");
+ var tdValue = document.createElement("td");
+
+ $(tdKey).text(key);
+ $(tdValue).text(props[key]);
+
+ $(tr).append(tdKey);
+ $(tr).append(tdValue);
+
+ $(tableBody).append(tr);
+
+ var propsTable = $(this.el).find(".dataJobProperties");
+ $(propsTable).resizable({handler: "s"});
+ }
+
+ if (this.model.get("type") == "flow") {
+ var svgns = "http://www.w3.org/2000/svg";
+ var svgDataFlow = $(this.el).find(".dataFlow");
+
+ var svgGraph = document.createElementNS(svgns, "svg");
+ $(svgGraph).attr("class", "svgTiny");
+ $(svgDataFlow).append(svgGraph);
+ $(svgDataFlow).resizable();
+
+ this.graphView = new azkaban.SvgGraphView({el: svgDataFlow, model: this.model, render: true, rightClick: { "node": nodeClickCallback, "graph": graphClickCallback }});
+ }
+ else {
+ $(this.el).find(".dataFlow").hide();
+ }
+ },
+ handleClosePanel: function(self) {
+ $(this.el).hide();
+ }
+});
diff --git a/src/web/js/azkaban/view/flow-stats.js b/src/web/js/azkaban/view/flow-stats.js
index d991a4c..a77c3f8 100644
--- a/src/web/js/azkaban/view/flow-stats.js
+++ b/src/web/js/azkaban/view/flow-stats.js
@@ -23,9 +23,9 @@ azkaban.FlowStatsView = Backbone.View.extend({
histogram: true,
- initialize: function(settings) {
- this.model.bind('change:view', this.handleChangeView, this);
- this.model.bind('render', this.render, this);
+ initialize: function(settings) {
+ this.model.bind('change:view', this.handleChangeView, this);
+ this.model.bind('render', this.render, this);
if (settings.histogram != null) {
this.histogram = settings.histogram;
}
@@ -316,7 +316,7 @@ azkaban.FlowStatsView = Backbone.View.extend({
this.model.trigger('render');
},
- render: function(evt) {
+ render: function(evt) {
var view = this;
var data = this.model.get('data');
if (data == null) {
src/web/js/azkaban/view/history.js 100(+50 -50)
diff --git a/src/web/js/azkaban/view/history.js b/src/web/js/azkaban/view/history.js
index a381e74..11401b1 100644
--- a/src/web/js/azkaban/view/history.js
+++ b/src/web/js/azkaban/view/history.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -18,62 +18,62 @@ $.namespace('azkaban');
var advFilterView;
azkaban.AdvFilterView = Backbone.View.extend({
- events: {
- "click #filter-btn": "handleAdvFilter"
- },
-
- initialize: function(settings) {
- $('#datetimebegin').datetimepicker();
- $('#datetimeend').datetimepicker();
- $('#datetimebegin').on('change.dp', function(e) {
+ events: {
+ "click #filter-btn": "handleAdvFilter"
+ },
+
+ initialize: function(settings) {
+ $('#datetimebegin').datetimepicker();
+ $('#datetimeend').datetimepicker();
+ $('#datetimebegin').on('change.dp', function(e) {
$('#datetimeend').data('DateTimePicker').setStartDate(e.date);
});
- $('#datetimeend').on('change.dp', function(e) {
+ $('#datetimeend').on('change.dp', function(e) {
$('#datetimebegin').data('DateTimePicker').setEndDate(e.date);
});
- $('#adv-filter-error-msg').hide();
- },
-
- handleAdvFilter: function(evt) {
- console.log("handleAdv");
- var projcontain = $('#projcontain').val();
- var flowcontain = $('#flowcontain').val();
- var usercontain = $('#usercontain').val();
- var status = $('#status').val();
- var begin = $('#datetimebegin').val();
- var end = $('#datetimeend').val();
-
- console.log("filtering history");
+ $('#adv-filter-error-msg').hide();
+ },
+
+ handleAdvFilter: function(evt) {
+ console.log("handleAdv");
+ var projcontain = $('#projcontain').val();
+ var flowcontain = $('#flowcontain').val();
+ var usercontain = $('#usercontain').val();
+ var status = $('#status').val();
+ var begin = $('#datetimebegin').val();
+ var end = $('#datetimeend').val();
+
+ console.log("filtering history");
- var historyURL = contextURL + "/history"
- var redirectURL = contextURL + "/schedule"
+ var historyURL = contextURL + "/history"
+ var redirectURL = contextURL + "/schedule"
- var requestURL = historyURL + "?advfilter=true" + "&projcontain=" + projcontain + "&flowcontain=" + flowcontain + "&usercontain=" + usercontain + "&status=" + status + "&begin=" + begin + "&end=" + end ;
- window.location = requestURL;
+ var requestURL = historyURL + "?advfilter=true" + "&projcontain=" + projcontain + "&flowcontain=" + flowcontain + "&usercontain=" + usercontain + "&status=" + status + "&begin=" + begin + "&end=" + end ;
+ window.location = requestURL;
- /*
- var requestData = {
- "action": "advfilter",
- "projre": projre,
- "flowre": flowre,
- "userre": userre
- };
- var successHandler = function(data) {
- if (data.action == "redirect") {
- window.location = data.redirect;
- }
- };
- $.get(historyURL, requestData, successHandler, "json");
- */
- },
+ /*
+ var requestData = {
+ "action": "advfilter",
+ "projre": projre,
+ "flowre": flowre,
+ "userre": userre
+ };
+ var successHandler = function(data) {
+ if (data.action == "redirect") {
+ window.location = data.redirect;
+ }
+ };
+ $.get(historyURL, requestData, successHandler, "json");
+ */
+ },
- render: function() {
- }
+ render: function() {
+ }
});
$(function() {
- filterView = new azkaban.AdvFilterView({el: $('#adv-filter')});
- $('#adv-filter-btn').click( function() {
- $('#adv-filter').modal();
- });
+ filterView = new azkaban.AdvFilterView({el: $('#adv-filter')});
+ $('#adv-filter-btn').click( function() {
+ $('#adv-filter').modal();
+ });
});
src/web/js/azkaban/view/history-day.js 159(+78 -81)
diff --git a/src/web/js/azkaban/view/history-day.js b/src/web/js/azkaban/view/history-day.js
index fba1c6c..732028b 100644
--- a/src/web/js/azkaban/view/history-day.js
+++ b/src/web/js/azkaban/view/history-day.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -21,48 +21,46 @@ azkaban.DayDataModel = Backbone.Model.extend({});
var dayByDayView;
azkaban.DayByDayView = Backbone.View.extend({
- events: {
- },
- initialize: function(settings) {
- this.svgns = "http://www.w3.org/2000/svg";
- this.svg = $(this.el).find('svg')[0];
- this.columnDayWidth = 100;
- this.columnHourHeight = 50;
- this.columnHeight = 50*24;
-
- this.render(this);
- },http://documentcloud.github.com/backbone/#Events-trigger
- prepareData: function(self) {
- var response = model.get("data");
- var start = data.start;
- var end = data.end;
- var data = data.data;
-
- var daysData = {};
-
- var startDate = new Date(start);
-
- while (startDate.getTime() < end) {
- daysData[startDate.getTime()] = new Array();
- startDate.setDate(startDate.getDate() + 1);
- }
-
- for (var i = 0; i < data.length; ++i) {
- var flow = data[i];
-
- }
- },
- render: function(self) {
- var svg = self.svg;
- var svgns = self.svgns;
- var width = $(svg).width();
- var height = $(svg).height();
-
- var mainG = document.createElementNS(this.svgns, 'g');
- $(svg).append(mainG);
-
-
- }
+ events: {
+ },
+ initialize: function(settings) {
+ this.svgns = "http://www.w3.org/2000/svg";
+ this.svg = $(this.el).find('svg')[0];
+ this.columnDayWidth = 100;
+ this.columnHourHeight = 50;
+ this.columnHeight = 50*24;
+
+ this.render(this);
+ },
+ prepareData: function(self) {
+ var response = this.model.get("data");
+ var start = response.start;
+ var end = response.end;
+ var data = response.data;
+
+ var daysData = {};
+
+ var startDate = new Date(start);
+
+ while (startDate.getTime() < end) {
+ daysData[startDate.getTime()] = new Array();
+ startDate.setDate(startDate.getDate() + 1);
+ }
+
+ for (var i = 0; i < data.length; ++i) {
+ var flow = data[i];
+
+ }
+ },
+ render: function(self) {
+ var svg = self.svg;
+ var svgns = self.svgns;
+ var width = $(svg).width();
+ var height = $(svg).height();
+
+ var mainG = document.createElementNS(this.svgns, 'g');
+ $(svg).append(mainG);
+ }
});
var showDialog = function(title, message) {
@@ -70,44 +68,43 @@ var showDialog = function(title, message) {
$('#messageBox').text(message);
$('#messageDialog').modal({
- closeHTML: "<a href='#' title='Close' class='modal-close'>x</a>",
- position: ["20%",],
- containerId: 'confirm-container',
- containerCss: {
- 'height': '220px',
- 'width': '565px'
- },
- onShow: function (dialog) {
- }
- });
+ closeHTML: "<a href='#' title='Close' class='modal-close'>x</a>",
+ position: ["20%",],
+ containerId: 'confirm-container',
+ containerCss: {
+ 'height': '220px',
+ 'width': '565px'
+ },
+ onShow: function (dialog) {
+ }
+ });
}
$(function() {
- var requestURL = contextURL + "/history";
-
- var start = new Date();
- start.setHours(0);
- start.setMinutes(0);
- start.setSeconds(0);
- start.setMilliseconds(0);
- var end = new Date(start);
-
- start.setDate(start.getDate() - 7);
- console.log(start.getTime());
-
- end.setDate(end.getDate() + 1);
- console.log(end.getTime());
-
- dayDataModel = new azkaban.DayDataModel();
- dayByDayView = new azkaban.DayByDayView({el:$('#dayByDayPanel'), model: dayDataModel});
-
- $.get(
- requestURL,
- {"ajax":"fetch", "start": start.getTime(), "end": end.getTime()},
- function(data) {
- dayDataModel.set({data:data});
- },
- "json"
- );
+ var requestURL = contextURL + "/history";
+
+ var start = new Date();
+ start.setHours(0);
+ start.setMinutes(0);
+ start.setSeconds(0);
+ start.setMilliseconds(0);
+ var end = new Date(start);
+
+ start.setDate(start.getDate() - 7);
+ console.log(start.getTime());
+
+ end.setDate(end.getDate() + 1);
+ console.log(end.getTime());
+
+ dayDataModel = new azkaban.DayDataModel();
+ dayByDayView = new azkaban.DayByDayView({el:$('#dayByDayPanel'), model: dayDataModel});
+
+ $.get(
+ requestURL,
+ {"ajax":"fetch", "start": start.getTime(), "end": end.getTime()},
+ function(data) {
+ dayDataModel.set({data:data});
+ },
+ "json");
});
src/web/js/azkaban/view/jmx.js 252(+126 -126)
diff --git a/src/web/js/azkaban/view/jmx.js b/src/web/js/azkaban/view/jmx.js
index 3ec990a..8f12f1e 100644
--- a/src/web/js/azkaban/view/jmx.js
+++ b/src/web/js/azkaban/view/jmx.js
@@ -18,135 +18,135 @@ $.namespace('azkaban');
var jmxTableView;
azkaban.JMXTableView = Backbone.View.extend({
- events: {
- "click .query-btn": "queryJMX",
- "click .collapse-btn": "collapseRow"
- },
-
- initialize: function(settings) {
- },
-
- formatValue: function(value) {
- if (String(value).length != TIMESTAMP_LENGTH) {
- return value;
- }
- if (isNaN(parseInt(value))) {
- return value;
- }
- var date = new Date(value);
- if (date.getTime() <= 0) {
- return value;
- }
- return value + " (" + date.toISOString() + ")";
- },
-
- queryJMX: function(evt) {
- var target = evt.currentTarget;
- var id = target.id;
-
- var childID = id + "-child";
- var tbody = id + "-tbody";
-
- var requestURL = contextURL + "/jmx";
- var canonicalName=$(target).attr("domain") + ":name=" + $(target).attr("name");
-
- var data = {
- "ajax": "getAllMBeanAttributes",
- "mBean": canonicalName
- };
- if ($(target).attr("hostPort")) {
- data.ajax = "getAllExecutorAttributes";
- data.hostPort = $(target).attr("hostPort");
- }
- var view = this;
- var successHandler = function(data) {
- var table = $('#' + tbody);
- $(table).empty();
-
- for (var key in data.attributes) {
- var value = data.attributes[key];
-
- var tr = document.createElement("tr");
- var tdName = document.createElement("td");
- var tdVal = document.createElement("td");
-
- $(tdName).addClass('property-key');
- $(tdName).text(key);
-
- value = view.formatValue(value);
- $(tdVal).text(value);
-
- $(tr).append(tdName);
- $(tr).append(tdVal);
-
- $('#' + tbody).append(tr);
- }
-
- var child = $("#" + childID);
- $(child).fadeIn();
- };
- $.get(requestURL, data, successHandler);
- },
-
- queryRemote: function(evt) {
- var target = evt.currentTarget;
- var id = target.id;
-
- var childID = id + "-child";
- var tbody = id + "-tbody";
-
- var requestURL = contextURL + "/jmx";
- var canonicalName = $(target).attr("domain") + ":name=" + $(target).attr("name");
- var hostPort = $(target).attr("hostport");
- var requestData = {
- "ajax": "getAllExecutorAttributes",
- "mBean": canonicalName,
- "hostPort": hostPort
- };
- var view = this;
- var successHandler = function(data) {
- var table = $('#' + tbody);
- $(table).empty();
-
- for (var key in data.attributes) {
- var value = data.attributes[key];
-
- var tr = document.createElement("tr");
- var tdName = document.createElement("td");
- var tdVal = document.createElement("td");
-
- $(tdName).addClass('property-key');
- $(tdName).text(key);
-
- value = view.formatValue(value);
- $(tdVal).text(value);
-
- $(tr).append(tdName);
- $(tr).append(tdVal);
-
- $('#' + tbody).append(tr);
- }
-
- var child = $("#" + childID);
- $(child).fadeIn();
- };
- $.get(requestURL, requestData, successHandler);
- },
-
- collapseRow: function(evt) {
- $(evt.currentTarget).parent().parent().fadeOut();
- },
-
- render: function() {
- }
+ events: {
+ "click .query-btn": "queryJMX",
+ "click .collapse-btn": "collapseRow"
+ },
+
+ initialize: function(settings) {
+ },
+
+ formatValue: function(value) {
+ if (String(value).length != TIMESTAMP_LENGTH) {
+ return value;
+ }
+ if (isNaN(parseInt(value))) {
+ return value;
+ }
+ var date = new Date(value);
+ if (date.getTime() <= 0) {
+ return value;
+ }
+ return value + " (" + date.toISOString() + ")";
+ },
+
+ queryJMX: function(evt) {
+ var target = evt.currentTarget;
+ var id = target.id;
+
+ var childID = id + "-child";
+ var tbody = id + "-tbody";
+
+ var requestURL = contextURL + "/jmx";
+ var canonicalName=$(target).attr("domain") + ":name=" + $(target).attr("name");
+
+ var data = {
+ "ajax": "getAllMBeanAttributes",
+ "mBean": canonicalName
+ };
+ if ($(target).attr("hostPort")) {
+ data.ajax = "getAllExecutorAttributes";
+ data.hostPort = $(target).attr("hostPort");
+ }
+ var view = this;
+ var successHandler = function(data) {
+ var table = $('#' + tbody);
+ $(table).empty();
+
+ for (var key in data.attributes) {
+ var value = data.attributes[key];
+
+ var tr = document.createElement("tr");
+ var tdName = document.createElement("td");
+ var tdVal = document.createElement("td");
+
+ $(tdName).addClass('property-key');
+ $(tdName).text(key);
+
+ value = view.formatValue(value);
+ $(tdVal).text(value);
+
+ $(tr).append(tdName);
+ $(tr).append(tdVal);
+
+ $('#' + tbody).append(tr);
+ }
+
+ var child = $("#" + childID);
+ $(child).fadeIn();
+ };
+ $.get(requestURL, data, successHandler);
+ },
+
+ queryRemote: function(evt) {
+ var target = evt.currentTarget;
+ var id = target.id;
+
+ var childID = id + "-child";
+ var tbody = id + "-tbody";
+
+ var requestURL = contextURL + "/jmx";
+ var canonicalName = $(target).attr("domain") + ":name=" + $(target).attr("name");
+ var hostPort = $(target).attr("hostport");
+ var requestData = {
+ "ajax": "getAllExecutorAttributes",
+ "mBean": canonicalName,
+ "hostPort": hostPort
+ };
+ var view = this;
+ var successHandler = function(data) {
+ var table = $('#' + tbody);
+ $(table).empty();
+
+ for (var key in data.attributes) {
+ var value = data.attributes[key];
+
+ var tr = document.createElement("tr");
+ var tdName = document.createElement("td");
+ var tdVal = document.createElement("td");
+
+ $(tdName).addClass('property-key');
+ $(tdName).text(key);
+
+ value = view.formatValue(value);
+ $(tdVal).text(value);
+
+ $(tr).append(tdName);
+ $(tr).append(tdVal);
+
+ $('#' + tbody).append(tr);
+ }
+
+ var child = $("#" + childID);
+ $(child).fadeIn();
+ };
+ $.get(requestURL, requestData, successHandler);
+ },
+
+ collapseRow: function(evt) {
+ $(evt.currentTarget).parent().parent().fadeOut();
+ },
+
+ render: function() {
+ }
});
var remoteTables = new Array();
$(function() {
- jmxTableView = new azkaban.JMXTableView({el:$('#all-jmx')});
+ jmxTableView = new azkaban.JMXTableView({el:$('#all-jmx')});
- $(".remoteJMX").each(function(item) {
- var newTableView = new azkaban.JMXTableView({el:$(this)});
- remoteTables.push(newTableView);
- });
+ $(".remoteJMX").each(function(item) {
+ var newTableView = new azkaban.JMXTableView({el:$(this)});
+ remoteTables.push(newTableView);
+ });
});
src/web/js/azkaban/view/job-details.js 64(+32 -32)
diff --git a/src/web/js/azkaban/view/job-details.js b/src/web/js/azkaban/view/job-details.js
index 7620b5a..8063797 100644
--- a/src/web/js/azkaban/view/job-details.js
+++ b/src/web/js/azkaban/view/job-details.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -18,47 +18,47 @@ $.namespace('azkaban');
var jobLogView;
azkaban.JobLogView = Backbone.View.extend({
- events: {
- "click #updateLogBtn" : "refresh"
- },
+ events: {
+ "click #updateLogBtn" : "refresh"
+ },
- initialize: function() {
- this.listenTo(this.model, "change:logData", this.render);
- },
+ initialize: function() {
+ this.listenTo(this.model, "change:logData", this.render);
+ },
- refresh: function() {
- this.model.refresh();
- },
+ refresh: function() {
+ this.model.refresh();
+ },
- render: function() {
- var re = /(https?:\/\/(([-\w\.]+)+(:\d+)?(\/([\w/_\.]*(\?\S+)?)?)?))/g;
- var log = this.model.get("logData");
- log = log.replace(re, "<a href=\"$1\" title=\"\">$1</a>");
- $("#logSection").html(log);
- }
+ render: function() {
+ var re = /(https?:\/\/(([-\w\.]+)+(:\d+)?(\/([\w/_\.]*(\?\S+)?)?)?))/g;
+ var log = this.model.get("logData");
+ log = log.replace(re, "<a href=\"$1\" title=\"\">$1</a>");
+ $("#logSection").html(log);
+ }
});
var showDialog = function(title, message) {
$('#messageTitle').text(title);
$('#messageBox').text(message);
$('#messageDialog').modal({
- closeHTML: "<a href='#' title='Close' class='modal-close'>x</a>",
- position: ["20%",],
- containerId: 'confirm-container',
- containerCss: {
- 'height': '220px',
- 'width': '565px'
- },
- onShow: function (dialog) {
- }
- });
+ closeHTML: "<a href='#' title='Close' class='modal-close'>x</a>",
+ position: ["20%",],
+ containerId: 'confirm-container',
+ containerCss: {
+ 'height': '220px',
+ 'width': '565px'
+ },
+ onShow: function (dialog) {
+ }
+ });
}
$(function() {
var jobLogModel = new azkaban.JobLogModel();
- jobLogView = new azkaban.JobLogView({
- el: $('#jobLogView'),
- model: jobLogModel
- });
+ jobLogView = new azkaban.JobLogView({
+ el: $('#jobLogView'),
+ model: jobLogModel
+ });
jobLogModel.refresh();
});
src/web/js/azkaban/view/job-edit.js 438(+219 -219)
diff --git a/src/web/js/azkaban/view/job-edit.js b/src/web/js/azkaban/view/job-edit.js
index c36ccfb..073515e 100644
--- a/src/web/js/azkaban/view/job-edit.js
+++ b/src/web/js/azkaban/view/job-edit.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -18,143 +18,143 @@ $.namespace('azkaban');
var jobEditView;
azkaban.JobEditView = Backbone.View.extend({
- events : {
- "click" : "closeEditingTarget",
- "click #set-btn": "handleSet",
- "click #cancel-btn": "handleCancel",
- "click #close-btn": "handleCancel",
- "click #add-btn": "handleAddRow",
- "click table .editable": "handleEditColumn",
- "click table .remove-btn": "handleRemoveColumn"
- },
-
- initialize: function(setting) {
- this.projectURL = contextURL + "manager"
- this.generalParams = {}
- this.overrideParams = {}
- },
-
- handleCancel: function(evt) {
- $('#job-edit-pane').hide();
- var tbl = document.getElementById("generalProps").tBodies[0];
- var rows = tbl.rows;
- var len = rows.length;
- for (var i = 0; i < len-1; i++) {
- tbl.deleteRow(0);
- }
- },
-
- show: function(projectName, flowName, jobName) {
- this.projectName = projectName;
- this.flowName = flowName;
- this.jobName = jobName;
-
- var projectURL = this.projectURL
-
- $('#job-edit-pane').modal();
-
- var handleAddRow = this.handleAddRow;
-
- /*var overrideParams;
- var generalParams;
- this.overrideParams = overrideParams;
- this.generalParams = generalParams;*/
- var fetchJobInfo = {
- "project": this.projectName,
- "ajax": "fetchJobInfo",
- "flowName": this.flowName,
- "jobName": this.jobName
- };
- var mythis = this;
- var fetchJobSuccessHandler = function(data) {
- if (data.error) {
- alert(data.error);
- return;
- }
- document.getElementById('jobName').innerHTML = data.jobName;
- document.getElementById('jobType').innerHTML = data.jobType;
- var generalParams = data.generalParams;
- var overrideParams = data.overrideParams;
-
- /*for (var key in generalParams) {
- var row = handleAddRow();
- var td = $(row).find('span');
- $(td[1]).text(key);
- $(td[2]).text(generalParams[key]);
- }*/
-
- mythis.overrideParams = overrideParams;
- mythis.generalParams = generalParams;
-
- for (var okey in overrideParams) {
- if (okey != 'type' && okey != 'dependencies') {
- var row = handleAddRow();
- var td = $(row).find('span');
- $(td[0]).text(okey);
- $(td[1]).text(overrideParams[okey]);
- }
- }
- };
-
- $.get(projectURL, fetchJobInfo, fetchJobSuccessHandler, "json");
- },
-
- handleSet: function(evt) {
- this.closeEditingTarget(evt);
- var jobOverride = {};
- var editRows = $(".editRow");
- for (var i = 0; i < editRows.length; ++i) {
- var row = editRows[i];
- var td = $(row).find('span');
- var key = $(td[0]).text();
- var val = $(td[1]).text();
-
- if (key && key.length > 0) {
- jobOverride[key] = val;
- }
- }
-
- var overrideParams = this.overrideParams
- var generalParams = this.generalParams
-
- jobOverride['type'] = overrideParams['type']
- if ('dependencies' in overrideParams) {
- jobOverride['dependencies'] = overrideParams['dependencies']
- }
-
- var project = this.projectName
- var flowName = this.flowName
- var jobName = this.jobName
-
- var jobOverrideData = {
- project: project,
- flowName: flowName,
- jobName: jobName,
- ajax: "setJobOverrideProperty",
- jobOverride: jobOverride
- };
-
- var projectURL = this.projectURL
- var redirectURL = projectURL+'?project='+project+'&flow='+flowName+'&job='+jobName;
- var jobOverrideSuccessHandler = function(data) {
- if (data.error) {
- alert(data.error);
- }
- else {
- window.location = redirectURL;
- }
- };
-
- $.get(projectURL, jobOverrideData, jobOverrideSuccessHandler, "json");
- },
-
- handleAddRow: function(evt) {
- var tr = document.createElement("tr");
- var tdName = document.createElement("td");
+ events : {
+ "click" : "closeEditingTarget",
+ "click #set-btn": "handleSet",
+ "click #cancel-btn": "handleCancel",
+ "click #close-btn": "handleCancel",
+ "click #add-btn": "handleAddRow",
+ "click table .editable": "handleEditColumn",
+ "click table .remove-btn": "handleRemoveColumn"
+ },
+
+ initialize: function(setting) {
+ this.projectURL = contextURL + "manager"
+ this.generalParams = {}
+ this.overrideParams = {}
+ },
+
+ handleCancel: function(evt) {
+ $('#job-edit-pane').hide();
+ var tbl = document.getElementById("generalProps").tBodies[0];
+ var rows = tbl.rows;
+ var len = rows.length;
+ for (var i = 0; i < len-1; i++) {
+ tbl.deleteRow(0);
+ }
+ },
+
+ show: function(projectName, flowName, jobName) {
+ this.projectName = projectName;
+ this.flowName = flowName;
+ this.jobName = jobName;
+
+ var projectURL = this.projectURL
+
+ $('#job-edit-pane').modal();
+
+ var handleAddRow = this.handleAddRow;
+
+ /*var overrideParams;
+ var generalParams;
+ this.overrideParams = overrideParams;
+ this.generalParams = generalParams;*/
+ var fetchJobInfo = {
+ "project": this.projectName,
+ "ajax": "fetchJobInfo",
+ "flowName": this.flowName,
+ "jobName": this.jobName
+ };
+ var mythis = this;
+ var fetchJobSuccessHandler = function(data) {
+ if (data.error) {
+ alert(data.error);
+ return;
+ }
+ document.getElementById('jobName').innerHTML = data.jobName;
+ document.getElementById('jobType').innerHTML = data.jobType;
+ var generalParams = data.generalParams;
+ var overrideParams = data.overrideParams;
+
+ /*for (var key in generalParams) {
+ var row = handleAddRow();
+ var td = $(row).find('span');
+ $(td[1]).text(key);
+ $(td[2]).text(generalParams[key]);
+ }*/
+
+ mythis.overrideParams = overrideParams;
+ mythis.generalParams = generalParams;
+
+ for (var okey in overrideParams) {
+ if (okey != 'type' && okey != 'dependencies') {
+ var row = handleAddRow();
+ var td = $(row).find('span');
+ $(td[0]).text(okey);
+ $(td[1]).text(overrideParams[okey]);
+ }
+ }
+ };
+
+ $.get(projectURL, fetchJobInfo, fetchJobSuccessHandler, "json");
+ },
+
+ handleSet: function(evt) {
+ this.closeEditingTarget(evt);
+ var jobOverride = {};
+ var editRows = $(".editRow");
+ for (var i = 0; i < editRows.length; ++i) {
+ var row = editRows[i];
+ var td = $(row).find('span');
+ var key = $(td[0]).text();
+ var val = $(td[1]).text();
+
+ if (key && key.length > 0) {
+ jobOverride[key] = val;
+ }
+ }
+
+ var overrideParams = this.overrideParams
+ var generalParams = this.generalParams
+
+ jobOverride['type'] = overrideParams['type']
+ if ('dependencies' in overrideParams) {
+ jobOverride['dependencies'] = overrideParams['dependencies']
+ }
+
+ var project = this.projectName
+ var flowName = this.flowName
+ var jobName = this.jobName
+
+ var jobOverrideData = {
+ project: project,
+ flowName: flowName,
+ jobName: jobName,
+ ajax: "setJobOverrideProperty",
+ jobOverride: jobOverride
+ };
+
+ var projectURL = this.projectURL
+ var redirectURL = projectURL+'?project='+project+'&flow='+flowName+'&job='+jobName;
+ var jobOverrideSuccessHandler = function(data) {
+ if (data.error) {
+ alert(data.error);
+ }
+ else {
+ window.location = redirectURL;
+ }
+ };
+
+ $.get(projectURL, jobOverrideData, jobOverrideSuccessHandler, "json");
+ },
+
+ handleAddRow: function(evt) {
+ var tr = document.createElement("tr");
+ var tdName = document.createElement("td");
$(tdName).addClass('property-key');
- var tdValue = document.createElement("td");
+ var tdValue = document.createElement("td");
- var remove = document.createElement("div");
+ var remove = document.createElement("div");
$(remove).addClass("pull-right").addClass('remove-btn');
var removeBtn = document.createElement("button");
$(removeBtn).attr('type', 'button');
@@ -162,80 +162,80 @@ azkaban.JobEditView = Backbone.View.extend({
$(removeBtn).text('Delete');
$(remove).append(removeBtn);
- var nameData = document.createElement("span");
- $(nameData).addClass("spanValue");
- var valueData = document.createElement("span");
- $(valueData).addClass("spanValue");
+ var nameData = document.createElement("span");
+ $(nameData).addClass("spanValue");
+ var valueData = document.createElement("span");
+ $(valueData).addClass("spanValue");
- $(tdName).append(nameData);
- $(tdName).addClass("editable");
- nameData.myparent = tdName;
+ $(tdName).append(nameData);
+ $(tdName).addClass("editable");
+ nameData.myparent = tdName;
- $(tdValue).append(valueData);
+ $(tdValue).append(valueData);
$(tdValue).append(remove);
- $(tdValue).addClass("editable");
- $(tdValue).addClass("value");
- valueData.myparent = tdValue;
-
- $(tr).addClass("editRow");
- $(tr).append(tdName);
- $(tr).append(tdValue);
-
- $(tr).insertBefore("#addRow");
- return tr;
- },
-
- handleEditColumn: function(evt) {
- var curTarget = evt.currentTarget;
- if (this.editingTarget != curTarget) {
- this.closeEditingTarget(evt);
-
- var text = $(curTarget).children(".spanValue").text();
- $(curTarget).empty();
-
- var input = document.createElement("input");
- $(input).attr("type", "text");
+ $(tdValue).addClass("editable");
+ $(tdValue).addClass("value");
+ valueData.myparent = tdValue;
+
+ $(tr).addClass("editRow");
+ $(tr).append(tdName);
+ $(tr).append(tdValue);
+
+ $(tr).insertBefore("#addRow");
+ return tr;
+ },
+
+ handleEditColumn: function(evt) {
+ var curTarget = evt.currentTarget;
+ if (this.editingTarget != curTarget) {
+ this.closeEditingTarget(evt);
+
+ var text = $(curTarget).children(".spanValue").text();
+ $(curTarget).empty();
+
+ var input = document.createElement("input");
+ $(input).attr("type", "text");
$(input).addClass("form-control").addClass("input-sm");
- $(input).val(text);
-
- $(curTarget).addClass("editing");
- $(curTarget).append(input);
- $(input).focus();
- var obj = this;
- $(input).keypress(function(evt) {
- if (evt.which == 13) {
- obj.closeEditingTarget(evt);
- }
- });
- this.editingTarget = curTarget;
- }
-
- evt.preventDefault();
- evt.stopPropagation();
- },
-
- handleRemoveColumn: function(evt) {
- var curTarget = evt.currentTarget;
- // Should be the table
- var row = curTarget.parentElement.parentElement;
- $(row).remove();
- },
-
- closeEditingTarget: function(evt) {
- if (this.editingTarget == null ||
- this.editingTarget == evt.target ||
- this.editingTarget == evt.target.myparent) {
- return;
- }
- var input = $(this.editingTarget).children("input")[0];
- var text = $(input).val();
- $(input).remove();
-
- var valueData = document.createElement("span");
- $(valueData).addClass("spanValue");
- $(valueData).text(text);
-
- if ($(this.editingTarget).hasClass("value")) {
+ $(input).val(text);
+
+ $(curTarget).addClass("editing");
+ $(curTarget).append(input);
+ $(input).focus();
+ var obj = this;
+ $(input).keypress(function(evt) {
+ if (evt.which == 13) {
+ obj.closeEditingTarget(evt);
+ }
+ });
+ this.editingTarget = curTarget;
+ }
+
+ evt.preventDefault();
+ evt.stopPropagation();
+ },
+
+ handleRemoveColumn: function(evt) {
+ var curTarget = evt.currentTarget;
+ // Should be the table
+ var row = curTarget.parentElement.parentElement;
+ $(row).remove();
+ },
+
+ closeEditingTarget: function(evt) {
+ if (this.editingTarget == null ||
+ this.editingTarget == evt.target ||
+ this.editingTarget == evt.target.myparent) {
+ return;
+ }
+ var input = $(this.editingTarget).children("input")[0];
+ var text = $(input).val();
+ $(input).remove();
+
+ var valueData = document.createElement("span");
+ $(valueData).addClass("spanValue");
+ $(valueData).text(text);
+
+ if ($(this.editingTarget).hasClass("value")) {
var remove = document.createElement("div");
$(remove).addClass("pull-right").addClass('remove-btn');
var removeBtn = document.createElement("button");
@@ -243,18 +243,18 @@ azkaban.JobEditView = Backbone.View.extend({
$(removeBtn).addClass('btn').addClass('btn-xs').addClass('btn-danger');
$(removeBtn).text('Delete');
$(remove).append(removeBtn);
- $(this.editingTarget).append(remove);
- }
-
- $(this.editingTarget).removeClass("editing");
- $(this.editingTarget).append(valueData);
- valueData.myparent = this.editingTarget;
- this.editingTarget = null;
- }
+ $(this.editingTarget).append(remove);
+ }
+
+ $(this.editingTarget).removeClass("editing");
+ $(this.editingTarget).append(valueData);
+ valueData.myparent = this.editingTarget;
+ this.editingTarget = null;
+ }
});
$(function() {
- jobEditView = new azkaban.JobEditView({
- el: $('#job-edit-pane')
- });
+ jobEditView = new azkaban.JobEditView({
+ el: $('#job-edit-pane')
+ });
});
src/web/js/azkaban/view/job-history.js 28(+14 -14)
diff --git a/src/web/js/azkaban/view/job-history.js b/src/web/js/azkaban/view/job-history.js
index b07c27e..f2119f5 100644
--- a/src/web/js/azkaban/view/job-history.js
+++ b/src/web/js/azkaban/view/job-history.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -23,17 +23,17 @@ var dataModel;
azkaban.DataModel = Backbone.Model.extend({});
$(function() {
- var selected;
- var series = dataSeries;
- dataModel = new azkaban.DataModel();
- dataModel.set({
- "data": series
- });
+ var selected;
+ var series = dataSeries;
+ dataModel = new azkaban.DataModel();
+ dataModel.set({
+ "data": series
+ });
dataModel.trigger('render');
- jobHistoryView = new azkaban.TimeGraphView({
- el: $('#timeGraph'),
- model: dataModel,
+ jobHistoryView = new azkaban.TimeGraphView({
+ el: $('#timeGraph'),
+ model: dataModel,
modelField: "data"
- });
-});
\ No newline at end of file
+ });
+});
src/web/js/azkaban/view/job-list.js 650(+325 -325)
diff --git a/src/web/js/azkaban/view/job-list.js b/src/web/js/azkaban/view/job-list.js
index 3bf66af..b9b7f8b 100644
--- a/src/web/js/azkaban/view/job-list.js
+++ b/src/web/js/azkaban/view/job-list.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -19,326 +19,326 @@
*/
azkaban.JobListView = Backbone.View.extend({
- events: {
- "keyup input": "filterJobs",
- "click li.listElement": "handleJobClick",
- "click #resetPanZoomBtn": "handleResetPanZoom",
- "click #autoPanZoomBtn": "handleAutoPanZoom",
- "contextmenu li.listElement": "handleContextMenuClick",
- "click .expandarrow": "handleToggleMenuExpand",
- "click #close-btn" : "handleClose"
- },
-
- initialize: function(settings) {
- this.model.bind('change:selected', this.handleSelectionChange, this);
- this.model.bind('change:disabled', this.handleDisabledChange, this);
- this.model.bind('change:graph', this.render, this);
- this.model.bind('change:update', this.handleStatusUpdate, this);
-
- $("#open-joblist-btn").click(this.handleOpen);
- $("#joblist-panel").hide();
-
- this.filterInput = $(this.el).find("#filter");
- this.list = $(this.el).find("#joblist");
- this.contextMenu = settings.contextMenuCallback;
- this.listNodes = {};
- },
-
- filterJobs: function(self) {
- var filter = this.filterInput.val();
- // Clear all filters first
- if (!filter || filter.trim() == "") {
- this.unfilterAll(self);
- return;
- }
-
- this.hideAll(self);
- var showList = {};
-
- // find the jobs that need to be exposed.
- for (var key in this.listNodes) {
- var li = this.listNodes[key];
- var node = li.node;
- var nodeName = node.id;
- node.listElement = li;
-
- var index = nodeName.indexOf(filter);
- if (index == -1) {
- continue;
- }
-
- var spanlabel = $(li).find("> a > span");
-
- var endIndex = index + filter.length;
- var newHTML = nodeName.substring(0, index) + "<span class=\"filterHighlight\">" +
- nodeName.substring(index, endIndex) + "</span>" +
- nodeName.substring(endIndex, nodeName.length);
- $(spanlabel).html(newHTML);
-
- // Apply classes to all the included embedded flows.
- var pIndex = key.length;
- while ((pIndex = key.lastIndexOf(":", pIndex - 1)) > 0) {
- var parentId = key.substr(0, pIndex);
- var parentLi = this.listNodes[parentId];
- $(parentLi).show();
- $(parentLi).addClass("subFilter");
- }
-
- $(li).show();
- }
- },
-
- hideAll: function(self) {
- for (var key in this.listNodes) {
- var li = this.listNodes[key];
- var label = $(li).find("> a > span");
- $(label).text(li.node.id);
- $(li).removeClass("subFilter");
- $(li).hide();
- }
- },
-
- unfilterAll: function(self) {
- for (var key in this.listNodes) {
- var li = this.listNodes[key];
- var label = $(li).find("> a > span");
- $(label).text(li.node.id);
- $(li).removeClass("subFilter");
- $(li).show();
- }
- },
-
- handleStatusUpdate: function(evt) {
- var data = this.model.get("data");
- this.changeStatuses(data);
- },
-
- changeStatuses: function(data) {
- for (var i = 0; i < data.nodes.length; ++i) {
- var node = data.nodes[i];
-
- // Confused? In updates, a node reference is given to the update node.
- var liElement = node.listElement;
- var child = $(liElement).children("a");
- if (!$(child).hasClass(node.status)) {
- $(child).removeClass(statusList.join(' '));
- $(child).addClass(node.status);
- $(child).attr("title", node.status + " (" + node.type + ")");
- }
- if (node.nodes) {
- this.changeStatuses(node);
- }
- }
- },
-
- render: function(self) {
- var data = this.model.get("data");
- var nodes = data.nodes;
-
- this.renderTree(this.list, data);
-
- //this.assignInitialStatus(self);
- this.handleDisabledChange(self);
- this.changeStatuses(data);
- },
-
- renderTree: function(el, data, prefix) {
- var nodes = data.nodes;
- if (nodes.length == 0) {
- console.log("No results");
- return;
- };
- if (!prefix) {
- prefix = "";
- }
-
- var nodeArray = nodes.slice(0);
- nodeArray.sort(function(a, b) {
- var diff = a.y - b.y;
- if (diff == 0) {
- return a.x - b.x;
- }
- else {
- return diff;
- }
- });
-
- var ul = document.createElement('ul');
- $(ul).addClass("tree-list");
- for (var i = 0; i < nodeArray.length; ++i) {
- var li = document.createElement("li");
- $(li).addClass("listElement");
- $(li).addClass("tree-list-item");
-
- // This is used for the filter step.
- var listNodeName = prefix + nodeArray[i].id;
- this.listNodes[listNodeName]=li;
- li.node = nodeArray[i];
- li.node.listElement = li;
-
- var a = document.createElement("a");
- var iconDiv = document.createElement('div');
- $(iconDiv).addClass('icon');
-
- $(a).append(iconDiv);
-
- var span = document.createElement("span");
- $(span).text(nodeArray[i].id);
- $(span).addClass("jobname");
- $(a).append(span);
- $(li).append(a);
- $(ul).append(li);
-
- if (nodeArray[i].type == "flow") {
- // Add the up down
- var expandDiv = document.createElement("div");
- $(expandDiv).addClass("expandarrow glyphicon glyphicon-chevron-down");
- $(a).append(expandDiv);
-
- // Create subtree
- var subul = this.renderTree(li, nodeArray[i], listNodeName + ":");
- $(subul).hide();
- }
- }
-
- $(el).append(ul);
- return ul;
- },
-
- handleMenuExpand: function(li) {
- var expandArrow = $(li).find("> a > .expandarrow");
- var submenu = $(li).find("> ul");
-
- $(expandArrow).removeClass("glyphicon-chevron-down");
- $(expandArrow).addClass("glyphicon-chevron-up");
- $(submenu).slideDown();
- },
-
- handleMenuCollapse: function(li) {
- var expandArrow = $(li).find("> a > .expandarrow");
- var submenu = $(li).find("> ul");
-
- $(expandArrow).removeClass("glyphicon-chevron-up");
- $(expandArrow).addClass("glyphicon-chevron-down");
- $(submenu).slideUp();
- },
-
- handleToggleMenuExpand: function(evt) {
- var expandarrow = evt.currentTarget;
- var li = $(evt.currentTarget).closest("li.listElement");
- var submenu = $(li).find("> ul");
-
- if ($(submenu).is(":visible")) {
- this.handleMenuCollapse(li);
- }
- else {
- this.handleMenuExpand(li);
- }
-
- evt.stopImmediatePropagation();
- },
-
- handleContextMenuClick: function(evt) {
- if (this.contextMenu) {
- this.contextMenu(evt, this.model, evt.currentTarget.node);
- return false;
- }
- },
-
- handleJobClick: function(evt) {
- console.log("Job clicked");
- var li = $(evt.currentTarget).closest("li.listElement");
- var node = li[0].node;
- if (!node) {
- return;
- }
-
- if (this.model.has("selected")) {
- var selected = this.model.get("selected");
- if (selected == node) {
- this.model.unset("selected");
- }
- else {
- this.model.set({"selected": node});
- }
- }
- else {
- this.model.set({"selected": node});
- }
-
- evt.stopPropagation();
- evt.cancelBubble = true;
- },
-
- handleDisabledChange: function(evt) {
- this.changeDisabled(this.model.get('data'));
- },
-
- changeDisabled: function(data) {
- for (var i =0; i < data.nodes; ++i) {
- var node = data.nodes[i];
- if (node.disabled = true) {
- removeClass(node.listElement, "nodedisabled");
- if (node.type=='flow') {
- this.changeDisabled(node);
- }
- }
- else {
- addClass(node.listElement, "nodedisabled");
- }
- }
- },
-
- handleSelectionChange: function(evt) {
- if (!this.model.hasChanged("selected")) {
- return;
- }
-
- var previous = this.model.previous("selected");
- var current = this.model.get("selected");
-
- if (previous) {
- $(previous.listElement).removeClass("active");
- }
-
- if (current) {
- $(current.listElement).addClass("active");
- this.propagateExpansion(current.listElement);
- }
- },
-
- propagateExpansion: function(li) {
- var li = $(li).parent().closest("li.listElement")[0];
- if (li) {
- this.propagateExpansion(li);
- this.handleMenuExpand(li);
- }
- },
-
- handleResetPanZoom: function(evt) {
- this.model.trigger("resetPanZoom");
- },
-
- handleAutoPanZoom: function(evt) {
- var target = evt.currentTarget;
- if ($(target).hasClass('btn-default')) {
- $(target).removeClass('btn-default');
- $(target).addClass('btn-info');
- }
- else if ($(target).hasClass('btn-info')) {
- $(target).removeClass('btn-info');
- $(target).addClass('btn-default');
- }
-
- // Using $().hasClass('active') does not use here because it appears that
- // this is called before the Bootstrap toggle completes.
- this.model.set({"autoPanZoom": $(target).hasClass('btn-info')});
- },
-
- handleClose: function(evt) {
- $("#joblist-panel").fadeOut();
- },
- handleOpen: function(evt) {
- $("#joblist-panel").fadeIn();
- }
+ events: {
+ "keyup input": "filterJobs",
+ "click li.listElement": "handleJobClick",
+ "click #resetPanZoomBtn": "handleResetPanZoom",
+ "click #autoPanZoomBtn": "handleAutoPanZoom",
+ "contextmenu li.listElement": "handleContextMenuClick",
+ "click .expandarrow": "handleToggleMenuExpand",
+ "click #close-btn" : "handleClose"
+ },
+
+ initialize: function(settings) {
+ this.model.bind('change:selected', this.handleSelectionChange, this);
+ this.model.bind('change:disabled', this.handleDisabledChange, this);
+ this.model.bind('change:graph', this.render, this);
+ this.model.bind('change:update', this.handleStatusUpdate, this);
+
+ $("#open-joblist-btn").click(this.handleOpen);
+ $("#joblist-panel").hide();
+
+ this.filterInput = $(this.el).find("#filter");
+ this.list = $(this.el).find("#joblist");
+ this.contextMenu = settings.contextMenuCallback;
+ this.listNodes = {};
+ },
+
+ filterJobs: function(self) {
+ var filter = this.filterInput.val();
+ // Clear all filters first
+ if (!filter || filter.trim() == "") {
+ this.unfilterAll(self);
+ return;
+ }
+
+ this.hideAll(self);
+ var showList = {};
+
+ // find the jobs that need to be exposed.
+ for (var key in this.listNodes) {
+ var li = this.listNodes[key];
+ var node = li.node;
+ var nodeName = node.id;
+ node.listElement = li;
+
+ var index = nodeName.indexOf(filter);
+ if (index == -1) {
+ continue;
+ }
+
+ var spanlabel = $(li).find("> a > span");
+
+ var endIndex = index + filter.length;
+ var newHTML = nodeName.substring(0, index) + "<span class=\"filterHighlight\">" +
+ nodeName.substring(index, endIndex) + "</span>" +
+ nodeName.substring(endIndex, nodeName.length);
+ $(spanlabel).html(newHTML);
+
+ // Apply classes to all the included embedded flows.
+ var pIndex = key.length;
+ while ((pIndex = key.lastIndexOf(":", pIndex - 1)) > 0) {
+ var parentId = key.substr(0, pIndex);
+ var parentLi = this.listNodes[parentId];
+ $(parentLi).show();
+ $(parentLi).addClass("subFilter");
+ }
+
+ $(li).show();
+ }
+ },
+
+ hideAll: function(self) {
+ for (var key in this.listNodes) {
+ var li = this.listNodes[key];
+ var label = $(li).find("> a > span");
+ $(label).text(li.node.id);
+ $(li).removeClass("subFilter");
+ $(li).hide();
+ }
+ },
+
+ unfilterAll: function(self) {
+ for (var key in this.listNodes) {
+ var li = this.listNodes[key];
+ var label = $(li).find("> a > span");
+ $(label).text(li.node.id);
+ $(li).removeClass("subFilter");
+ $(li).show();
+ }
+ },
+
+ handleStatusUpdate: function(evt) {
+ var data = this.model.get("data");
+ this.changeStatuses(data);
+ },
+
+ changeStatuses: function(data) {
+ for (var i = 0; i < data.nodes.length; ++i) {
+ var node = data.nodes[i];
+
+ // Confused? In updates, a node reference is given to the update node.
+ var liElement = node.listElement;
+ var child = $(liElement).children("a");
+ if (!$(child).hasClass(node.status)) {
+ $(child).removeClass(statusList.join(' '));
+ $(child).addClass(node.status);
+ $(child).attr("title", node.status + " (" + node.type + ")");
+ }
+ if (node.nodes) {
+ this.changeStatuses(node);
+ }
+ }
+ },
+
+ render: function(self) {
+ var data = this.model.get("data");
+ var nodes = data.nodes;
+
+ this.renderTree(this.list, data);
+
+ //this.assignInitialStatus(self);
+ this.handleDisabledChange(self);
+ this.changeStatuses(data);
+ },
+
+ renderTree: function(el, data, prefix) {
+ var nodes = data.nodes;
+ if (nodes.length == 0) {
+ console.log("No results");
+ return;
+ };
+ if (!prefix) {
+ prefix = "";
+ }
+
+ var nodeArray = nodes.slice(0);
+ nodeArray.sort(function(a, b) {
+ var diff = a.y - b.y;
+ if (diff == 0) {
+ return a.x - b.x;
+ }
+ else {
+ return diff;
+ }
+ });
+
+ var ul = document.createElement('ul');
+ $(ul).addClass("tree-list");
+ for (var i = 0; i < nodeArray.length; ++i) {
+ var li = document.createElement("li");
+ $(li).addClass("listElement");
+ $(li).addClass("tree-list-item");
+
+ // This is used for the filter step.
+ var listNodeName = prefix + nodeArray[i].id;
+ this.listNodes[listNodeName]=li;
+ li.node = nodeArray[i];
+ li.node.listElement = li;
+
+ var a = document.createElement("a");
+ var iconDiv = document.createElement('div');
+ $(iconDiv).addClass('icon');
+
+ $(a).append(iconDiv);
+
+ var span = document.createElement("span");
+ $(span).text(nodeArray[i].id);
+ $(span).addClass("jobname");
+ $(a).append(span);
+ $(li).append(a);
+ $(ul).append(li);
+
+ if (nodeArray[i].type == "flow") {
+ // Add the up down
+ var expandDiv = document.createElement("div");
+ $(expandDiv).addClass("expandarrow glyphicon glyphicon-chevron-down");
+ $(a).append(expandDiv);
+
+ // Create subtree
+ var subul = this.renderTree(li, nodeArray[i], listNodeName + ":");
+ $(subul).hide();
+ }
+ }
+
+ $(el).append(ul);
+ return ul;
+ },
+
+ handleMenuExpand: function(li) {
+ var expandArrow = $(li).find("> a > .expandarrow");
+ var submenu = $(li).find("> ul");
+
+ $(expandArrow).removeClass("glyphicon-chevron-down");
+ $(expandArrow).addClass("glyphicon-chevron-up");
+ $(submenu).slideDown();
+ },
+
+ handleMenuCollapse: function(li) {
+ var expandArrow = $(li).find("> a > .expandarrow");
+ var submenu = $(li).find("> ul");
+
+ $(expandArrow).removeClass("glyphicon-chevron-up");
+ $(expandArrow).addClass("glyphicon-chevron-down");
+ $(submenu).slideUp();
+ },
+
+ handleToggleMenuExpand: function(evt) {
+ var expandarrow = evt.currentTarget;
+ var li = $(evt.currentTarget).closest("li.listElement");
+ var submenu = $(li).find("> ul");
+
+ if ($(submenu).is(":visible")) {
+ this.handleMenuCollapse(li);
+ }
+ else {
+ this.handleMenuExpand(li);
+ }
+
+ evt.stopImmediatePropagation();
+ },
+
+ handleContextMenuClick: function(evt) {
+ if (this.contextMenu) {
+ this.contextMenu(evt, this.model, evt.currentTarget.node);
+ return false;
+ }
+ },
+
+ handleJobClick: function(evt) {
+ console.log("Job clicked");
+ var li = $(evt.currentTarget).closest("li.listElement");
+ var node = li[0].node;
+ if (!node) {
+ return;
+ }
+
+ if (this.model.has("selected")) {
+ var selected = this.model.get("selected");
+ if (selected == node) {
+ this.model.unset("selected");
+ }
+ else {
+ this.model.set({"selected": node});
+ }
+ }
+ else {
+ this.model.set({"selected": node});
+ }
+
+ evt.stopPropagation();
+ evt.cancelBubble = true;
+ },
+
+ handleDisabledChange: function(evt) {
+ this.changeDisabled(this.model.get('data'));
+ },
+
+ changeDisabled: function(data) {
+ for (var i =0; i < data.nodes; ++i) {
+ var node = data.nodes[i];
+ if (node.disabled = true) {
+ removeClass(node.listElement, "nodedisabled");
+ if (node.type=='flow') {
+ this.changeDisabled(node);
+ }
+ }
+ else {
+ addClass(node.listElement, "nodedisabled");
+ }
+ }
+ },
+
+ handleSelectionChange: function(evt) {
+ if (!this.model.hasChanged("selected")) {
+ return;
+ }
+
+ var previous = this.model.previous("selected");
+ var current = this.model.get("selected");
+
+ if (previous) {
+ $(previous.listElement).removeClass("active");
+ }
+
+ if (current) {
+ $(current.listElement).addClass("active");
+ this.propagateExpansion(current.listElement);
+ }
+ },
+
+ propagateExpansion: function(li) {
+ var li = $(li).parent().closest("li.listElement")[0];
+ if (li) {
+ this.propagateExpansion(li);
+ this.handleMenuExpand(li);
+ }
+ },
+
+ handleResetPanZoom: function(evt) {
+ this.model.trigger("resetPanZoom");
+ },
+
+ handleAutoPanZoom: function(evt) {
+ var target = evt.currentTarget;
+ if ($(target).hasClass('btn-default')) {
+ $(target).removeClass('btn-default');
+ $(target).addClass('btn-info');
+ }
+ else if ($(target).hasClass('btn-info')) {
+ $(target).removeClass('btn-info');
+ $(target).addClass('btn-default');
+ }
+
+ // Using $().hasClass('active') does not use here because it appears that
+ // this is called before the Bootstrap toggle completes.
+ this.model.set({"autoPanZoom": $(target).hasClass('btn-info')});
+ },
+
+ handleClose: function(evt) {
+ $("#joblist-panel").fadeOut();
+ },
+ handleOpen: function(evt) {
+ $("#joblist-panel").fadeIn();
+ }
});
src/web/js/azkaban/view/login.js 94(+47 -47)
diff --git a/src/web/js/azkaban/view/login.js b/src/web/js/azkaban/view/login.js
index 1ac077f..685153a 100644
--- a/src/web/js/azkaban/view/login.js
+++ b/src/web/js/azkaban/view/login.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -18,52 +18,52 @@ $.namespace('azkaban');
var loginView;
azkaban.LoginView = Backbone.View.extend({
- events: {
- "click #login-submit": "handleLogin",
- 'keypress input': 'handleKeyPress'
- },
+ events: {
+ "click #login-submit": "handleLogin",
+ 'keypress input': 'handleKeyPress'
+ },
- initialize: function(settings) {
- $('#error-msg').hide();
- },
-
- handleLogin: function(evt) {
- console.log("Logging in.");
- var username = $("#username").val();
- var password = $("#password").val();
-
- $.ajax({
- async: "false",
- url: contextURL,
- dataType: "json",
- type: "POST",
- data: {
- action: "login",
- username: username,
- password: password
- },
- success: function(data) {
- if (data.error) {
- $('#error-msg').text(data.error);
- $('#error-msg').slideDown('fast');
- }
- else {
- document.location.reload();
- }
- }
- });
- },
-
- handleKeyPress: function(evt) {
- if (evt.charCode == 13 || evt.keyCode == 13) {
- this.handleLogin();
- }
- },
-
- render: function() {
- }
+ initialize: function(settings) {
+ $('#error-msg').hide();
+ },
+
+ handleLogin: function(evt) {
+ console.log("Logging in.");
+ var username = $("#username").val();
+ var password = $("#password").val();
+
+ $.ajax({
+ async: "false",
+ url: contextURL,
+ dataType: "json",
+ type: "POST",
+ data: {
+ action: "login",
+ username: username,
+ password: password
+ },
+ success: function(data) {
+ if (data.error) {
+ $('#error-msg').text(data.error);
+ $('#error-msg').slideDown('fast');
+ }
+ else {
+ document.location.reload();
+ }
+ }
+ });
+ },
+
+ handleKeyPress: function(evt) {
+ if (evt.charCode == 13 || evt.keyCode == 13) {
+ this.handleLogin();
+ }
+ },
+
+ render: function() {
+ }
});
$(function() {
- loginView = new azkaban.LoginView({el: $('#login-form')});
+ loginView = new azkaban.LoginView({el: $('#login-form')});
});
src/web/js/azkaban/view/main.js 356(+178 -178)
diff --git a/src/web/js/azkaban/view/main.js b/src/web/js/azkaban/view/main.js
index 2ec3c5b..8ba3851 100644
--- a/src/web/js/azkaban/view/main.js
+++ b/src/web/js/azkaban/view/main.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -18,192 +18,192 @@ $.namespace('azkaban');
var projectTableView;
azkaban.ProjectTableView = Backbone.View.extend({
- events: {
- "click .project-expander": "expandProject"
- },
-
- initialize: function(settings) {
- },
-
- expandProject: function(evt) {
- if (evt.target.tagName == "A") {
- return;
- }
-
- var target = evt.currentTarget;
- var targetId = target.id;
- var requestURL = contextURL + "/manager";
-
- var targetExpanded = $('#' + targetId + '-child');
- var targetTBody = $('#' + targetId + '-tbody');
- var createFlowListFunction = this.createFlowListTable;
-
- if (target.loading) {
- console.log("Still loading.");
- }
- else if (target.loaded) {
- if ($(targetExpanded).is(':visible')) {
- $(target).addClass('expanded').removeClass('collapsed');
- var expander = $(target).children('.project-expander-icon')[0];
- $(expander).removeClass('glyphicon-chevron-up');
- $(expander).addClass('glyphicon-chevron-down');
- $(targetExpanded).slideUp(300);
- }
- else {
- $(target).addClass('collapsed').removeClass('expanded');
- var expander = $(target).children('.project-expander-icon')[0];
- $(expander).removeClass('glyphicon-chevron-down');
- $(expander).addClass('glyphicon-chevron-up');
- $(targetExpanded).slideDown(300);
- }
- }
- else {
- // projectId is available
- $(target).addClass('wait').removeClass('collapsed').removeClass('expanded');
- target.loading = true;
-
- var request = {
- "project": targetId,
- "ajax": "fetchprojectflows"
- };
-
- var successHandler = function(data) {
- console.log("Success");
- target.loaded = true;
- target.loading = false;
-
- createFlowListFunction(data, targetTBody);
-
- $(target).addClass('collapsed').removeClass('wait');
- var expander = $(target).children('.project-expander-icon')[0];
- $(expander).removeClass('glyphicon-chevron-down');
- $(expander).addClass('glyphicon-chevron-up');
- $(targetExpanded).slideDown(300);
- };
-
- $.get(requestURL, request, successHandler, "json");
- }
- },
-
- render: function() {
- },
-
- createFlowListTable: function(data, innerTable) {
- var flows = data.flows;
- flows.sort(function(a,b) {
- return a.flowId.localeCompare(b.flowId);
- });
- var requestURL = contextURL + "/manager?project=" + data.project + "&flow=";
- for (var i = 0; i < flows.length; ++i) {
- var id = flows[i].flowId;
- var ida = document.createElement("a");
- ida.project = data.project;
- $(ida).text(id);
- $(ida).attr("href", requestURL + id);
+ events: {
+ "click .project-expander": "expandProject"
+ },
+
+ initialize: function(settings) {
+ },
+
+ expandProject: function(evt) {
+ if (evt.target.tagName == "A") {
+ return;
+ }
+
+ var target = evt.currentTarget;
+ var targetId = target.id;
+ var requestURL = contextURL + "/manager";
+
+ var targetExpanded = $('#' + targetId + '-child');
+ var targetTBody = $('#' + targetId + '-tbody');
+ var createFlowListFunction = this.createFlowListTable;
+
+ if (target.loading) {
+ console.log("Still loading.");
+ }
+ else if (target.loaded) {
+ if ($(targetExpanded).is(':visible')) {
+ $(target).addClass('expanded').removeClass('collapsed');
+ var expander = $(target).children('.project-expander-icon')[0];
+ $(expander).removeClass('glyphicon-chevron-up');
+ $(expander).addClass('glyphicon-chevron-down');
+ $(targetExpanded).slideUp(300);
+ }
+ else {
+ $(target).addClass('collapsed').removeClass('expanded');
+ var expander = $(target).children('.project-expander-icon')[0];
+ $(expander).removeClass('glyphicon-chevron-down');
+ $(expander).addClass('glyphicon-chevron-up');
+ $(targetExpanded).slideDown(300);
+ }
+ }
+ else {
+ // projectId is available
+ $(target).addClass('wait').removeClass('collapsed').removeClass('expanded');
+ target.loading = true;
+
+ var request = {
+ "project": targetId,
+ "ajax": "fetchprojectflows"
+ };
+
+ var successHandler = function(data) {
+ console.log("Success");
+ target.loaded = true;
+ target.loading = false;
+
+ createFlowListFunction(data, targetTBody);
+
+ $(target).addClass('collapsed').removeClass('wait');
+ var expander = $(target).children('.project-expander-icon')[0];
+ $(expander).removeClass('glyphicon-chevron-down');
+ $(expander).addClass('glyphicon-chevron-up');
+ $(targetExpanded).slideDown(300);
+ };
+
+ $.get(requestURL, request, successHandler, "json");
+ }
+ },
+
+ render: function() {
+ },
+
+ createFlowListTable: function(data, innerTable) {
+ var flows = data.flows;
+ flows.sort(function(a,b) {
+ return a.flowId.localeCompare(b.flowId);
+ });
+ var requestURL = contextURL + "/manager?project=" + data.project + "&flow=";
+ for (var i = 0; i < flows.length; ++i) {
+ var id = flows[i].flowId;
+ var ida = document.createElement("a");
+ ida.project = data.project;
+ $(ida).text(id);
+ $(ida).attr("href", requestURL + id);
$(ida).addClass('list-group-item');
- $(innerTable).append(ida);
- }
- }
+ $(innerTable).append(ida);
+ }
+ }
});
var projectHeaderView;
azkaban.ProjectHeaderView = Backbone.View.extend({
- events: {
- "click #create-project-btn": "handleCreateProjectJob"
- },
-
- initialize: function(settings) {
- console.log("project header view initialize.");
- if (settings.errorMsg && settings.errorMsg != "null") {
- $('#messaging').addClass("alert-danger");
- $('#messaging').removeClass("alert-success");
- $('#messaging-message').html(settings.errorMsg);
- }
- else if (settings.successMsg && settings.successMsg != "null") {
- $('#messaging').addClass("alert-success");
- $('#messaging').removeClass("alert-danger");
- $('#messaging-message').html(settings.successMsg);
- }
- else {
- $('#messaging').removeClass("alert-success");
- $('#messaging').removeClass("alert-danger");
- }
- },
-
- handleCreateProjectJob: function(evt) {
- $('#create-project-modal').modal();
- },
-
- render: function() {
- }
+ events: {
+ "click #create-project-btn": "handleCreateProjectJob"
+ },
+
+ initialize: function(settings) {
+ console.log("project header view initialize.");
+ if (settings.errorMsg && settings.errorMsg != "null") {
+ $('#messaging').addClass("alert-danger");
+ $('#messaging').removeClass("alert-success");
+ $('#messaging-message').html(settings.errorMsg);
+ }
+ else if (settings.successMsg && settings.successMsg != "null") {
+ $('#messaging').addClass("alert-success");
+ $('#messaging').removeClass("alert-danger");
+ $('#messaging-message').html(settings.successMsg);
+ }
+ else {
+ $('#messaging').removeClass("alert-success");
+ $('#messaging').removeClass("alert-danger");
+ }
+ },
+
+ handleCreateProjectJob: function(evt) {
+ $('#create-project-modal').modal();
+ },
+
+ render: function() {
+ }
});
var createProjectView;
azkaban.CreateProjectView = Backbone.View.extend({
- events: {
- "click #create-btn": "handleCreateProject"
- },
-
- initialize: function(settings) {
- $("#modal-error-msg").hide();
- },
-
- handleCreateProject: function(evt) {
- // First make sure we can upload
- var projectName = $('#path').val();
- var description = $('#description').val();
- console.log("Creating");
- $.ajax({
- async: "false",
- url: "manager",
- dataType: "json",
- type: "POST",
- data: {
- action: "create",
- name: projectName,
- description: description
- },
- success: function(data) {
- if (data.status == "success") {
- if (data.action == "redirect") {
- window.location = data.path;
- }
- }
- else {
- if (data.action == "login") {
- window.location = "";
- }
- else {
- $("#modal-error-msg").text("ERROR: " + data.message);
- $("#modal-error-msg").slideDown("fast");
- }
- }
- }
- });
- },
-
- render: function() {
- }
+ events: {
+ "click #create-btn": "handleCreateProject"
+ },
+
+ initialize: function(settings) {
+ $("#modal-error-msg").hide();
+ },
+
+ handleCreateProject: function(evt) {
+ // First make sure we can upload
+ var projectName = $('#path').val();
+ var description = $('#description').val();
+ console.log("Creating");
+ $.ajax({
+ async: "false",
+ url: "manager",
+ dataType: "json",
+ type: "POST",
+ data: {
+ action: "create",
+ name: projectName,
+ description: description
+ },
+ success: function(data) {
+ if (data.status == "success") {
+ if (data.action == "redirect") {
+ window.location = data.path;
+ }
+ }
+ else {
+ if (data.action == "login") {
+ window.location = "";
+ }
+ else {
+ $("#modal-error-msg").text("ERROR: " + data.message);
+ $("#modal-error-msg").slideDown("fast");
+ }
+ }
+ }
+ });
+ },
+
+ render: function() {
+ }
});
var tableSorterView;
$(function() {
- projectHeaderView = new azkaban.ProjectHeaderView({
- el: $('#create-project'),
- successMsg: successMessage,
- errorMsg: errorMessage
- });
-
- projectTableView = new azkaban.ProjectTableView({
- el: $('#project-list')
- });
-
- /*tableSorterView = new azkaban.TableSorter({
- el: $('#all-jobs'),
- initialSort: $('.tb-name')
- });*/
-
- uploadView = new azkaban.CreateProjectView({
- el: $('#create-project-modal')
- });
+ projectHeaderView = new azkaban.ProjectHeaderView({
+ el: $('#create-project'),
+ successMsg: successMessage,
+ errorMsg: errorMessage
+ });
+
+ projectTableView = new azkaban.ProjectTableView({
+ el: $('#project-list')
+ });
+
+ /*tableSorterView = new azkaban.TableSorter({
+ el: $('#all-jobs'),
+ initialSort: $('.tb-name')
+ });*/
+
+ uploadView = new azkaban.CreateProjectView({
+ el: $('#create-project-modal')
+ });
});
src/web/js/azkaban/view/message-dialog.js 44(+22 -22)
diff --git a/src/web/js/azkaban/view/message-dialog.js b/src/web/js/azkaban/view/message-dialog.js
index cea0852..d13671f 100644
--- a/src/web/js/azkaban/view/message-dialog.js
+++ b/src/web/js/azkaban/view/message-dialog.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -18,27 +18,27 @@ $.namespace('azkaban');
var messageDialogView;
azkaban.MessageDialogView = Backbone.View.extend({
- events: {
- },
+ events: {
+ },
- initialize: function(settings) {
- },
-
- show: function(title, message, callback) {
- $("#azkaban-message-dialog-title").text(title);
- $("#azkaban-message-dialog-text").text(message);
- this.callback = callback;
- $(this.el).on('hidden.bs.modal', function() {
- if (callback) {
- callback.call();
- }
- });
- $(this.el).modal();
- }
+ initialize: function(settings) {
+ },
+
+ show: function(title, message, callback) {
+ $("#azkaban-message-dialog-title").text(title);
+ $("#azkaban-message-dialog-text").text(message);
+ this.callback = callback;
+ $(this.el).on('hidden.bs.modal', function() {
+ if (callback) {
+ callback.call();
+ }
+ });
+ $(this.el).modal();
+ }
});
$(function() {
- messageDialogView = new azkaban.MessageDialogView({
- el: $('#azkaban-message-dialog')
- });
+ messageDialogView = new azkaban.MessageDialogView({
+ el: $('#azkaban-message-dialog')
+ });
});
src/web/js/azkaban/view/project.js 446(+223 -223)
diff --git a/src/web/js/azkaban/view/project.js b/src/web/js/azkaban/view/project.js
index a63830a..d076be9 100644
--- a/src/web/js/azkaban/view/project.js
+++ b/src/web/js/azkaban/view/project.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -18,227 +18,227 @@ $.namespace('azkaban');
var flowTableView;
azkaban.FlowTableView = Backbone.View.extend({
- events : {
- "click .flow-expander": "expandFlowProject",
- "mouseover .expanded-flow-job-list li": "highlight",
- "mouseout .expanded-flow-job-list li": "unhighlight",
- "click .runJob": "runJob",
- "click .runWithDep": "runWithDep",
- "click .execute-flow": "executeFlow",
- "click .viewFlow": "viewFlow",
- "click .viewJob": "viewJob"
- },
-
- initialize: function(settings) {
- },
-
- expandFlowProject: function(evt) {
- if (evt.target.tagName == "A" || evt.target.tagName == "BUTTON") {
- return;
- }
-
- var target = evt.currentTarget;
- var targetId = target.id;
- var requestURL = contextURL + "/manager";
-
- var targetExpanded = $('#' + targetId + '-child');
- var targetTBody = $('#' + targetId + '-tbody');
-
- var createJobListFunction = this.createJobListTable;
- if (target.loading) {
- console.log("Still loading.");
- }
- else if (target.loaded) {
- $(targetExpanded).collapse('toggle');
- var expander = $(target).children('.flow-expander-icon')[0];
- if ($(expander).hasClass('glyphicon-chevron-down')) {
- $(expander).removeClass('glyphicon-chevron-down');
- $(expander).addClass('glyphicon-chevron-up');
- }
- else {
- $(expander).removeClass('glyphicon-chevron-up');
- $(expander).addClass('glyphicon-chevron-down');
- }
- }
- else {
- // projectName is available
- target.loading = true;
- var requestData = {
- "project": projectName,
- "ajax": "fetchflowjobs",
- "flow": targetId
- };
- var successHandler = function(data) {
- console.log("Success");
- target.loaded = true;
- target.loading = false;
- createJobListFunction(data, targetTBody);
- $(targetExpanded).collapse('show');
- var expander = $(target).children('.flow-expander-icon')[0];
- $(expander).removeClass('glyphicon-chevron-down');
- $(expander).addClass('glyphicon-chevron-up');
- };
- $.get(requestURL, requestData, successHandler, "json");
- }
- },
-
- createJobListTable: function(data, innerTable) {
- var nodes = data.nodes;
- var flowId = data.flowId;
- var project = data.project;
- var requestURL = contextURL + "/manager?project=" + project + "&flow=" + flowId + "&job=";
- for (var i = 0; i < nodes.length; i++) {
- var job = nodes[i];
- var name = job.id;
- var level = job.level;
- var nodeId = flowId + "-" + name;
-
- var li = document.createElement('li');
- $(li).addClass("list-group-item");
- $(li).attr("id", nodeId);
- li.flowId = flowId;
- li.dependents = job.dependents;
- li.dependencies = job.dependencies;
- li.projectName = project;
- li.jobName = name;
-
- if (execAccess) {
- var hoverMenuDiv = document.createElement('div');
- $(hoverMenuDiv).addClass('pull-right');
- $(hoverMenuDiv).addClass('job-buttons');
-
- var divRunJob = document.createElement('button');
- $(divRunJob).attr('type', 'button');
- $(divRunJob).addClass("btn");
- $(divRunJob).addClass("btn-success");
- $(divRunJob).addClass("btn-xs");
- $(divRunJob).addClass("runJob");
- $(divRunJob).text("Run Job");
- divRunJob.jobName = name;
- divRunJob.flowId = flowId;
- $(hoverMenuDiv).append(divRunJob);
-
- var divRunWithDep = document.createElement("button");
- $(divRunWithDep).attr('type', 'button');
- $(divRunWithDep).addClass("btn");
- $(divRunWithDep).addClass("btn-success");
- $(divRunWithDep).addClass("btn-xs");
- $(divRunWithDep).addClass("runWithDep");
- $(divRunWithDep).text("Run With Dependencies");
- divRunWithDep.jobName = name;
- divRunWithDep.flowId = flowId;
- $(hoverMenuDiv).append(divRunWithDep);
-
- $(li).append(hoverMenuDiv);
- }
-
- var ida = document.createElement("a");
- $(ida).css("margin-left", level * 20);
- $(ida).attr("href", requestURL + name);
- $(ida).text(name);
-
- $(li).append(ida);
- $(innerTable).append(li);
- }
- },
-
- unhighlight: function(evt) {
- var currentTarget = evt.currentTarget;
- $(".dependent").removeClass("dependent");
- $(".dependency").removeClass("dependency");
- },
-
- highlight: function(evt) {
- var currentTarget = evt.currentTarget;
- $(".dependent").removeClass("dependent");
- $(".dependency").removeClass("dependency");
- this.highlightJob(currentTarget);
- },
-
- highlightJob: function(currentTarget) {
- var dependents = currentTarget.dependents;
- var dependencies = currentTarget.dependencies;
- var flowid = currentTarget.flowId;
-
- if (dependents) {
- for (var i = 0; i < dependents.length; ++i) {
- var depId = flowid + "-" + dependents[i];
- $("#"+depId).toggleClass("dependent");
- }
- }
-
- if (dependencies) {
- for (var i = 0; i < dependencies.length; ++i) {
- var depId = flowid + "-" + dependencies[i];
- $("#"+depId).toggleClass("dependency");
- }
- }
- },
-
- viewFlow: function(evt) {
- console.log("View Flow");
- var flowId = evt.currentTarget.flowId;
- location.href = contextURL + "/manager?project=" + projectName + "&flow=" + flowId;
- },
-
- viewJob: function(evt) {
- console.log("View Job");
- var flowId = evt.currentTarget.flowId;
- var jobId = evt.currentTarget.jobId;
- location.href = contextURL + "/manager?project=" + projectName + "&flow=" + flowId + "&job=" + jobId;
- },
-
- runJob: function(evt) {
- console.log("Run Job");
- var jobId = evt.currentTarget.jobName;
- var flowId = evt.currentTarget.flowId;
-
- var executingData = {
- project: projectName,
- ajax: "executeFlow",
- flow: flowId,
- job: jobId
- };
-
- this.executeFlowDialog(executingData);
- },
-
- runWithDep: function(evt) {
- var jobId = evt.currentTarget.jobName;
- var flowId = evt.currentTarget.flowId;
- console.log("Run With Dep");
-
- var executingData = {
- project: projectName,
- ajax: "executeFlow",
- flow: flowId,
- job: jobId,
- withDep: true
- };
- this.executeFlowDialog(executingData);
- },
-
- executeFlow: function(evt) {
- console.log("Execute Flow");
- var flowId = $(evt.currentTarget).attr('flowid');
-
- var executingData = {
- project: projectName,
- ajax: "executeFlow",
- flow: flowId
- };
-
- this.executeFlowDialog(executingData);
- },
-
- executeFlowDialog: function(executingData) {
- flowExecuteDialogView.show(executingData);
- },
-
- render: function() {
- }
+ events : {
+ "click .flow-expander": "expandFlowProject",
+ "mouseover .expanded-flow-job-list li": "highlight",
+ "mouseout .expanded-flow-job-list li": "unhighlight",
+ "click .runJob": "runJob",
+ "click .runWithDep": "runWithDep",
+ "click .execute-flow": "executeFlow",
+ "click .viewFlow": "viewFlow",
+ "click .viewJob": "viewJob"
+ },
+
+ initialize: function(settings) {
+ },
+
+ expandFlowProject: function(evt) {
+ if (evt.target.tagName == "A" || evt.target.tagName == "BUTTON") {
+ return;
+ }
+
+ var target = evt.currentTarget;
+ var targetId = target.id;
+ var requestURL = contextURL + "/manager";
+
+ var targetExpanded = $('#' + targetId + '-child');
+ var targetTBody = $('#' + targetId + '-tbody');
+
+ var createJobListFunction = this.createJobListTable;
+ if (target.loading) {
+ console.log("Still loading.");
+ }
+ else if (target.loaded) {
+ $(targetExpanded).collapse('toggle');
+ var expander = $(target).children('.flow-expander-icon')[0];
+ if ($(expander).hasClass('glyphicon-chevron-down')) {
+ $(expander).removeClass('glyphicon-chevron-down');
+ $(expander).addClass('glyphicon-chevron-up');
+ }
+ else {
+ $(expander).removeClass('glyphicon-chevron-up');
+ $(expander).addClass('glyphicon-chevron-down');
+ }
+ }
+ else {
+ // projectName is available
+ target.loading = true;
+ var requestData = {
+ "project": projectName,
+ "ajax": "fetchflowjobs",
+ "flow": targetId
+ };
+ var successHandler = function(data) {
+ console.log("Success");
+ target.loaded = true;
+ target.loading = false;
+ createJobListFunction(data, targetTBody);
+ $(targetExpanded).collapse('show');
+ var expander = $(target).children('.flow-expander-icon')[0];
+ $(expander).removeClass('glyphicon-chevron-down');
+ $(expander).addClass('glyphicon-chevron-up');
+ };
+ $.get(requestURL, requestData, successHandler, "json");
+ }
+ },
+
+ createJobListTable: function(data, innerTable) {
+ var nodes = data.nodes;
+ var flowId = data.flowId;
+ var project = data.project;
+ var requestURL = contextURL + "/manager?project=" + project + "&flow=" + flowId + "&job=";
+ for (var i = 0; i < nodes.length; i++) {
+ var job = nodes[i];
+ var name = job.id;
+ var level = job.level;
+ var nodeId = flowId + "-" + name;
+
+ var li = document.createElement('li');
+ $(li).addClass("list-group-item");
+ $(li).attr("id", nodeId);
+ li.flowId = flowId;
+ li.dependents = job.dependents;
+ li.dependencies = job.dependencies;
+ li.projectName = project;
+ li.jobName = name;
+
+ if (execAccess) {
+ var hoverMenuDiv = document.createElement('div');
+ $(hoverMenuDiv).addClass('pull-right');
+ $(hoverMenuDiv).addClass('job-buttons');
+
+ var divRunJob = document.createElement('button');
+ $(divRunJob).attr('type', 'button');
+ $(divRunJob).addClass("btn");
+ $(divRunJob).addClass("btn-success");
+ $(divRunJob).addClass("btn-xs");
+ $(divRunJob).addClass("runJob");
+ $(divRunJob).text("Run Job");
+ divRunJob.jobName = name;
+ divRunJob.flowId = flowId;
+ $(hoverMenuDiv).append(divRunJob);
+
+ var divRunWithDep = document.createElement("button");
+ $(divRunWithDep).attr('type', 'button');
+ $(divRunWithDep).addClass("btn");
+ $(divRunWithDep).addClass("btn-success");
+ $(divRunWithDep).addClass("btn-xs");
+ $(divRunWithDep).addClass("runWithDep");
+ $(divRunWithDep).text("Run With Dependencies");
+ divRunWithDep.jobName = name;
+ divRunWithDep.flowId = flowId;
+ $(hoverMenuDiv).append(divRunWithDep);
+
+ $(li).append(hoverMenuDiv);
+ }
+
+ var ida = document.createElement("a");
+ $(ida).css("margin-left", level * 20);
+ $(ida).attr("href", requestURL + name);
+ $(ida).text(name);
+
+ $(li).append(ida);
+ $(innerTable).append(li);
+ }
+ },
+
+ unhighlight: function(evt) {
+ var currentTarget = evt.currentTarget;
+ $(".dependent").removeClass("dependent");
+ $(".dependency").removeClass("dependency");
+ },
+
+ highlight: function(evt) {
+ var currentTarget = evt.currentTarget;
+ $(".dependent").removeClass("dependent");
+ $(".dependency").removeClass("dependency");
+ this.highlightJob(currentTarget);
+ },
+
+ highlightJob: function(currentTarget) {
+ var dependents = currentTarget.dependents;
+ var dependencies = currentTarget.dependencies;
+ var flowid = currentTarget.flowId;
+
+ if (dependents) {
+ for (var i = 0; i < dependents.length; ++i) {
+ var depId = flowid + "-" + dependents[i];
+ $("#"+depId).toggleClass("dependent");
+ }
+ }
+
+ if (dependencies) {
+ for (var i = 0; i < dependencies.length; ++i) {
+ var depId = flowid + "-" + dependencies[i];
+ $("#"+depId).toggleClass("dependency");
+ }
+ }
+ },
+
+ viewFlow: function(evt) {
+ console.log("View Flow");
+ var flowId = evt.currentTarget.flowId;
+ location.href = contextURL + "/manager?project=" + projectName + "&flow=" + flowId;
+ },
+
+ viewJob: function(evt) {
+ console.log("View Job");
+ var flowId = evt.currentTarget.flowId;
+ var jobId = evt.currentTarget.jobId;
+ location.href = contextURL + "/manager?project=" + projectName + "&flow=" + flowId + "&job=" + jobId;
+ },
+
+ runJob: function(evt) {
+ console.log("Run Job");
+ var jobId = evt.currentTarget.jobName;
+ var flowId = evt.currentTarget.flowId;
+
+ var executingData = {
+ project: projectName,
+ ajax: "executeFlow",
+ flow: flowId,
+ job: jobId
+ };
+
+ this.executeFlowDialog(executingData);
+ },
+
+ runWithDep: function(evt) {
+ var jobId = evt.currentTarget.jobName;
+ var flowId = evt.currentTarget.flowId;
+ console.log("Run With Dep");
+
+ var executingData = {
+ project: projectName,
+ ajax: "executeFlow",
+ flow: flowId,
+ job: jobId,
+ withDep: true
+ };
+ this.executeFlowDialog(executingData);
+ },
+
+ executeFlow: function(evt) {
+ console.log("Execute Flow");
+ var flowId = $(evt.currentTarget).attr('flowid');
+
+ var executingData = {
+ project: projectName,
+ ajax: "executeFlow",
+ flow: flowId
+ };
+
+ this.executeFlowDialog(executingData);
+ },
+
+ executeFlowDialog: function(executingData) {
+ flowExecuteDialogView.show(executingData);
+ },
+
+ render: function() {
+ }
});
$(function() {
- flowTableView = new azkaban.FlowTableView({el:$('#flow-tabs')});
+ flowTableView = new azkaban.FlowTableView({el:$('#flow-tabs')});
});
src/web/js/azkaban/view/project-logs.js 102(+51 -51)
diff --git a/src/web/js/azkaban/view/project-logs.js b/src/web/js/azkaban/view/project-logs.js
index 0653146..f4bd5c3 100644
--- a/src/web/js/azkaban/view/project-logs.js
+++ b/src/web/js/azkaban/view/project-logs.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -22,40 +22,40 @@ azkaban.LogModel = Backbone.Model.extend({});
// From ProjectLogEvent.java
// ERROR(128), CREATED(1), DELETED(2), USER_PERMISSION(3), GROUP_PERMISSION(4), DESCRIPTION(5);
var typeMapping = {
- "ERROR": "Error",
- "CREATED": "Project Created",
- "DELETED": "Project Deleted",
- "USER_PERMISSION" : "User Permission",
- "GROUP_PERMISSION" : "Group Permission",
- "DESCRIPTION" : "Description Set",
- "SCHEDULE": "Schedule",
- "UPLOADED": "Uploaded"
+ "ERROR": "Error",
+ "CREATED": "Project Created",
+ "DELETED": "Project Deleted",
+ "USER_PERMISSION" : "User Permission",
+ "GROUP_PERMISSION" : "Group Permission",
+ "DESCRIPTION" : "Description Set",
+ "SCHEDULE": "Schedule",
+ "UPLOADED": "Uploaded"
};
var projectLogView;
azkaban.ProjectLogView = Backbone.View.extend({
- events: {
- "click #updateLogBtn": "handleUpdate"
- },
-
+ events: {
+ "click #updateLogBtn": "handleUpdate"
+ },
+
initialize: function(settings) {
- this.model.set({"current": 0});
- this.handleUpdate();
- },
-
+ this.model.set({"current": 0});
+ this.handleUpdate();
+ },
+
handleUpdate: function(evt) {
- var current = this.model.get("current");
- var requestURL = contextURL + "/manager";
- var model = this.model;
+ var current = this.model.get("current");
+ var requestURL = contextURL + "/manager";
+ var model = this.model;
var requestData = {
- "project": projectName,
- "ajax": "fetchProjectLogs",
- "size": 1000,
- "skip": 0
+ "project": projectName,
+ "ajax": "fetchProjectLogs",
+ "size": 1000,
+ "skip": 0
};
- var successHandler = function(data) {
- console.log("fetchLogs");
+ var successHandler = function(data) {
+ console.log("fetchLogs");
if (data.error) {
showDialog("Error", data.error);
return;
@@ -75,39 +75,39 @@ azkaban.ProjectLogView = Backbone.View.extend({
var time = event[columnMap['time']];
var type = event[columnMap['type']];
var message = event[columnMap['message']];
-
+
var containerEvent = document.createElement("tr");
$(containerEvent).addClass("projectEvent");
-
+
var containerTime = document.createElement("td");
$(containerTime).addClass("time");
$(containerTime).text(getDateFormat(new Date(time)));
-
+
var containerUser = document.createElement("td");
$(containerUser).addClass("user");
$(containerUser).text(user);
-
+
var containerType = document.createElement("td");
$(containerType).addClass("type");
$(containerType).addClass(type);
$(containerType).text(typeMapping[type] ? typeMapping[type] : type);
-
+
var containerMessage = document.createElement("td");
$(containerMessage).addClass("message");
$(containerMessage).text(message);
-
+
$(containerEvent).append(containerTime);
$(containerEvent).append(containerUser);
$(containerEvent).append(containerType);
$(containerEvent).append(containerMessage);
-
+
$(logSection).append(containerEvent);
}
-
+
model.set({"log": data});
};
- $.get(requestURL, requestData, successHandler);
- }
+ $.get(requestURL, requestData, successHandler);
+ }
});
var showDialog = function(title, message) {
@@ -116,22 +116,22 @@ var showDialog = function(title, message) {
$('#messageBox').text(message);
$('#messageDialog').modal({
- closeHTML: "<a href='#' title='Close' class='modal-close'>x</a>",
- position: ["20%",],
- containerId: 'confirm-container',
- containerCss: {
- 'height': '220px',
- 'width': '565px'
- },
- onShow: function (dialog) {
- }
- });
+ closeHTML: "<a href='#' title='Close' class='modal-close'>x</a>",
+ position: ["20%",],
+ containerId: 'confirm-container',
+ containerCss: {
+ 'height': '220px',
+ 'width': '565px'
+ },
+ onShow: function (dialog) {
+ }
+ });
}
$(function() {
- var selected;
+ var selected;
- logModel = new azkaban.LogModel();
- projectLogView = new azkaban.ProjectLogView({el:$('#projectLogView'), model: logModel});
+ logModel = new azkaban.LogModel();
+ projectLogView = new azkaban.ProjectLogView({el:$('#projectLogView'), model: logModel});
});
src/web/js/azkaban/view/project-modals.js 140(+70 -70)
diff --git a/src/web/js/azkaban/view/project-modals.js b/src/web/js/azkaban/view/project-modals.js
index 6309d1c..302cbfe 100644
--- a/src/web/js/azkaban/view/project-modals.js
+++ b/src/web/js/azkaban/view/project-modals.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -18,78 +18,78 @@ $.namespace('azkaban');
var projectView;
azkaban.ProjectView = Backbone.View.extend({
- events: {
- "click #project-upload-btn": "handleUploadProjectJob",
- "click #project-delete-btn": "handleDeleteProject"
- },
-
- initialize: function(settings) {
- },
-
- handleUploadProjectJob: function(evt) {
- console.log("click upload project");
- $('#upload-project-modal').modal();
- },
-
- handleDeleteProject: function(evt) {
- console.log("click delete project");
- $('#delete-project-modal').modal();
- },
-
- render: function() {
- }
+ events: {
+ "click #project-upload-btn": "handleUploadProjectJob",
+ "click #project-delete-btn": "handleDeleteProject"
+ },
+
+ initialize: function(settings) {
+ },
+
+ handleUploadProjectJob: function(evt) {
+ console.log("click upload project");
+ $('#upload-project-modal').modal();
+ },
+
+ handleDeleteProject: function(evt) {
+ console.log("click delete project");
+ $('#delete-project-modal').modal();
+ },
+
+ render: function() {
+ }
});
var uploadProjectView;
azkaban.UploadProjectView = Backbone.View.extend({
- events: {
- "click #upload-project-btn": "handleCreateProject"
- },
-
- initialize: function(settings) {
- console.log("Hide upload project modal error msg");
- $("#upload-project-modal-error-msg").hide();
- },
-
- handleCreateProject: function(evt) {
- console.log("Upload project button.");
- $("#upload-project-form").submit();
- },
-
- render: function() {
- }
+ events: {
+ "click #upload-project-btn": "handleCreateProject"
+ },
+
+ initialize: function(settings) {
+ console.log("Hide upload project modal error msg");
+ $("#upload-project-modal-error-msg").hide();
+ },
+
+ handleCreateProject: function(evt) {
+ console.log("Upload project button.");
+ $("#upload-project-form").submit();
+ },
+
+ render: function() {
+ }
});
var deleteProjectView;
azkaban.DeleteProjectView = Backbone.View.extend({
- events: {
- "click #delete-btn": "handleDeleteProject"
- },
-
- initialize: function(settings) {
- },
-
- handleDeleteProject: function(evt) {
- $("#delete-form").submit();
- },
-
- render: function() {
- }
+ events: {
+ "click #delete-btn": "handleDeleteProject"
+ },
+
+ initialize: function(settings) {
+ },
+
+ handleDeleteProject: function(evt) {
+ $("#delete-form").submit();
+ },
+
+ render: function() {
+ }
});
var projectDescription;
azkaban.ProjectDescriptionView = Backbone.View.extend({
- events: {
- "click #project-description": "handleDescriptionEdit",
+ events: {
+ "click #project-description": "handleDescriptionEdit",
"click #project-description-btn": "handleDescriptionSave"
- },
+ },
- initialize: function(settings) {
+ initialize: function(settings) {
console.log("project description initialize");
- },
-
- handleDescriptionEdit: function(evt) {
- console.log("Edit description");
+ },
+
+ handleDescriptionEdit: function(evt) {
+ console.log("Edit description");
var description = null;
if ($('#project-description').hasClass('editable-placeholder')) {
description = '';
@@ -101,17 +101,17 @@ azkaban.ProjectDescriptionView = Backbone.View.extend({
$('#project-description-edit').attr("value", description);
$('#project-description').hide();
$('#project-description-form').show();
- },
+ },
handleDescriptionSave: function(evt) {
var newText = $('#project-description-edit').val();
if ($('#project-description-edit').hasClass('has-error')) {
$('#project-description-edit').removeClass('has-error');
}
- var requestURL = contextURL + "/manager";
+ var requestURL = contextURL + "/manager";
var requestData = {
- "project": projectName,
- "ajax":"changeDescription",
+ "project": projectName,
+ "ajax":"changeDescription",
"description": newText
};
var successHandler = function(data) {
@@ -133,21 +133,21 @@ azkaban.ProjectDescriptionView = Backbone.View.extend({
$.get(requestURL, requestData, successHandler, "json");
},
- render: function() {
- }
+ render: function() {
+ }
});
$(function() {
- projectView = new azkaban.ProjectView({
+ projectView = new azkaban.ProjectView({
el: $('#project-options')
});
- uploadView = new azkaban.UploadProjectView({
+ uploadView = new azkaban.UploadProjectView({
el: $('#upload-project-modal')
});
- deleteProjectView = new azkaban.DeleteProjectView({
+ deleteProjectView = new azkaban.DeleteProjectView({
el: $('#delete-project-modal')
});
- projectDescription = new azkaban.ProjectDescriptionView({
+ projectDescription = new azkaban.ProjectDescriptionView({
el: $('#project-sidebar')
});
});
src/web/js/azkaban/view/project-permissions.js 618(+309 -309)
diff --git a/src/web/js/azkaban/view/project-permissions.js b/src/web/js/azkaban/view/project-permissions.js
index 08bf865..2cdce28 100644
--- a/src/web/js/azkaban/view/project-permissions.js
+++ b/src/web/js/azkaban/view/project-permissions.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -20,335 +20,335 @@ var permissionTableView;
var groupPermissionTableView;
azkaban.PermissionTableView = Backbone.View.extend({
- events : {
- "click button": "handleChangePermission"
- },
-
- initialize : function(settings) {
- this.group = settings.group;
- this.proxy = settings.proxy;
- },
-
- render: function() {
- },
-
- handleChangePermission: function(evt) {
- var currentTarget = evt.currentTarget;
- changePermissionView.display(currentTarget.id, false, this.group, this.proxy);
- }
+ events : {
+ "click button": "handleChangePermission"
+ },
+
+ initialize : function(settings) {
+ this.group = settings.group;
+ this.proxy = settings.proxy;
+ },
+
+ render: function() {
+ },
+
+ handleChangePermission: function(evt) {
+ var currentTarget = evt.currentTarget;
+ changePermissionView.display(currentTarget.id, false, this.group, this.proxy);
+ }
});
var proxyTableView;
azkaban.ProxyTableView= Backbone.View.extend({
- events : {
- "click button": "handleRemoveProxy"
- },
-
- initialize : function(settings) {
- },
-
- render: function() {
- },
-
- handleRemoveProxy: function(evt) {
- removeProxyView.display($(evt.currentTarget).attr("name"));
- }
+ events : {
+ "click button": "handleRemoveProxy"
+ },
+
+ initialize : function(settings) {
+ },
+
+ render: function() {
+ },
+
+ handleRemoveProxy: function(evt) {
+ removeProxyView.display($(evt.currentTarget).attr("name"));
+ }
});
var removeProxyView;
azkaban.RemoveProxyView = Backbone.View.extend({
- events: {
- "click #remove-proxy-btn": "handleRemoveProxy"
- },
-
- initialize : function(settings) {
- $('#remove-proxy-error-msg').hide();
- },
-
- display: function(proxyName) {
- this.el.proxyName = proxyName;
- $("#remove-proxy-msg").text("Removing proxy user '" + proxyName + "'");
- $(this.el).modal().on('hide.bs.modal', function(e) {
+ events: {
+ "click #remove-proxy-btn": "handleRemoveProxy"
+ },
+
+ initialize : function(settings) {
+ $('#remove-proxy-error-msg').hide();
+ },
+
+ display: function(proxyName) {
+ this.el.proxyName = proxyName;
+ $("#remove-proxy-msg").text("Removing proxy user '" + proxyName + "'");
+ $(this.el).modal().on('hide.bs.modal', function(e) {
$('#remove-proxy-error-msg').hide();
});
- },
-
- handleRemoveProxy: function() {
- var requestURL = contextURL + "/manager";
- var proxyName = this.el.proxyName;
- var requestData = {
- "project": projectName,
- "name": proxyName,
- "ajax": "removeProxyUser"
- };
- var successHandler = function(data) {
- console.log("Output");
- if (data.error) {
- $("#remove-proxy-error-msg").text(data.error);
- $("#remove-proxy-error-msg").slideDown();
- return;
- }
- var replaceURL = requestURL + "?project=" + projectName +"&permissions";
- window.location.replace(replaceURL);
- };
-
- $.get(requestURL, requestData, successHandler, "json");
- }
+ },
+
+ handleRemoveProxy: function() {
+ var requestURL = contextURL + "/manager";
+ var proxyName = this.el.proxyName;
+ var requestData = {
+ "project": projectName,
+ "name": proxyName,
+ "ajax": "removeProxyUser"
+ };
+ var successHandler = function(data) {
+ console.log("Output");
+ if (data.error) {
+ $("#remove-proxy-error-msg").text(data.error);
+ $("#remove-proxy-error-msg").slideDown();
+ return;
+ }
+ var replaceURL = requestURL + "?project=" + projectName +"&permissions";
+ window.location.replace(replaceURL);
+ };
+
+ $.get(requestURL, requestData, successHandler, "json");
+ }
});
var addProxyView;
azkaban.AddProxyView = Backbone.View.extend({
- events: {
- "click #add-proxy-btn": "handleAddProxy"
- },
-
- initialize : function(settings) {
- $('#add-proxy-error-msg').hide();
- },
-
- display: function() {
- $(this.el).modal().on('hide.bs.modal', function(e) {
+ events: {
+ "click #add-proxy-btn": "handleAddProxy"
+ },
+
+ initialize : function(settings) {
+ $('#add-proxy-error-msg').hide();
+ },
+
+ display: function() {
+ $(this.el).modal().on('hide.bs.modal', function(e) {
$('#add-proxy-error-msg').hide();
});
- },
-
- handleAddProxy: function() {
- var requestURL = contextURL + "/manager";
- var name = $('#proxy-user-box').val().trim();
- var requestData = {
- "project": projectName,
- "name": name,
- "ajax":"addProxyUser"
- };
-
- var successHandler = function(data) {
- console.log("Output");
- if (data.error) {
- $("#add-proxy-error-msg").text(data.error);
- $("#add-proxy-error-msg").slideDown();
- return;
- }
-
- var replaceURL = requestURL + "?project=" + projectName +"&permissions";
- window.location.replace(replaceURL);
- };
- $.get(requestURL, requestData, successHandler, "json");
- }
+ },
+
+ handleAddProxy: function() {
+ var requestURL = contextURL + "/manager";
+ var name = $('#proxy-user-box').val().trim();
+ var requestData = {
+ "project": projectName,
+ "name": name,
+ "ajax":"addProxyUser"
+ };
+
+ var successHandler = function(data) {
+ console.log("Output");
+ if (data.error) {
+ $("#add-proxy-error-msg").text(data.error);
+ $("#add-proxy-error-msg").slideDown();
+ return;
+ }
+
+ var replaceURL = requestURL + "?project=" + projectName +"&permissions";
+ window.location.replace(replaceURL);
+ };
+ $.get(requestURL, requestData, successHandler, "json");
+ }
});
var changePermissionView;
azkaban.ChangePermissionView= Backbone.View.extend({
- events: {
- "click input[type=checkbox]": "handleCheckboxClick",
- "click #change-btn": "handleChangePermissions"
- },
-
- initialize: function(settings) {
- $('#change-permission-error-msg').hide();
- },
-
- display: function(userid, newPerm, group, proxy) {
- // 6 is the length of the prefix "group-"
- this.userid = group ? userid.substring(6, userid.length) : userid;
- if(group == true) {
- this.userid = userid.substring(6, userid.length)
- } else if (proxy == true) {
- this.userid = userid.substring(6, userid.length)
- } else {
- this.userid = userid
- }
-
- this.permission = {};
- $('#user-box').val(this.userid);
- this.newPerm = newPerm;
- this.group = group;
-
- var prefix = userid;
- var adminInput = $("#" + prefix + "-admin-checkbox");
- var readInput = $("#" + prefix + "-read-checkbox");
- var writeInput = $("#" + prefix + "-write-checkbox");
- var executeInput = $("#" + prefix + "-execute-checkbox");
- var scheduleInput = $("#" + prefix + "-schedule-checkbox");
-
- if (newPerm) {
- if (group) {
- $('#change-title').text("Add New Group Permissions");
- }
- else if(proxy){
- $('#change-title').text("Add New Proxy User Permissions");
- }
- else{
- $('#change-title').text("Add New User Permissions");
- }
- $('#user-box').attr("disabled", null);
-
- // default
- this.permission.admin = false;
- this.permission.read = true;
- this.permission.write = false;
- this.permission.execute = false;
- this.permission.schedule = false;
- }
- else {
- if (group) {
- $('#change-title').text("Change Group Permissions");
- }
- else {
- $('#change-title').text("Change User Permissions");
- }
-
- $('#user-box').attr("disabled", "disabled");
-
- this.permission.admin = $(adminInput).is(":checked");
- this.permission.read = $(readInput).is(":checked");
- this.permission.write = $(writeInput).is(":checked");
- this.permission.execute = $(executeInput).is(":checked");
- this.permission.schedule = $(scheduleInput).is(":checked");
- }
-
- this.changeCheckbox();
-
- changePermissionView.render();
- $('#change-permission').modal().on('hide.bs.modal', function(e) {
+ events: {
+ "click input[type=checkbox]": "handleCheckboxClick",
+ "click #change-btn": "handleChangePermissions"
+ },
+
+ initialize: function(settings) {
+ $('#change-permission-error-msg').hide();
+ },
+
+ display: function(userid, newPerm, group, proxy) {
+ // 6 is the length of the prefix "group-"
+ this.userid = group ? userid.substring(6, userid.length) : userid;
+ if(group == true) {
+ this.userid = userid.substring(6, userid.length)
+ } else if (proxy == true) {
+ this.userid = userid.substring(6, userid.length)
+ } else {
+ this.userid = userid
+ }
+
+ this.permission = {};
+ $('#user-box').val(this.userid);
+ this.newPerm = newPerm;
+ this.group = group;
+
+ var prefix = userid;
+ var adminInput = $("#" + prefix + "-admin-checkbox");
+ var readInput = $("#" + prefix + "-read-checkbox");
+ var writeInput = $("#" + prefix + "-write-checkbox");
+ var executeInput = $("#" + prefix + "-execute-checkbox");
+ var scheduleInput = $("#" + prefix + "-schedule-checkbox");
+
+ if (newPerm) {
+ if (group) {
+ $('#change-title').text("Add New Group Permissions");
+ }
+ else if(proxy){
+ $('#change-title').text("Add New Proxy User Permissions");
+ }
+ else{
+ $('#change-title').text("Add New User Permissions");
+ }
+ $('#user-box').attr("disabled", null);
+
+ // default
+ this.permission.admin = false;
+ this.permission.read = true;
+ this.permission.write = false;
+ this.permission.execute = false;
+ this.permission.schedule = false;
+ }
+ else {
+ if (group) {
+ $('#change-title').text("Change Group Permissions");
+ }
+ else {
+ $('#change-title').text("Change User Permissions");
+ }
+
+ $('#user-box').attr("disabled", "disabled");
+
+ this.permission.admin = $(adminInput).is(":checked");
+ this.permission.read = $(readInput).is(":checked");
+ this.permission.write = $(writeInput).is(":checked");
+ this.permission.execute = $(executeInput).is(":checked");
+ this.permission.schedule = $(scheduleInput).is(":checked");
+ }
+
+ this.changeCheckbox();
+
+ changePermissionView.render();
+ $('#change-permission').modal().on('hide.bs.modal', function(e) {
$('#change-permission-error-msg').hide();
});
- },
-
- render: function() {
- },
-
- handleCheckboxClick: function(evt) {
- console.log("click");
- var targetName = evt.currentTarget.name;
- if(targetName == "proxy") {
- this.doProxy = evt.currentTarget.checked;
- }
- else {
- this.permission[targetName] = evt.currentTarget.checked;
- }
- this.changeCheckbox(evt);
- },
-
- changeCheckbox: function(evt) {
- var perm = this.permission;
-
- if (perm.admin) {
- $("#admin-change").attr("checked", true);
- $("#read-change").attr("checked", true);
- $("#read-change").attr("disabled", "disabled");
-
- $("#write-change").attr("checked", true);
- $("#write-change").attr("disabled", "disabled");
-
- $("#execute-change").attr("checked", true);
- $("#execute-change").attr("disabled", "disabled");
-
- $("#schedule-change").attr("checked", true);
- $("#schedule-change").attr("disabled", "disabled");
- }
- else {
- $("#admin-change").attr("checked", false);
-
- $("#read-change").attr("checked", perm.read);
- $("#read-change").attr("disabled", null);
-
- $("#write-change").attr("checked", perm.write);
- $("#write-change").attr("disabled", null);
-
- $("#execute-change").attr("checked", perm.execute);
- $("#execute-change").attr("disabled", null);
-
- $("#schedule-change").attr("checked", perm.schedule);
- $("#schedule-change").attr("disabled", null);
- }
-
- $("#change-btn").removeClass("btn-disabled");
- $("#change-btn").attr("disabled", null);
-
- if (perm.admin || perm.read || perm.write || perm.execute || perm.schedule) {
- $("#change-btn").text("Commit");
- }
- else {
- if (this.newPerm) {
- $("#change-btn").disabled = true;
- $("#change-btn").addClass("btn-disabled");
- }
- else {
- $("#change-btn").text("Remove");
- }
- }
- },
-
- handleChangePermissions : function(evt) {
- var requestURL = contextURL + "/manager";
- var name = $('#user-box').val().trim();
- var command = this.newPerm ? "addPermission" : "changePermission";
- var group = this.group;
-
- var permission = {};
- permission.admin = $("#admin-change").is(":checked");
- permission.read = $("#read-change").is(":checked");
- permission.write = $("#write-change").is(":checked");
- permission.execute = $("#execute-change").is(":checked");
- permission.schedule = $("#schedule-change").is(":checked");
-
- var requestData = {
- "project": projectName,
- "name": name,
- "ajax": command,
- "permissions": this.permission,
- "group": group
- };
- var successHandler = function(data) {
- console.log("Output");
- if (data.error) {
- $("#change-permission-error-msg").text(data.error);
- $("#change-permission-error-msg").slideDown();
- return;
- }
-
- var replaceURL = requestURL + "?project=" + projectName +"&permissions";
- window.location.replace(replaceURL);
- };
-
- $.get(requestURL, requestData, successHandler, "json");
- }
+ },
+
+ render: function() {
+ },
+
+ handleCheckboxClick: function(evt) {
+ console.log("click");
+ var targetName = evt.currentTarget.name;
+ if(targetName == "proxy") {
+ this.doProxy = evt.currentTarget.checked;
+ }
+ else {
+ this.permission[targetName] = evt.currentTarget.checked;
+ }
+ this.changeCheckbox(evt);
+ },
+
+ changeCheckbox: function(evt) {
+ var perm = this.permission;
+
+ if (perm.admin) {
+ $("#admin-change").attr("checked", true);
+ $("#read-change").attr("checked", true);
+ $("#read-change").attr("disabled", "disabled");
+
+ $("#write-change").attr("checked", true);
+ $("#write-change").attr("disabled", "disabled");
+
+ $("#execute-change").attr("checked", true);
+ $("#execute-change").attr("disabled", "disabled");
+
+ $("#schedule-change").attr("checked", true);
+ $("#schedule-change").attr("disabled", "disabled");
+ }
+ else {
+ $("#admin-change").attr("checked", false);
+
+ $("#read-change").attr("checked", perm.read);
+ $("#read-change").attr("disabled", null);
+
+ $("#write-change").attr("checked", perm.write);
+ $("#write-change").attr("disabled", null);
+
+ $("#execute-change").attr("checked", perm.execute);
+ $("#execute-change").attr("disabled", null);
+
+ $("#schedule-change").attr("checked", perm.schedule);
+ $("#schedule-change").attr("disabled", null);
+ }
+
+ $("#change-btn").removeClass("btn-disabled");
+ $("#change-btn").attr("disabled", null);
+
+ if (perm.admin || perm.read || perm.write || perm.execute || perm.schedule) {
+ $("#change-btn").text("Commit");
+ }
+ else {
+ if (this.newPerm) {
+ $("#change-btn").disabled = true;
+ $("#change-btn").addClass("btn-disabled");
+ }
+ else {
+ $("#change-btn").text("Remove");
+ }
+ }
+ },
+
+ handleChangePermissions : function(evt) {
+ var requestURL = contextURL + "/manager";
+ var name = $('#user-box').val().trim();
+ var command = this.newPerm ? "addPermission" : "changePermission";
+ var group = this.group;
+
+ var permission = {};
+ permission.admin = $("#admin-change").is(":checked");
+ permission.read = $("#read-change").is(":checked");
+ permission.write = $("#write-change").is(":checked");
+ permission.execute = $("#execute-change").is(":checked");
+ permission.schedule = $("#schedule-change").is(":checked");
+
+ var requestData = {
+ "project": projectName,
+ "name": name,
+ "ajax": command,
+ "permissions": this.permission,
+ "group": group
+ };
+ var successHandler = function(data) {
+ console.log("Output");
+ if (data.error) {
+ $("#change-permission-error-msg").text(data.error);
+ $("#change-permission-error-msg").slideDown();
+ return;
+ }
+
+ var replaceURL = requestURL + "?project=" + projectName +"&permissions";
+ window.location.replace(replaceURL);
+ };
+
+ $.get(requestURL, requestData, successHandler, "json");
+ }
});
$(function() {
- permissionTableView = new azkaban.PermissionTableView({
- el: $('#permissions-table'),
- group: false,
- proxy: false
- });
- groupPermissionTableView = new azkaban.PermissionTableView({
- el: $('#group-permissions-table'),
- group: true,
- proxy: false
- });
- proxyTableView = new azkaban.ProxyTableView({
- el: $('#proxy-user-table'),
- group: false,
- proxy: true
- });
- changePermissionView = new azkaban.ChangePermissionView({
- el: $('#change-permission')
- });
- addProxyView = new azkaban.AddProxyView({
- el: $('#add-proxy')
- });
- removeProxyView = new azkaban.RemoveProxyView({
- el: $('#remove-proxy')
- });
- $('#addUser').bind('click', function() {
- changePermissionView.display("", true, false, false);
- });
-
- $('#addGroup').bind('click', function() {
- changePermissionView.display("", true, true, false);
- });
-
- $('#addProxyUser').bind('click', function() {
- addProxyView.display();
- });
+ permissionTableView = new azkaban.PermissionTableView({
+ el: $('#permissions-table'),
+ group: false,
+ proxy: false
+ });
+ groupPermissionTableView = new azkaban.PermissionTableView({
+ el: $('#group-permissions-table'),
+ group: true,
+ proxy: false
+ });
+ proxyTableView = new azkaban.ProxyTableView({
+ el: $('#proxy-user-table'),
+ group: false,
+ proxy: true
+ });
+ changePermissionView = new azkaban.ChangePermissionView({
+ el: $('#change-permission')
+ });
+ addProxyView = new azkaban.AddProxyView({
+ el: $('#add-proxy')
+ });
+ removeProxyView = new azkaban.RemoveProxyView({
+ el: $('#remove-proxy')
+ });
+ $('#addUser').bind('click', function() {
+ changePermissionView.display("", true, false, false);
+ });
+
+ $('#addGroup').bind('click', function() {
+ changePermissionView.display("", true, true, false);
+ });
+
+ $('#addProxyUser').bind('click', function() {
+ addProxyView.display();
+ });
});
src/web/js/azkaban/view/scheduled.js 20(+10 -10)
diff --git a/src/web/js/azkaban/view/scheduled.js b/src/web/js/azkaban/view/scheduled.js
index 101324e..d470aac 100644
--- a/src/web/js/azkaban/view/scheduled.js
+++ b/src/web/js/azkaban/view/scheduled.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -19,12 +19,12 @@ $.namespace('azkaban');
var slaView;
var tableSorterView;
$(function() {
- slaView = new azkaban.ChangeSlaView({el:$('#sla-options')});
- tableSorterView = new azkaban.TableSorter({el:$('#scheduledFlowsTbl')});
- //var requestURL = contextURL + "/manager";
+ slaView = new azkaban.ChangeSlaView({el:$('#sla-options')});
+ tableSorterView = new azkaban.TableSorter({el:$('#scheduledFlowsTbl')});
+ //var requestURL = contextURL + "/manager";
- // Set up the Flow options view. Create a new one every time :p
- //$('#addSlaBtn').click( function() {
- // slaView.show();
- //});
+ // Set up the Flow options view. Create a new one every time :p
+ //$('#addSlaBtn').click( function() {
+ // slaView.show();
+ //});
});
src/web/js/azkaban/view/schedule-options.js 1100(+550 -550)
diff --git a/src/web/js/azkaban/view/schedule-options.js b/src/web/js/azkaban/view/schedule-options.js
index b51af58..f899f05 100644
--- a/src/web/js/azkaban/view/schedule-options.js
+++ b/src/web/js/azkaban/view/schedule-options.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -24,563 +24,563 @@ var scheduleFlowView;
var scheduleFlowData;
//function recurseAllAncestors(nodes, disabledMap, id, disable) {
-// var node = nodes[id];
-//
-// if (node.inNodes) {
-// for (var key in node.inNodes) {
-// disabledMap[key] = disable;
-// recurseAllAncestors(nodes, disabledMap, key, disable);
-// }
-// }
+// var node = nodes[id];
+//
+// if (node.inNodes) {
+// for (var key in node.inNodes) {
+// disabledMap[key] = disable;
+// recurseAllAncestors(nodes, disabledMap, key, disable);
+// }
+// }
//}
//
//function recurseAllDescendents(nodes, disabledMap, id, disable) {
-// var node = nodes[id];
-//
-// if (node.outNodes) {
-// for (var key in node.outNodes) {
-// disabledMap[key] = disable;
-// recurseAllDescendents(nodes, disabledMap, key, disable);
-// }
-// }
+// var node = nodes[id];
+//
+// if (node.outNodes) {
+// for (var key in node.outNodes) {
+// disabledMap[key] = disable;
+// recurseAllDescendents(nodes, disabledMap, key, disable);
+// }
+// }
//}
//
azkaban.ScheduleContextMenu = Backbone.View.extend({
- events : {
- "click #scheduleDisableArrow" : "handleDisabledClick",
- "click #scheduleEnableArrow" : "handleEnabledClick"
- },
- initialize: function(settings) {
- $('#scheduleDisableSub').hide();
- $('#scheduleEnableSub').hide();
- },
- handleEnabledClick: function(evt) {
- if(evt.stopPropagation) {
- evt.stopPropagation();
- }
- evt.cancelBubble=true;
-
- if (evt.currentTarget.expanded) {
- evt.currentTarget.expanded=false;
- $('#scheduleEnableArrow').removeClass('collapse');
- $('#scheduleEnableSub').hide();
- }
- else {
- evt.currentTarget.expanded=true;
- $('#scheduleEnableArrow').addClass('collapse');
- $('#scheduleEnableSub').show();
- }
- },
- handleDisabledClick: function(evt) {
- if(evt.stopPropagation) {
- evt.stopPropagation();
- }
- evt.cancelBubble=true;
-
- if (evt.currentTarget.expanded) {
- evt.currentTarget.expanded=false;
- $('#scheduleDisableArrow').removeClass('collapse');
- $('#scheduleDisableSub').hide();
- }
- else {
- evt.currentTarget.expanded=true;
- $('#scheduleDisableArrow').addClass('collapse');
- $('#scheduleDisableSub').show();
- }
- }
+ events : {
+ "click #scheduleDisableArrow" : "handleDisabledClick",
+ "click #scheduleEnableArrow" : "handleEnabledClick"
+ },
+ initialize: function(settings) {
+ $('#scheduleDisableSub').hide();
+ $('#scheduleEnableSub').hide();
+ },
+ handleEnabledClick: function(evt) {
+ if(evt.stopPropagation) {
+ evt.stopPropagation();
+ }
+ evt.cancelBubble=true;
+
+ if (evt.currentTarget.expanded) {
+ evt.currentTarget.expanded=false;
+ $('#scheduleEnableArrow').removeClass('collapse');
+ $('#scheduleEnableSub').hide();
+ }
+ else {
+ evt.currentTarget.expanded=true;
+ $('#scheduleEnableArrow').addClass('collapse');
+ $('#scheduleEnableSub').show();
+ }
+ },
+ handleDisabledClick: function(evt) {
+ if(evt.stopPropagation) {
+ evt.stopPropagation();
+ }
+ evt.cancelBubble=true;
+
+ if (evt.currentTarget.expanded) {
+ evt.currentTarget.expanded=false;
+ $('#scheduleDisableArrow').removeClass('collapse');
+ $('#scheduleDisableSub').hide();
+ }
+ else {
+ evt.currentTarget.expanded=true;
+ $('#scheduleDisableArrow').addClass('collapse');
+ $('#scheduleDisableSub').show();
+ }
+ }
});
azkaban.ScheduleFlowView = Backbone.View.extend({
- events : {
- "click #schedule-btn": "handleScheduleFlow",
- "click #adv-schedule-opt-btn": "handleAdvancedSchedule"
- },
- initialize : function(settings) {
- $( "#datepicker" ).datepicker();
- $( "#datepicker" ).datepicker('setDate', new Date());
- $("#errorMsg").hide();
- },
- handleAdvancedSchedule : function(evt) {
- console.log("Clicked advanced schedule options button");
- //$('#confirm-container').hide();
- $.modal.close();
- advancedScheduleView.show();
- },
- handleScheduleFlow : function(evt) {
-
- var hourVal = $('#hour').val();
- var minutesVal = $('#minutes').val();
- var ampmVal = $('#am_pm').val();
- var timezoneVal = $('#timezone').val();
- var dateVal = $('#datepicker').val();
- var is_recurringVal = $('#is_recurring').val();
- var periodVal = $('#period').val();
- var periodUnits = $('#period_units').val();
-
- console.log("Creating schedule for "+projectName+"."+flowName);
- $.ajax({
- async: "false",
- url: "schedule",
- dataType: "json",
- type: "POST",
- data: {
- action:"scheduleFlow",
- projectId:projectId,
- projectName:projectName,
- flowName:flowName,
- hour:hourVal,
- minutes:minutesVal,
- am_pm:ampmVal,
- timezone:timezoneVal,
- date:dateVal,
- userExec:"dummy",
- is_recurring:is_recurringVal,
- period:periodVal,
- period_units:periodUnits
- },
- success: function(data) {
- if (data.status == "success") {
- console.log("Successfully scheduled for "+projectName+"."+flowName);
- if (data.action == "redirect") {
- window.location = contextURL + "/manager?project=" + projectName + "&flow=" + flowName ;
- }
- else{
- $("#success_message").text("Flow " + projectName + "." + flowName + " scheduled!" );
- window.location = contextURL + "/manager?project=" + projectName + "&flow=" + flowName ;
- }
- }
- else {
- if (data.action == "login") {
- window.location = "";
- }
- else {
- $("#errorMsg").text("ERROR: " + data.message);
- $("#errorMsg").slideDown("fast");
- }
- }
- }
- });
-
- },
- render: function() {
- }
+ events : {
+ "click #schedule-btn": "handleScheduleFlow",
+ "click #adv-schedule-opt-btn": "handleAdvancedSchedule"
+ },
+ initialize : function(settings) {
+ $( "#datepicker" ).datepicker();
+ $( "#datepicker" ).datepicker('setDate', new Date());
+ $("#errorMsg").hide();
+ },
+ handleAdvancedSchedule : function(evt) {
+ console.log("Clicked advanced schedule options button");
+ //$('#confirm-container').hide();
+ $.modal.close();
+ advancedScheduleView.show();
+ },
+ handleScheduleFlow : function(evt) {
+
+ var hourVal = $('#hour').val();
+ var minutesVal = $('#minutes').val();
+ var ampmVal = $('#am_pm').val();
+ var timezoneVal = $('#timezone').val();
+ var dateVal = $('#datepicker').val();
+ var is_recurringVal = $('#is_recurring').val();
+ var periodVal = $('#period').val();
+ var periodUnits = $('#period_units').val();
+
+ console.log("Creating schedule for "+projectName+"."+flowName);
+ $.ajax({
+ async: "false",
+ url: "schedule",
+ dataType: "json",
+ type: "POST",
+ data: {
+ action:"scheduleFlow",
+ projectId:projectId,
+ projectName:projectName,
+ flowName:flowName,
+ hour:hourVal,
+ minutes:minutesVal,
+ am_pm:ampmVal,
+ timezone:timezoneVal,
+ date:dateVal,
+ userExec:"dummy",
+ is_recurring:is_recurringVal,
+ period:periodVal,
+ period_units:periodUnits
+ },
+ success: function(data) {
+ if (data.status == "success") {
+ console.log("Successfully scheduled for "+projectName+"."+flowName);
+ if (data.action == "redirect") {
+ window.location = contextURL + "/manager?project=" + projectName + "&flow=" + flowName ;
+ }
+ else{
+ $("#success_message").text("Flow " + projectName + "." + flowName + " scheduled!" );
+ window.location = contextURL + "/manager?project=" + projectName + "&flow=" + flowName ;
+ }
+ }
+ else {
+ if (data.action == "login") {
+ window.location = "";
+ }
+ else {
+ $("#errorMsg").text("ERROR: " + data.message);
+ $("#errorMsg").slideDown("fast");
+ }
+ }
+ }
+ });
+
+ },
+ render: function() {
+ }
});
azkaban.AdvancedScheduleView = Backbone.View.extend({
- events : {
- "click" : "closeEditingTarget",
- "click #adv-schedule-btn": "handleAdvSchedule",
- "click #schedule-cancel-btn": "handleCancel",
- "click .modal-close": "handleCancel",
- "click #scheduleGeneralOptions": "handleGeneralOptionsSelect",
- "click #scheduleFlowOptions": "handleFlowOptionsSelect",
- "click #scheduleAddRow": "handleAddRow",
- "click table .editable": "handleEditColumn",
- "click table .removeIcon": "handleRemoveColumn"
- },
- initialize: function(setting) {
- this.contextMenu = new azkaban.ScheduleContextMenu({el:$('#scheduleDisableJobMenu')});
- this.handleGeneralOptionsSelect();
- $( "#advdatepicker" ).datepicker();
- $( "#advdatepicker" ).datepicker('setDate', new Date());
- },
- show: function() {
- $('#scheduleModalBackground').show();
- $('#schedule-options').show();
- this.handleGeneralOptionsSelect();
-
- scheduleFlowData = this.model.clone();
- this.flowData = scheduleFlowData;
- var flowData = scheduleFlowData;
-
- var fetchData = {"project": projectName, "ajax":"flowInfo", "flow":flowName};
-
- var executeURL = contextURL + "/executor";
- this.executeURL = executeURL;
- var scheduleURL = contextURL + "/schedule";
- this.scheduleURL = scheduleURL;
- var handleAddRow = this.handleAddRow;
-
- var data = flowData.get("data");
- var nodes = {};
- for (var i=0; i < data.nodes.length; ++i) {
- var node = data.nodes[i];
- nodes[node.id] = node;
- }
-
- for (var i=0; i < data.edges.length; ++i) {
- var edge = data.edges[i];
- var fromNode = nodes[edge.from];
- var toNode = nodes[edge.target];
-
- if (!fromNode.outNodes) {
- fromNode.outNodes = {};
- }
- fromNode.outNodes[toNode.id] = toNode;
-
- if (!toNode.inNodes) {
- toNode.inNodes = {};
- }
- toNode.inNodes[fromNode.id] = fromNode;
- }
- flowData.set({nodes: nodes});
-
- var disabled = {};
- for (var i = 0; i < data.nodes.length; ++i) {
- var updateNode = data.nodes[i];
- if (updateNode.status == "DISABLED" || updateNode.status == "SKIPPED") {
- updateNode.status = "READY";
- disabled[updateNode.id] = true;
- }
- if (updateNode.status == "SUCCEEDED" || updateNode.status=="RUNNING") {
- disabled[updateNode.id] = true;
- }
- }
- flowData.set({disabled: disabled});
-
- $.get(
- executeURL,
- fetchData,
- function(data) {
- if (data.error) {
- alert(data.error);
- }
- else {
- if (data.successEmails) {
- $('#scheduleSuccessEmails').val(data.successEmails.join());
- }
- if (data.failureEmails) {
- $('#scheduleFailureEmails').val(data.failureEmails.join());
- }
-
- if (data.failureAction) {
- $('#scheduleFailureAction').val(data.failureAction);
- }
- if (data.notifyFailureFirst) {
- $('#scheduleNotifyFailureFirst').attr('checked', true);
- }
- if (data.notifyFailureLast) {
- $('#scheduleNotifyFailureLast').attr('checked', true);
- }
- if (data.flowParam) {
- var flowParam = data.flowParam;
- for (var key in flowParam) {
- var row = handleAddRow();
- var td = $(row).find('td');
- $(td[0]).text(key);
- $(td[1]).text(flowParam[key]);
- }
- }
-
- if (!data.running || data.running.length == 0) {
- $(".radio").attr("disabled", "disabled");
- $(".radioLabel").addClass("disabled", "disabled");
- }
- }
- },
- "json"
- );
- },
- handleCancel: function(evt) {
- $('#scheduleModalBackground').hide();
- $('#schedule-options').hide();
- },
- handleGeneralOptionsSelect: function(evt) {
- $('#scheduleFlowOptions').removeClass('selected');
- $('#scheduleGeneralOptions').addClass('selected');
-
- $('#scheduleGeneralPanel').show();
- $('#scheduleGraphPanel').hide();
- },
- handleFlowOptionsSelect: function(evt) {
- $('#scheduleGeneralOptions').removeClass('selected');
- $('#scheduleFlowOptions').addClass('selected');
-
- $('#scheduleGraphPanel').show();
- $('#scheduleGeneralPanel').hide();
-
- if (this.flowSetup) {
- return;
- }
-
- scheduleCustomSvgGraphView = new azkaban.SvgGraphView({el:$('#scheduleSvgDivCustom'), model: scheduleFlowData, rightClick: {id: 'scheduleDisableJobMenu', callback: this.handleDisableMenuClick}});
- scheduleCustomJobsListView = new azkaban.JobListView({el:$('#scheduleJobListCustom'), model: scheduleFlowData, rightClick: {id: 'scheduleDisableJobMenu', callback: this.handleDisableMenuClick}});
- scheduleFlowData.trigger("change:graph");
-
- this.flowSetup = true;
- },
- handleAdvSchedule: function(evt) {
- var scheduleURL = this.scheduleURL;
- var disabled = this.flowData.get("disabled");
- var disabledJobs = "";
- for(var job in disabled) {
- if(disabled[job] == true) {
- disabledJobs += "," + job;
- }
- }
- var failureAction = $('#scheduleFailureAction').val();
- var failureEmails = $('#scheduleFailureEmails').val();
- var successEmails = $('#scheduleSuccessEmails').val();
- var notifyFailureFirst = $('#scheduleNotifyFailureFirst').is(':checked');
- var notifyFailureLast = $('#scheduleNotifyFailureLast').is(':checked');
- var executingJobOption = $('input:radio[name=gender]:checked').val();
-
-
- var scheduleTime = $('#advhour').val() + "," + $('#advminutes').val() + "," + $('#advam_pm').val() + "," + $('#advtimezone').val();
- var scheduleDate = $('#advdatepicker').val();
- var is_recurring = $('#advis_recurring').val();
- var period = $('#advperiod').val() + $('#advperiod_units').val();
-
- var flowOverride = {};
- var editRows = $(".editRow");
- for (var i = 0; i < editRows.length; ++i) {
- var row = editRows[i];
- var td = $(row).find('td');
- var key = $(td[0]).text();
- var val = $(td[1]).text();
-
- if (key && key.length > 0) {
- flowOverride[key] = val;
- }
- }
-
- var scheduleData = {
- projectId:projectId,
- projectName: projectName,
- ajax: "advSchedule",
- flowName: flowName,
- scheduleTime: scheduleTime,
- scheduleDate: scheduleDate,
- is_recurring: is_recurring,
- period: period,
- disabledJobs: disabledJobs,
- failureAction: failureAction,
- failureEmails: failureEmails,
- successEmails: successEmails,
- notifyFailureFirst: notifyFailureFirst,
- notifyFailureLast: notifyFailureLast,
- executingJobOption: executingJobOption,
- flowOverride: flowOverride
- };
-
- $.post(
- scheduleURL,
- scheduleData,
- function(data) {
- if (data.error) {
- alert(data.error);
- }
- else {
- window.location = scheduleURL;
- }
- },
- "json"
- )
- },
- handleAddRow: function(evt) {
- var tr = document.createElement("tr");
- var tdName = document.createElement("td");
- var tdValue = document.createElement("td");
-
- var icon = document.createElement("span");
- $(icon).addClass("removeIcon");
- var nameData = document.createElement("span");
- $(nameData).addClass("spanValue");
- var valueData = document.createElement("span");
- $(valueData).addClass("spanValue");
-
- $(tdName).append(icon);
- $(tdName).append(nameData);
- $(tdName).addClass("name");
- $(tdName).addClass("editable");
-
- $(tdValue).append(valueData);
- $(tdValue).addClass("editable");
-
- $(tr).addClass("editRow");
- $(tr).append(tdName);
- $(tr).append(tdValue);
-
- $(tr).insertBefore("#scheduleAddRow");
- return tr;
- },
- handleEditColumn : function(evt) {
- var curTarget = evt.currentTarget;
-
- if (this.editingTarget != curTarget) {
- this.closeEditingTarget();
-
- var text = $(curTarget).children(".spanValue").text();
- $(curTarget).empty();
-
- var input = document.createElement("input");
- $(input).attr("type", "text");
- $(input).css("width", "100%");
- $(input).val(text);
- $(curTarget).addClass("editing");
- $(curTarget).append(input);
- $(input).focus();
- this.editingTarget = curTarget;
- }
- },
- handleRemoveColumn : function(evt) {
- var curTarget = evt.currentTarget;
- // Should be the table
- var row = curTarget.parentElement.parentElement;
- $(row).remove();
- },
- closeEditingTarget: function(evt) {
- if (this.editingTarget != null && this.editingTarget != evt.target && this.editingTarget != evt.target.parentElement ) {
- var input = $(this.editingTarget).children("input")[0];
- var text = $(input).val();
- $(input).remove();
-
- var valueData = document.createElement("span");
- $(valueData).addClass("spanValue");
- $(valueData).text(text);
-
- if ($(this.editingTarget).hasClass("name")) {
- var icon = document.createElement("span");
- $(icon).addClass("removeIcon");
- $(this.editingTarget).append(icon);
- }
-
- $(this.editingTarget).removeClass("editing");
- $(this.editingTarget).append(valueData);
- this.editingTarget = null;
- }
- },
- handleDisableMenuClick : function(action, el, pos) {
- var flowData = scheduleFlowData;
- var jobid = el[0].jobid;
- var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" + flowName + "&job=" + jobid;
- if (action == "open") {
- window.location.href = requestURL;
- }
- else if(action == "openwindow") {
- window.open(requestURL);
- }
- else if(action == "disable") {
- var disabled = flowData.get("disabled");
-
- disabled[jobid] = true;
- flowData.set({disabled: disabled});
- flowData.trigger("change:disabled");
- }
- else if(action == "disableAll") {
- var disabled = flowData.get("disabled");
-
- var nodes = flowData.get("nodes");
- for (var key in nodes) {
- disabled[key] = true;
- }
-
- flowData.set({disabled: disabled});
- flowData.trigger("change:disabled");
- }
- else if (action == "disableParents") {
- var disabled = flowData.get("disabled");
- var nodes = flowData.get("nodes");
- var inNodes = nodes[jobid].inNodes;
-
- if (inNodes) {
- for (var key in inNodes) {
- disabled[key] = true;
- }
- }
-
- flowData.set({disabled: disabled});
- flowData.trigger("change:disabled");
- }
- else if (action == "disableChildren") {
- var disabledMap = flowData.get("disabled");
- var nodes = flowData.get("nodes");
- var outNodes = nodes[jobid].outNodes;
-
- if (outNodes) {
- for (var key in outNodes) {
- disabledMap[key] = true;
- }
- }
-
- flowData.set({disabled: disabledMap});
- flowData.trigger("change:disabled");
- }
- else if (action == "disableAncestors") {
- var disabled = flowData.get("disabled");
- var nodes = flowData.get("nodes");
-
- recurseAllAncestors(nodes, disabled, jobid, true);
-
- flowData.set({disabled: disabled});
- flowData.trigger("change:disabled");
- }
- else if (action == "disableDescendents") {
- var disabled = flowData.get("disabled");
- var nodes = flowData.get("nodes");
-
- recurseAllDescendents(nodes, disabled, jobid, true);
-
- flowData.set({disabled: disabled});
- flowData.trigger("change:disabled");
- }
- else if(action == "enable") {
- var disabled = flowData.get("disabled");
-
- disabled[jobid] = false;
- flowData.set({disabled: disabled});
- flowData.trigger("change:disabled");
- }
- else if(action == "enableAll") {
- disabled = {};
- flowData.set({disabled: disabled});
- flowData.trigger("change:disabled");
- }
- else if (action == "enableParents") {
- var disabled = flowData.get("disabled");
- var nodes = flowData.get("nodes");
- var inNodes = nodes[jobid].inNodes;
-
- if (inNodes) {
- for (var key in inNodes) {
- disabled[key] = false;
- }
- }
-
- flowData.set({disabled: disabled});
- flowData.trigger("change:disabled");
- }
- else if (action == "enableChildren") {
- var disabled = flowData.get("disabled");
- var nodes = flowData.get("nodes");
- var outNodes = nodes[jobid].outNodes;
-
- if (outNodes) {
- for (var key in outNodes) {
- disabled[key] = false;
- }
- }
-
- flowData.set({disabled: disabled});
- flowData.trigger("change:disabled");
- }
- else if (action == "enableAncestors") {
- var disabled = flowData.get("disabled");
- var nodes = flowData.get("nodes");
-
- recurseAllAncestors(nodes, disabled, jobid, false);
-
- flowData.set({disabled: disabled});
- flowData.trigger("change:disabled");
- }
- else if (action == "enableDescendents") {
- var disabled = flowData.get("disabled");
- var nodes = flowData.get("nodes");
-
- recurseAllDescendents(nodes, disabled, jobid, false);
-
- flowData.set({disabled: disabled});
- flowData.trigger("change:disabled");
- }
- }
+ events : {
+ "click" : "closeEditingTarget",
+ "click #adv-schedule-btn": "handleAdvSchedule",
+ "click #schedule-cancel-btn": "handleCancel",
+ "click .modal-close": "handleCancel",
+ "click #scheduleGeneralOptions": "handleGeneralOptionsSelect",
+ "click #scheduleFlowOptions": "handleFlowOptionsSelect",
+ "click #scheduleAddRow": "handleAddRow",
+ "click table .editable": "handleEditColumn",
+ "click table .removeIcon": "handleRemoveColumn"
+ },
+ initialize: function(setting) {
+ this.contextMenu = new azkaban.ScheduleContextMenu({el:$('#scheduleDisableJobMenu')});
+ this.handleGeneralOptionsSelect();
+ $( "#advdatepicker" ).datepicker();
+ $( "#advdatepicker" ).datepicker('setDate', new Date());
+ },
+ show: function() {
+ $('#scheduleModalBackground').show();
+ $('#schedule-options').show();
+ this.handleGeneralOptionsSelect();
+
+ scheduleFlowData = this.model.clone();
+ this.flowData = scheduleFlowData;
+ var flowData = scheduleFlowData;
+
+ var fetchData = {"project": projectName, "ajax":"flowInfo", "flow":flowName};
+
+ var executeURL = contextURL + "/executor";
+ this.executeURL = executeURL;
+ var scheduleURL = contextURL + "/schedule";
+ this.scheduleURL = scheduleURL;
+ var handleAddRow = this.handleAddRow;
+
+ var data = flowData.get("data");
+ var nodes = {};
+ for (var i=0; i < data.nodes.length; ++i) {
+ var node = data.nodes[i];
+ nodes[node.id] = node;
+ }
+
+ for (var i=0; i < data.edges.length; ++i) {
+ var edge = data.edges[i];
+ var fromNode = nodes[edge.from];
+ var toNode = nodes[edge.target];
+
+ if (!fromNode.outNodes) {
+ fromNode.outNodes = {};
+ }
+ fromNode.outNodes[toNode.id] = toNode;
+
+ if (!toNode.inNodes) {
+ toNode.inNodes = {};
+ }
+ toNode.inNodes[fromNode.id] = fromNode;
+ }
+ flowData.set({nodes: nodes});
+
+ var disabled = {};
+ for (var i = 0; i < data.nodes.length; ++i) {
+ var updateNode = data.nodes[i];
+ if (updateNode.status == "DISABLED" || updateNode.status == "SKIPPED") {
+ updateNode.status = "READY";
+ disabled[updateNode.id] = true;
+ }
+ if (updateNode.status == "SUCCEEDED" || updateNode.status=="RUNNING") {
+ disabled[updateNode.id] = true;
+ }
+ }
+ flowData.set({disabled: disabled});
+
+ $.get(
+ executeURL,
+ fetchData,
+ function(data) {
+ if (data.error) {
+ alert(data.error);
+ }
+ else {
+ if (data.successEmails) {
+ $('#scheduleSuccessEmails').val(data.successEmails.join());
+ }
+ if (data.failureEmails) {
+ $('#scheduleFailureEmails').val(data.failureEmails.join());
+ }
+
+ if (data.failureAction) {
+ $('#scheduleFailureAction').val(data.failureAction);
+ }
+ if (data.notifyFailureFirst) {
+ $('#scheduleNotifyFailureFirst').attr('checked', true);
+ }
+ if (data.notifyFailureLast) {
+ $('#scheduleNotifyFailureLast').attr('checked', true);
+ }
+ if (data.flowParam) {
+ var flowParam = data.flowParam;
+ for (var key in flowParam) {
+ var row = handleAddRow();
+ var td = $(row).find('td');
+ $(td[0]).text(key);
+ $(td[1]).text(flowParam[key]);
+ }
+ }
+
+ if (!data.running || data.running.length == 0) {
+ $(".radio").attr("disabled", "disabled");
+ $(".radioLabel").addClass("disabled", "disabled");
+ }
+ }
+ },
+ "json"
+ );
+ },
+ handleCancel: function(evt) {
+ $('#scheduleModalBackground').hide();
+ $('#schedule-options').hide();
+ },
+ handleGeneralOptionsSelect: function(evt) {
+ $('#scheduleFlowOptions').removeClass('selected');
+ $('#scheduleGeneralOptions').addClass('selected');
+
+ $('#scheduleGeneralPanel').show();
+ $('#scheduleGraphPanel').hide();
+ },
+ handleFlowOptionsSelect: function(evt) {
+ $('#scheduleGeneralOptions').removeClass('selected');
+ $('#scheduleFlowOptions').addClass('selected');
+
+ $('#scheduleGraphPanel').show();
+ $('#scheduleGeneralPanel').hide();
+
+ if (this.flowSetup) {
+ return;
+ }
+
+ scheduleCustomSvgGraphView = new azkaban.SvgGraphView({el:$('#scheduleSvgDivCustom'), model: scheduleFlowData, rightClick: {id: 'scheduleDisableJobMenu', callback: this.handleDisableMenuClick}});
+ scheduleCustomJobsListView = new azkaban.JobListView({el:$('#scheduleJobListCustom'), model: scheduleFlowData, rightClick: {id: 'scheduleDisableJobMenu', callback: this.handleDisableMenuClick}});
+ scheduleFlowData.trigger("change:graph");
+
+ this.flowSetup = true;
+ },
+ handleAdvSchedule: function(evt) {
+ var scheduleURL = this.scheduleURL;
+ var disabled = this.flowData.get("disabled");
+ var disabledJobs = "";
+ for(var job in disabled) {
+ if(disabled[job] == true) {
+ disabledJobs += "," + job;
+ }
+ }
+ var failureAction = $('#scheduleFailureAction').val();
+ var failureEmails = $('#scheduleFailureEmails').val();
+ var successEmails = $('#scheduleSuccessEmails').val();
+ var notifyFailureFirst = $('#scheduleNotifyFailureFirst').is(':checked');
+ var notifyFailureLast = $('#scheduleNotifyFailureLast').is(':checked');
+ var executingJobOption = $('input:radio[name=gender]:checked').val();
+
+
+ var scheduleTime = $('#advhour').val() + "," + $('#advminutes').val() + "," + $('#advam_pm').val() + "," + $('#advtimezone').val();
+ var scheduleDate = $('#advdatepicker').val();
+ var is_recurring = $('#advis_recurring').val();
+ var period = $('#advperiod').val() + $('#advperiod_units').val();
+
+ var flowOverride = {};
+ var editRows = $(".editRow");
+ for (var i = 0; i < editRows.length; ++i) {
+ var row = editRows[i];
+ var td = $(row).find('td');
+ var key = $(td[0]).text();
+ var val = $(td[1]).text();
+
+ if (key && key.length > 0) {
+ flowOverride[key] = val;
+ }
+ }
+
+ var scheduleData = {
+ projectId:projectId,
+ projectName: projectName,
+ ajax: "advSchedule",
+ flowName: flowName,
+ scheduleTime: scheduleTime,
+ scheduleDate: scheduleDate,
+ is_recurring: is_recurring,
+ period: period,
+ disabledJobs: disabledJobs,
+ failureAction: failureAction,
+ failureEmails: failureEmails,
+ successEmails: successEmails,
+ notifyFailureFirst: notifyFailureFirst,
+ notifyFailureLast: notifyFailureLast,
+ executingJobOption: executingJobOption,
+ flowOverride: flowOverride
+ };
+
+ $.post(
+ scheduleURL,
+ scheduleData,
+ function(data) {
+ if (data.error) {
+ alert(data.error);
+ }
+ else {
+ window.location = scheduleURL;
+ }
+ },
+ "json"
+ )
+ },
+ handleAddRow: function(evt) {
+ var tr = document.createElement("tr");
+ var tdName = document.createElement("td");
+ var tdValue = document.createElement("td");
+
+ var icon = document.createElement("span");
+ $(icon).addClass("removeIcon");
+ var nameData = document.createElement("span");
+ $(nameData).addClass("spanValue");
+ var valueData = document.createElement("span");
+ $(valueData).addClass("spanValue");
+
+ $(tdName).append(icon);
+ $(tdName).append(nameData);
+ $(tdName).addClass("name");
+ $(tdName).addClass("editable");
+
+ $(tdValue).append(valueData);
+ $(tdValue).addClass("editable");
+
+ $(tr).addClass("editRow");
+ $(tr).append(tdName);
+ $(tr).append(tdValue);
+
+ $(tr).insertBefore("#scheduleAddRow");
+ return tr;
+ },
+ handleEditColumn : function(evt) {
+ var curTarget = evt.currentTarget;
+
+ if (this.editingTarget != curTarget) {
+ this.closeEditingTarget();
+
+ var text = $(curTarget).children(".spanValue").text();
+ $(curTarget).empty();
+
+ var input = document.createElement("input");
+ $(input).attr("type", "text");
+ $(input).css("width", "100%");
+ $(input).val(text);
+ $(curTarget).addClass("editing");
+ $(curTarget).append(input);
+ $(input).focus();
+ this.editingTarget = curTarget;
+ }
+ },
+ handleRemoveColumn : function(evt) {
+ var curTarget = evt.currentTarget;
+ // Should be the table
+ var row = curTarget.parentElement.parentElement;
+ $(row).remove();
+ },
+ closeEditingTarget: function(evt) {
+ if (this.editingTarget != null && this.editingTarget != evt.target && this.editingTarget != evt.target.parentElement ) {
+ var input = $(this.editingTarget).children("input")[0];
+ var text = $(input).val();
+ $(input).remove();
+
+ var valueData = document.createElement("span");
+ $(valueData).addClass("spanValue");
+ $(valueData).text(text);
+
+ if ($(this.editingTarget).hasClass("name")) {
+ var icon = document.createElement("span");
+ $(icon).addClass("removeIcon");
+ $(this.editingTarget).append(icon);
+ }
+
+ $(this.editingTarget).removeClass("editing");
+ $(this.editingTarget).append(valueData);
+ this.editingTarget = null;
+ }
+ },
+ handleDisableMenuClick : function(action, el, pos) {
+ var flowData = scheduleFlowData;
+ var jobid = el[0].jobid;
+ var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" + flowName + "&job=" + jobid;
+ if (action == "open") {
+ window.location.href = requestURL;
+ }
+ else if(action == "openwindow") {
+ window.open(requestURL);
+ }
+ else if(action == "disable") {
+ var disabled = flowData.get("disabled");
+
+ disabled[jobid] = true;
+ flowData.set({disabled: disabled});
+ flowData.trigger("change:disabled");
+ }
+ else if(action == "disableAll") {
+ var disabled = flowData.get("disabled");
+
+ var nodes = flowData.get("nodes");
+ for (var key in nodes) {
+ disabled[key] = true;
+ }
+
+ flowData.set({disabled: disabled});
+ flowData.trigger("change:disabled");
+ }
+ else if (action == "disableParents") {
+ var disabled = flowData.get("disabled");
+ var nodes = flowData.get("nodes");
+ var inNodes = nodes[jobid].inNodes;
+
+ if (inNodes) {
+ for (var key in inNodes) {
+ disabled[key] = true;
+ }
+ }
+
+ flowData.set({disabled: disabled});
+ flowData.trigger("change:disabled");
+ }
+ else if (action == "disableChildren") {
+ var disabledMap = flowData.get("disabled");
+ var nodes = flowData.get("nodes");
+ var outNodes = nodes[jobid].outNodes;
+
+ if (outNodes) {
+ for (var key in outNodes) {
+ disabledMap[key] = true;
+ }
+ }
+
+ flowData.set({disabled: disabledMap});
+ flowData.trigger("change:disabled");
+ }
+ else if (action == "disableAncestors") {
+ var disabled = flowData.get("disabled");
+ var nodes = flowData.get("nodes");
+
+ recurseAllAncestors(nodes, disabled, jobid, true);
+
+ flowData.set({disabled: disabled});
+ flowData.trigger("change:disabled");
+ }
+ else if (action == "disableDescendents") {
+ var disabled = flowData.get("disabled");
+ var nodes = flowData.get("nodes");
+
+ recurseAllDescendents(nodes, disabled, jobid, true);
+
+ flowData.set({disabled: disabled});
+ flowData.trigger("change:disabled");
+ }
+ else if(action == "enable") {
+ var disabled = flowData.get("disabled");
+
+ disabled[jobid] = false;
+ flowData.set({disabled: disabled});
+ flowData.trigger("change:disabled");
+ }
+ else if(action == "enableAll") {
+ disabled = {};
+ flowData.set({disabled: disabled});
+ flowData.trigger("change:disabled");
+ }
+ else if (action == "enableParents") {
+ var disabled = flowData.get("disabled");
+ var nodes = flowData.get("nodes");
+ var inNodes = nodes[jobid].inNodes;
+
+ if (inNodes) {
+ for (var key in inNodes) {
+ disabled[key] = false;
+ }
+ }
+
+ flowData.set({disabled: disabled});
+ flowData.trigger("change:disabled");
+ }
+ else if (action == "enableChildren") {
+ var disabled = flowData.get("disabled");
+ var nodes = flowData.get("nodes");
+ var outNodes = nodes[jobid].outNodes;
+
+ if (outNodes) {
+ for (var key in outNodes) {
+ disabled[key] = false;
+ }
+ }
+
+ flowData.set({disabled: disabled});
+ flowData.trigger("change:disabled");
+ }
+ else if (action == "enableAncestors") {
+ var disabled = flowData.get("disabled");
+ var nodes = flowData.get("nodes");
+
+ recurseAllAncestors(nodes, disabled, jobid, false);
+
+ flowData.set({disabled: disabled});
+ flowData.trigger("change:disabled");
+ }
+ else if (action == "enableDescendents") {
+ var disabled = flowData.get("disabled");
+ var nodes = flowData.get("nodes");
+
+ recurseAllDescendents(nodes, disabled, jobid, false);
+
+ flowData.set({disabled: disabled});
+ flowData.trigger("change:disabled");
+ }
+ }
});
src/web/js/azkaban/view/schedule-panel.js 124(+62 -62)
diff --git a/src/web/js/azkaban/view/schedule-panel.js b/src/web/js/azkaban/view/schedule-panel.js
index aa73f31..13da163 100644
--- a/src/web/js/azkaban/view/schedule-panel.js
+++ b/src/web/js/azkaban/view/schedule-panel.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -18,76 +18,76 @@ $.namespace('azkaban');
var schedulePanelView;
azkaban.SchedulePanelView = Backbone.View.extend({
- events: {
- "click #schedule-button": "scheduleFlow"
- },
-
- initialize: function(settings) {
- $("#timepicker").datetimepicker({pickDate: false});
- $("#datepicker").datetimepicker({pickTime: false});
- },
-
- render: function() {
- },
-
- showSchedulePanel: function() {
- $('#schedule-modal').modal();
- },
-
- hideSchedulePanel: function() {
- $('#schedule-modal').modal("hide");
- },
-
- scheduleFlow: function() {
+ events: {
+ "click #schedule-button": "scheduleFlow"
+ },
+
+ initialize: function(settings) {
+ $("#timepicker").datetimepicker({pickDate: false});
+ $("#datepicker").datetimepicker({pickTime: false});
+ },
+
+ render: function() {
+ },
+
+ showSchedulePanel: function() {
+ $('#schedule-modal').modal();
+ },
+
+ hideSchedulePanel: function() {
+ $('#schedule-modal').modal("hide");
+ },
+
+ scheduleFlow: function() {
var timeVal = $('#timepicker').val();
- var timezoneVal = $('#timezone').val();
+ var timezoneVal = $('#timezone').val();
+
+ var dateVal = $('#datepicker').val();
- var dateVal = $('#datepicker').val();
-
var is_recurringVal = $('#is_recurring').val();
- var periodVal = $('#period').val();
- var periodUnits = $('#period_units').val();
-
- var scheduleURL = contextURL + "/schedule"
- var scheduleData = flowExecuteDialogView.getExecutionOptionData();
+ var periodVal = $('#period').val();
+ var periodUnits = $('#period_units').val();
+
+ var scheduleURL = contextURL + "/schedule"
+ var scheduleData = flowExecuteDialogView.getExecutionOptionData();
- console.log("Creating schedule for " + projectName + "." +
- scheduleData.flow);
+ console.log("Creating schedule for " + projectName + "." +
+ scheduleData.flow);
var scheduleTime = moment(timeVal, 'h/mm A').format('h,mm,A,') + timezoneVal;
console.log(scheduleTime);
-
- var scheduleDate = $('#datepicker').val();
- var is_recurring = document.getElementById('is_recurring').checked
- ? 'on' : 'off';
- var period = $('#period').val() + $('#period_units').val();
-
- scheduleData.ajax = "scheduleFlow";
- scheduleData.projectName = projectName;
- scheduleData.scheduleTime = scheduleTime;
- scheduleData.scheduleDate = scheduleDate;
- scheduleData.is_recurring = is_recurring;
- scheduleData.period = period;
-
- var successHandler = function(data) {
- if (data.error) {
- schedulePanelView.hideSchedulePanel();
- messageDialogView.show("Error Scheduling Flow", data.message);
- }
- else {
- schedulePanelView.hideSchedulePanel();
- messageDialogView.show("Flow Scheduled", data.message, function() {
+
+ var scheduleDate = $('#datepicker').val();
+ var is_recurring = document.getElementById('is_recurring').checked
+ ? 'on' : 'off';
+ var period = $('#period').val() + $('#period_units').val();
+
+ scheduleData.ajax = "scheduleFlow";
+ scheduleData.projectName = projectName;
+ scheduleData.scheduleTime = scheduleTime;
+ scheduleData.scheduleDate = scheduleDate;
+ scheduleData.is_recurring = is_recurring;
+ scheduleData.period = period;
+
+ var successHandler = function(data) {
+ if (data.error) {
+ schedulePanelView.hideSchedulePanel();
+ messageDialogView.show("Error Scheduling Flow", data.message);
+ }
+ else {
+ schedulePanelView.hideSchedulePanel();
+ messageDialogView.show("Flow Scheduled", data.message, function() {
window.location.href = scheduleURL;
});
- }
- };
+ }
+ };
- $.post(scheduleURL, scheduleData, successHandler, "json");
- }
+ $.post(scheduleURL, scheduleData, successHandler, "json");
+ }
});
$(function() {
- schedulePanelView = new azkaban.SchedulePanelView({
- el: $('#schedule-modal')
- });
+ schedulePanelView = new azkaban.SchedulePanelView({
+ el: $('#schedule-modal')
+ });
});
src/web/js/azkaban/view/schedule-sla.js 550(+275 -275)
diff --git a/src/web/js/azkaban/view/schedule-sla.js b/src/web/js/azkaban/view/schedule-sla.js
index 0fecd0b..cffffca 100644
--- a/src/web/js/azkaban/view/schedule-sla.js
+++ b/src/web/js/azkaban/view/schedule-sla.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -17,276 +17,276 @@
$.namespace('azkaban');
azkaban.ChangeSlaView = Backbone.View.extend({
- events: {
- "click": "closeEditingTarget",
- "click #set-sla-btn": "handleSetSla",
- "click #remove-sla-btn": "handleRemoveSla",
- "click #add-btn": "handleAddRow"
- },
-
- initialize: function(setting) {
- $('#sla-options').on('hidden.bs.modal', function() {
- slaView.handleSlaCancel();
- });
- },
-
- handleSlaCancel: function() {
- console.log("Clicked cancel button");
- var scheduleURL = contextURL + "/schedule";
- var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
- var rows = tFlowRules.rows;
- var rowLength = rows.length
- for (var i = 0; i < rowLength-1; i++) {
- tFlowRules.deleteRow(0);
- }
- },
-
- initFromSched: function(scheduleId, flowName) {
- this.scheduleId = scheduleId;
- var scheduleURL = contextURL + "/schedule"
- this.scheduleURL = scheduleURL;
-
- var indexToName = {};
- var nameToIndex = {};
- var indexToText = {};
- this.indexToName = indexToName;
- this.nameToIndex = nameToIndex;
- this.indexToText = indexToText;
-
- var ruleBoxOptions = ["SUCCESS", "FINISH"];
- this.ruleBoxOptions = ruleBoxOptions;
-
- var fetchScheduleData = {
- "scheduleId": this.scheduleId,
- "ajax": "slaInfo"
- };
-
- var successHandler = function(data) {
- if (data.error) {
- alert(data.error);
- return;
- }
- if (data.slaEmails) {
- $('#slaEmails').val(data.slaEmails.join());
- }
-
- var allJobNames = data.allJobNames;
-
- indexToName[0] = "";
- nameToIndex[flowName] = 0;
- indexToText[0] = "flow " + flowName;
- for (var i = 1; i <= allJobNames.length; i++) {
- indexToName[i] = allJobNames[i-1];
- nameToIndex[allJobNames[i-1]] = i;
- indexToText[i] = "job " + allJobNames[i-1];
- }
-
- // populate with existing settings
- if (data.settings) {
- var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
- for (var setting in data.settings) {
- var rFlowRule = tFlowRules.insertRow(0);
-
- var cId = rFlowRule.insertCell(-1);
- var idSelect = document.createElement("select");
- idSelect.setAttribute("class", "form-control");
- for (var i in indexToName) {
- idSelect.options[i] = new Option(indexToText[i], indexToName[i]);
- if (data.settings[setting].id == indexToName[i]) {
- idSelect.options[i].selected = true;
- }
- }
- cId.appendChild(idSelect);
-
- var cRule = rFlowRule.insertCell(-1);
- var ruleSelect = document.createElement("select");
- ruleSelect.setAttribute("class", "form-control");
- for (var i in ruleBoxOptions) {
- ruleSelect.options[i] = new Option(ruleBoxOptions[i], ruleBoxOptions[i]);
- if (data.settings[setting].rule == ruleBoxOptions[i]) {
- ruleSelect.options[i].selected = true;
- }
- }
- cRule.appendChild(ruleSelect);
-
- var cDuration = rFlowRule.insertCell(-1);
- var duration = document.createElement("input");
- duration.type = "text";
- duration.setAttribute("class", "form-control durationpick");
- var rawMinutes = data.settings[setting].duration;
- var intMinutes = rawMinutes.substring(0, rawMinutes.length-1);
- var minutes = parseInt(intMinutes);
- var hours = Math.floor(minutes / 60);
- minutes = minutes % 60;
- duration.value = hours + ":" + minutes;
- cDuration.appendChild(duration);
-
- var cEmail = rFlowRule.insertCell(-1);
- var emailCheck = document.createElement("input");
- emailCheck.type = "checkbox";
- for (var act in data.settings[setting].actions) {
- if (data.settings[setting].actions[act] == "EMAIL") {
- emailCheck.checked = true;
- }
- }
- cEmail.appendChild(emailCheck);
-
- var cKill = rFlowRule.insertCell(-1);
- var killCheck = document.createElement("input");
- killCheck.type = "checkbox";
- for (var act in data.settings[setting].actions) {
- if (data.settings[setting].actions[act] == "KILL") {
- killCheck.checked = true;
- }
- }
- cKill.appendChild(killCheck);
- $('.durationpick').datetimepicker({
- pickDate: false,
- use24hours: true
- });
- }
- }
- $('.durationpick').datetimepicker({
- pickDate: false,
- use24hours: true
- });
- };
-
- $.get(this.scheduleURL, fetchScheduleData, successHandler, "json");
-
- $('#sla-options').modal();
-
- //this.schedFlowOptions = sched.flowOptions
- console.log("Loaded schedule info. Ready to set SLA.");
- },
-
- handleRemoveSla: function(evt) {
- console.log("Clicked remove sla button");
- var scheduleURL = this.scheduleURL;
- var redirectURL = this.scheduleURL;
- var requestData = {
- "action": "removeSla",
- "scheduleId": this.scheduleId
- };
- var successHandler = function(data) {
- if (data.error) {
- $('#errorMsg').text(data.error)
- }
- else {
- window.location = redirectURL
- }
- };
- $.post(scheduleURL, requestData, successHanlder, "json");
- },
-
- handleSetSla: function(evt) {
- var slaEmails = $('#slaEmails').val();
- var settings = {};
- var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
- for (var row = 0; row < tFlowRules.rows.length-1; row++) {
- var rFlowRule = tFlowRules.rows[row];
- var id = rFlowRule.cells[0].firstChild.value;
- var rule = rFlowRule.cells[1].firstChild.value;
- var duration = rFlowRule.cells[2].firstChild.value;
- var email = rFlowRule.cells[3].firstChild.checked;
- var kill = rFlowRule.cells[4].firstChild.checked;
- settings[row] = id + "," + rule + "," + duration + "," + email + "," + kill;
- }
-
- var slaData = {
- scheduleId: this.scheduleId,
- ajax: "setSla",
- slaEmails: slaEmails,
- settings: settings
- };
-
- var scheduleURL = this.scheduleURL;
- var successHandler = function(data) {
- if (data.error) {
- alert(data.error);
- }
- else {
- tFlowRules.length = 0;
- window.location = scheduleURL;
- }
- };
- $.post(scheduleURL, slaData, successHandler, "json");
- },
-
- handleAddRow: function(evt) {
- var indexToName = this.indexToName;
- var nameToIndex = this.nameToIndex;
- var indexToText = this.indexToText;
- var ruleBoxOptions = this.ruleBoxOptions;
-
- var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
- var rFlowRule = tFlowRules.insertRow(tFlowRules.rows.length-1);
-
- var cId = rFlowRule.insertCell(-1);
- var idSelect = document.createElement("select");
- idSelect.setAttribute("class", "form-control");
- for (var i in indexToName) {
- idSelect.options[i] = new Option(indexToText[i], indexToName[i]);
- }
- cId.appendChild(idSelect);
-
- var cRule = rFlowRule.insertCell(-1);
- var ruleSelect = document.createElement("select");
- ruleSelect.setAttribute("class", "form-control");
- for (var i in ruleBoxOptions) {
- ruleSelect.options[i] = new Option(ruleBoxOptions[i], ruleBoxOptions[i]);
- }
- cRule.appendChild(ruleSelect);
-
- var cDuration = rFlowRule.insertCell(-1);
- var duration = document.createElement("input");
- duration.type = "text";
- duration.setAttribute("class", "durationpick form-control");
- cDuration.appendChild(duration);
-
- var cEmail = rFlowRule.insertCell(-1);
- var emailCheck = document.createElement("input");
- emailCheck.type = "checkbox";
- cEmail.appendChild(emailCheck);
-
- var cKill = rFlowRule.insertCell(-1);
- var killCheck = document.createElement("input");
- killCheck.type = "checkbox";
- cKill.appendChild(killCheck);
-
- $('.durationpick').datetimepicker({
- pickDate: false,
- use24hours: true
- });
- return rFlowRule;
- },
-
- handleEditColumn: function(evt) {
- var curTarget = evt.currentTarget;
- if (this.editingTarget != curTarget) {
- this.closeEditingTarget();
-
- var text = $(curTarget).children(".spanValue").text();
- $(curTarget).empty();
-
- var input = document.createElement("input");
- $(input).attr("type", "text");
- $(input).css("width", "100%");
- $(input).val(text);
- $(curTarget).addClass("editing");
- $(curTarget).append(input);
- $(input).focus();
- this.editingTarget = curTarget;
- }
- },
-
- handleRemoveColumn: function(evt) {
- var curTarget = evt.currentTarget;
- // Should be the table
- var row = curTarget.parentElement.parentElement;
- $(row).remove();
- },
-
- closeEditingTarget: function(evt) {
- }
+ events: {
+ "click": "closeEditingTarget",
+ "click #set-sla-btn": "handleSetSla",
+ "click #remove-sla-btn": "handleRemoveSla",
+ "click #add-btn": "handleAddRow"
+ },
+
+ initialize: function(setting) {
+ $('#sla-options').on('hidden.bs.modal', function() {
+ slaView.handleSlaCancel();
+ });
+ },
+
+ handleSlaCancel: function() {
+ console.log("Clicked cancel button");
+ var scheduleURL = contextURL + "/schedule";
+ var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
+ var rows = tFlowRules.rows;
+ var rowLength = rows.length
+ for (var i = 0; i < rowLength-1; i++) {
+ tFlowRules.deleteRow(0);
+ }
+ },
+
+ initFromSched: function(scheduleId, flowName) {
+ this.scheduleId = scheduleId;
+ var scheduleURL = contextURL + "/schedule"
+ this.scheduleURL = scheduleURL;
+
+ var indexToName = {};
+ var nameToIndex = {};
+ var indexToText = {};
+ this.indexToName = indexToName;
+ this.nameToIndex = nameToIndex;
+ this.indexToText = indexToText;
+
+ var ruleBoxOptions = ["SUCCESS", "FINISH"];
+ this.ruleBoxOptions = ruleBoxOptions;
+
+ var fetchScheduleData = {
+ "scheduleId": this.scheduleId,
+ "ajax": "slaInfo"
+ };
+
+ var successHandler = function(data) {
+ if (data.error) {
+ alert(data.error);
+ return;
+ }
+ if (data.slaEmails) {
+ $('#slaEmails').val(data.slaEmails.join());
+ }
+
+ var allJobNames = data.allJobNames;
+
+ indexToName[0] = "";
+ nameToIndex[flowName] = 0;
+ indexToText[0] = "flow " + flowName;
+ for (var i = 1; i <= allJobNames.length; i++) {
+ indexToName[i] = allJobNames[i-1];
+ nameToIndex[allJobNames[i-1]] = i;
+ indexToText[i] = "job " + allJobNames[i-1];
+ }
+
+ // populate with existing settings
+ if (data.settings) {
+ var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
+ for (var setting in data.settings) {
+ var rFlowRule = tFlowRules.insertRow(0);
+
+ var cId = rFlowRule.insertCell(-1);
+ var idSelect = document.createElement("select");
+ idSelect.setAttribute("class", "form-control");
+ for (var i in indexToName) {
+ idSelect.options[i] = new Option(indexToText[i], indexToName[i]);
+ if (data.settings[setting].id == indexToName[i]) {
+ idSelect.options[i].selected = true;
+ }
+ }
+ cId.appendChild(idSelect);
+
+ var cRule = rFlowRule.insertCell(-1);
+ var ruleSelect = document.createElement("select");
+ ruleSelect.setAttribute("class", "form-control");
+ for (var i in ruleBoxOptions) {
+ ruleSelect.options[i] = new Option(ruleBoxOptions[i], ruleBoxOptions[i]);
+ if (data.settings[setting].rule == ruleBoxOptions[i]) {
+ ruleSelect.options[i].selected = true;
+ }
+ }
+ cRule.appendChild(ruleSelect);
+
+ var cDuration = rFlowRule.insertCell(-1);
+ var duration = document.createElement("input");
+ duration.type = "text";
+ duration.setAttribute("class", "form-control durationpick");
+ var rawMinutes = data.settings[setting].duration;
+ var intMinutes = rawMinutes.substring(0, rawMinutes.length-1);
+ var minutes = parseInt(intMinutes);
+ var hours = Math.floor(minutes / 60);
+ minutes = minutes % 60;
+ duration.value = hours + ":" + minutes;
+ cDuration.appendChild(duration);
+
+ var cEmail = rFlowRule.insertCell(-1);
+ var emailCheck = document.createElement("input");
+ emailCheck.type = "checkbox";
+ for (var act in data.settings[setting].actions) {
+ if (data.settings[setting].actions[act] == "EMAIL") {
+ emailCheck.checked = true;
+ }
+ }
+ cEmail.appendChild(emailCheck);
+
+ var cKill = rFlowRule.insertCell(-1);
+ var killCheck = document.createElement("input");
+ killCheck.type = "checkbox";
+ for (var act in data.settings[setting].actions) {
+ if (data.settings[setting].actions[act] == "KILL") {
+ killCheck.checked = true;
+ }
+ }
+ cKill.appendChild(killCheck);
+ $('.durationpick').datetimepicker({
+ pickDate: false,
+ use24hours: true
+ });
+ }
+ }
+ $('.durationpick').datetimepicker({
+ pickDate: false,
+ use24hours: true
+ });
+ };
+
+ $.get(this.scheduleURL, fetchScheduleData, successHandler, "json");
+
+ $('#sla-options').modal();
+
+ //this.schedFlowOptions = sched.flowOptions
+ console.log("Loaded schedule info. Ready to set SLA.");
+ },
+
+ handleRemoveSla: function(evt) {
+ console.log("Clicked remove sla button");
+ var scheduleURL = this.scheduleURL;
+ var redirectURL = this.scheduleURL;
+ var requestData = {
+ "action": "removeSla",
+ "scheduleId": this.scheduleId
+ };
+ var successHandler = function(data) {
+ if (data.error) {
+ $('#errorMsg').text(data.error)
+ }
+ else {
+ window.location = redirectURL
+ }
+ };
+    $.post(scheduleURL, requestData, successHandler, "json");
+ },
+
+ handleSetSla: function(evt) {
+ var slaEmails = $('#slaEmails').val();
+ var settings = {};
+ var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
+ for (var row = 0; row < tFlowRules.rows.length-1; row++) {
+ var rFlowRule = tFlowRules.rows[row];
+ var id = rFlowRule.cells[0].firstChild.value;
+ var rule = rFlowRule.cells[1].firstChild.value;
+ var duration = rFlowRule.cells[2].firstChild.value;
+ var email = rFlowRule.cells[3].firstChild.checked;
+ var kill = rFlowRule.cells[4].firstChild.checked;
+ settings[row] = id + "," + rule + "," + duration + "," + email + "," + kill;
+ }
+
+ var slaData = {
+ scheduleId: this.scheduleId,
+ ajax: "setSla",
+ slaEmails: slaEmails,
+ settings: settings
+ };
+
+ var scheduleURL = this.scheduleURL;
+ var successHandler = function(data) {
+ if (data.error) {
+ alert(data.error);
+ }
+ else {
+ tFlowRules.length = 0;
+ window.location = scheduleURL;
+ }
+ };
+ $.post(scheduleURL, slaData, successHandler, "json");
+ },
+
+ handleAddRow: function(evt) {
+ var indexToName = this.indexToName;
+ var nameToIndex = this.nameToIndex;
+ var indexToText = this.indexToText;
+ var ruleBoxOptions = this.ruleBoxOptions;
+
+ var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
+ var rFlowRule = tFlowRules.insertRow(tFlowRules.rows.length-1);
+
+ var cId = rFlowRule.insertCell(-1);
+ var idSelect = document.createElement("select");
+ idSelect.setAttribute("class", "form-control");
+ for (var i in indexToName) {
+ idSelect.options[i] = new Option(indexToText[i], indexToName[i]);
+ }
+ cId.appendChild(idSelect);
+
+ var cRule = rFlowRule.insertCell(-1);
+ var ruleSelect = document.createElement("select");
+ ruleSelect.setAttribute("class", "form-control");
+ for (var i in ruleBoxOptions) {
+ ruleSelect.options[i] = new Option(ruleBoxOptions[i], ruleBoxOptions[i]);
+ }
+ cRule.appendChild(ruleSelect);
+
+ var cDuration = rFlowRule.insertCell(-1);
+ var duration = document.createElement("input");
+ duration.type = "text";
+ duration.setAttribute("class", "durationpick form-control");
+ cDuration.appendChild(duration);
+
+ var cEmail = rFlowRule.insertCell(-1);
+ var emailCheck = document.createElement("input");
+ emailCheck.type = "checkbox";
+ cEmail.appendChild(emailCheck);
+
+ var cKill = rFlowRule.insertCell(-1);
+ var killCheck = document.createElement("input");
+ killCheck.type = "checkbox";
+ cKill.appendChild(killCheck);
+
+ $('.durationpick').datetimepicker({
+ pickDate: false,
+ use24hours: true
+ });
+ return rFlowRule;
+ },
+
+ handleEditColumn: function(evt) {
+ var curTarget = evt.currentTarget;
+ if (this.editingTarget != curTarget) {
+ this.closeEditingTarget();
+
+ var text = $(curTarget).children(".spanValue").text();
+ $(curTarget).empty();
+
+ var input = document.createElement("input");
+ $(input).attr("type", "text");
+ $(input).css("width", "100%");
+ $(input).val(text);
+ $(curTarget).addClass("editing");
+ $(curTarget).append(input);
+ $(input).focus();
+ this.editingTarget = curTarget;
+ }
+ },
+
+ handleRemoveColumn: function(evt) {
+ var curTarget = evt.currentTarget;
+ // Should be the table
+ var row = curTarget.parentElement.parentElement;
+ $(row).remove();
+ },
+
+ closeEditingTarget: function(evt) {
+ }
});
src/web/js/azkaban/view/schedule-svg.js 1034(+517 -517)
diff --git a/src/web/js/azkaban/view/schedule-svg.js b/src/web/js/azkaban/view/schedule-svg.js
index a49a8a9..dada311 100644
--- a/src/web/js/azkaban/view/schedule-svg.js
+++ b/src/web/js/azkaban/view/schedule-svg.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -18,518 +18,518 @@ $.namespace('azkaban');
$(function() {
- var border = 20;
- var header = 30;
- var minTimeWidth = 80;
- var timeWidth = minTimeWidth;
- var lineHeight = 40;
- var numDays = 7;
- var today = new Date();
- var totalHeight = (border * 2 + header + 24 * lineHeight);
- var monthConst = ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"];
- var dayOfWeekConst = ["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday","Sunday"];
- var hourMillisConst = 3600 * 1000;
- var dayMillisConst = 24 * hourMillisConst;
-
- $("#svgDivCustom").svg({onLoad:
- function (svg) {
-
- var totalWidth = $("#svgDivCustom").width();
-
- $("#svgDivCustom").find("svg").eq(0).removeAttr("width");
-
-
- //Outer g
- var gMain = svg.group({transform: "translate(" + border + ".5," + border + ".5)", stroke : "#999", strokeWidth: 1});
- var defaultDate = new Date(today.setDate(today.getDate() - today.getDay()));
- today = new Date();
- var svgDate = defaultDate;
-
- //Load the date from the hash if existing
- if(window.location.hash) {
- try {
- var dateParts = window.location.hash.replace("#", "").split("-");
- var newDate = new Date(parseInt(dateParts[0]), parseInt(dateParts[1]) - 1, parseInt(dateParts[2]));
- if(!isNaN(newDate)) {
- svgDate = newDate;
- }
- }
- catch(err){ }
- }
-
- //Used to filter projects or flows out
- var filterProject = new Array();
- var filterFlow = new Array();
-
- $(".nav-prev-week").click(function (event) {
- svgDate = new Date(svgDate.valueOf() - 7 * dayMillisConst);
- window.location.hash = "#" + svgDate.getFullYear() + "-" + (svgDate.getMonth() + 1) + "-" + svgDate.getDate();
- loadSvg(svgDate);
- event.stopPropagation();
- });
- $(".nav-next-week").click(function (event) {
- svgDate = new Date(svgDate.valueOf() + 7 * dayMillisConst);
- window.location.hash = "#" + svgDate.getFullYear() + "-" + (svgDate.getMonth() + 1) + "-" + svgDate.getDate();
- loadSvg(svgDate);
- event.stopPropagation();
- });
- $(".nav-this-week").click(function (event) {
- svgDate = defaultDate;
- window.location.hash = "#" + svgDate.getFullYear() + "-" + (svgDate.getMonth() + 1) + "-" + svgDate.getDate();
- loadSvg(svgDate);
- event.stopPropagation();
- });
-
-
-
- loadSvg(svgDate);
-
- function loadSvg(firstDay)
- {
- //Text to show which month/year it is
- var monthIndicatorText = monthConst[firstDay.getMonth()] + " " + firstDay.getFullYear().toString();
- //Measure a good width for the text to display well
- timeWidth = Math.max(minTimeWidth, measureText(svg, monthIndicatorText, {fontSize: "20", style: "text-anchor: end;"}));
-
- var dayWidth = Math.floor((totalWidth - 3 * border - timeWidth) / numDays);
-
- //svg.configure({viewBox: "0 0 " + totalWidth + " " + totalHeight, style: "width:100%"}, true);
- svg.remove(gMain);
- gMain = svg.group({transform: "translate(" + border + ".5," + border + ".5)", stroke : "#999", strokeWidth: 1});
- svg.text(gMain, timeWidth, header - 8, monthIndicatorText, {fontSize: "20", style: "text-anchor: end;", fill : "#F60", stroke : "none"});
- //time indicator group
- var gLeft = svg.group(gMain, {transform: "translate(0," + header + ")"});
- //Draw lines and hours
- for(var i = 0; i < 24; i++)
- {
- svg.line(gLeft, 0, i * lineHeight, timeWidth, i * lineHeight);
- //Gets the hour text from an integer from 0 to 23
- var hourText = getHourText(i);
- //Move text down a bit? TODO: Is there a CSS option for top anchor?
- svg.text(gLeft, timeWidth, i * lineHeight + 15, hourText, {fontSize: "14", style: "text-anchor: end;", fill : "#333", stroke : "none"});
- }
-
- //var firstDay = new Date();//(new Date()).valueOf();
- firstDay = new Date(firstDay.getFullYear(), firstDay.getMonth(), firstDay.getDate()).valueOf();
- var isThisWeek = -1;
- //Draw background
- for(var deltaDay = 0; deltaDay < numDays; deltaDay++)
- {
- //Day group
- var gDay = svg.group(gMain, {transform: "translate(" + (border + timeWidth + deltaDay * dayWidth) + "," + header + ")"});
-
- //This is temporary.
- var date = new Date(firstDay + dayMillisConst * deltaDay);
- var day = date.getDate();
-
- //Draw box around
- var isToday = date.getFullYear() == today.getFullYear() && date.getMonth() == today.getMonth() && date.getDate() == today.getDate();
- if(isToday)
- {
- isThisWeek = deltaDay;
- }
- svg.rect(gDay, 0, -header, dayWidth, 24 * lineHeight + header, {fill : "none", stroke : "#F60"});
- //Draw day title
- svg.text(gDay, 6, -8, day + " " + dayOfWeekConst[date.getDay()], {fontSize: "20", fill : isToday?"#06C":"#F60", stroke : "none"});
-
- //Draw horizontal lines
- for(var i = 0; i < 24; i++)
- {
- svg.line(gDay, 0, i * lineHeight, dayWidth, i * lineHeight);
- }
- }
-
- var gDayViewOuterGroup = svg.group(gMain);
- var gDayView = svg.group(gDayViewOuterGroup, {transform: "translate(" + (border + timeWidth) + "," + header + ")"});
- if(isThisWeek != -1)
- {
- var date = new Date(firstDay + dayMillisConst * isThisWeek);
- var day = date.getDate();
- var gDay = svg.group(gMain, {transform: "translate(" + (border + timeWidth + isThisWeek * dayWidth) + "," + header + ")"});
- svg.rect(gDay, 0, -header, dayWidth, 24 * lineHeight + header, {fill : "none", stroke : "#06F"});
- var lineY = Math.floor(today.getHours() * lineHeight + today.getMinutes() * lineHeight / 60);
- svg.line(gDay, 0, lineY, dayWidth, lineY, {fill : "none", stroke : "#06F", strokeWidth : 4});
- }
-
- //A list of all items
- var itemByDay = new Array();
- for(var deltaDay = 0; deltaDay < numDays; deltaDay++) {
- itemByDay[deltaDay] = new Array();
- }
-
- var itemByScheduleIdMap = {};
-
- function filterApplies(item) {
- for(var i = 0; i < filterProject.length; i++) {
- if(item.projectname == filterProject[i].projectname) {
- return true;
- }
- }
- for(var i = 0; i < filterFlow.length; i++) {
- if(item.projectname == filterFlow[i].projectname && item.flowname == filterFlow[i].flowname) {
- return true;
- }
- }
- return false;
- }
-
- //Function that re-renders all loaded items
- function renderDays() {
- //Clear items inside the day view
- svg.remove(gDayView);
- gDayView = svg.group(gDayViewOuterGroup, {transform: "translate(" + (border + timeWidth) + "," + header + ")"});
-
- //Add day groups
- for(var deltaDay = 0; deltaDay < numDays; deltaDay++) {
- var gDay = svg.group(gDayView, {transform: "translate(" + (deltaDay * dayWidth) + ")"});
- var data = itemByDay[deltaDay];
- //Sort the arrays to have a better view
- data.sort(function (a, b){
- //Smaller time in front
- var timeDiff = a.time - b.time;
- if(timeDiff == 0) {
- //Larger length in front
- var lengthDiff = b.length - a.length;
- if(lengthDiff == 0) {
- //Sort by alphabetical
- return (a.flowname < b.flowname ? 1 : a.flowname > b.flowname ? -1 : 0);
- }
- return lengthDiff;
- }
- return timeDiff;
- });
- //Sort items to columns
- var columns = new Array();
- columns.push(new Array());
- //Every item is parsed through here into columns
- for(var i = 0; i < data.length; i++) {
- //Apply filters here
- if(filterApplies(data[i])) {
- continue;
- }
-
- var foundColumn = false;
- //Go through every column until a place can be found
- for(var j = 0; j < columns.length; j++) {
- if(!intersectArray(data[i], columns[j])) {
- //Found a place
- columns[j].push(data[i]);
- foundColumn = true;
- break;
- }
- }
- //No place, create new column
- if(!foundColumn) {
- columns.push(new Array());
- columns[columns.length - 1].push(data[i]);
- }
- }
-
- //Actually drawing them
- for(var i = 0; i < columns.length; i++) {
- //Split into columns
- var gColumn = svg.group(gDay, {transform: "translate(" + Math.floor(i * dayWidth / columns.length) + ")", style: "opacity: 0.8"});
- for(var j = 0; j < columns[i].length; j++) {
- //Draw item
- var item = columns[i][j];
- var startTime = new Date(item.time);
- var startY = Math.floor(startTime.getHours() * lineHeight + startTime.getMinutes() * lineHeight / 60);
- var endTime = new Date(item.time + item.length );
- var endY = Math.ceil(startY + (item.length * lineHeight) / hourMillisConst);
- var deltaY = Math.ceil(endY - startY);
- if(deltaY < 5){
- deltaY = 5;
- }
- //var anchor = svg.a(gColumn);
- var itemUrl = contextURL + "/manager?project=" + item.projectname + "&flow=" + item.flowname;
- var gItem = svg.link(gColumn, itemUrl, {transform: "translate(0," + startY + ")"});
-
- //Pass the item into the DOM data store to be retrieved later on
- $(gItem).data("item", item);
-
- //Replace the context handler
- gItem.addEventListener('contextmenu', handleContextMenu);
-
- //Add a tooltip on mouse over
- gItem.addEventListener('mouseover', handleMouseOver);
- //Remove the tooltip on mouse out
- gItem.addEventListener('mouseout', handleMouseOut);
-
- //$(gItem).attr("style","color:red");
- var rect = svg.rect(gItem, 0, 0, Math.ceil(dayWidth / columns.length), deltaY, 0, 0, {fill : item.item.color, stroke : "#444", strokeWidth : 1});
-
- item.rect = rect;
- //Draw text
- //svg.text(gItem, 6, 16, item.flowname, {fontSize: "13", fill : "#000", stroke : "none"});
- }
- }
- }
- }
-
- function processItem(item, scheduled)
- {
- var firstTime = item.time;
- var startTime = firstDay;
- var endTime = firstDay + numDays * dayMillisConst;
- var period = item.period;
- var restrictedStartTime = Math.max(firstDay, today.valueOf());
- if(!scheduled){
- restrictedStartTime = firstDay;
- }
-
- // Shift time until we're past the start time
- if (period > 0) {
- // Calculate next execution time efficiently
- // Take into account items that ends in the date specified, but does not start on that date
- var periods = Math.floor((restrictedStartTime - (firstTime)) / period);
- //Make sure we don't subtract
- if(periods < 0){
- periods = 0;
- }
- firstTime += period * periods;
- // Increment in case we haven't arrived yet. This will apply to most of the cases
- while (firstTime < restrictedStartTime) {
- firstTime += period;
- }
- }
-
- // Bad or no period
- if (period <= 0) {
- // Single instance case
- if (firstTime >= restrictedStartTime && firstTime < endTime) {
- addItem({scheduleid: item.scheduleid, flowname : item.flowname, projectname: item.projectname, time: firstTime, length: item.length, item: item});
- }
- }
- else {
- if(period <= hourMillisConst) {
- addItem({scheduleid: item.scheduleid, flowname : item.flowname, projectname: item.projectname, time: firstTime, length: endTime - firstTime, item: item});
- }
- else{
- // Repetitive schedule, firstTime is assumed to be after startTime
- while (firstTime < endTime) {
- addItem({scheduleid: item.scheduleid, flowname : item.flowname, projectname: item.projectname, time: firstTime, length: item.length, item: item});
- firstTime += period;
- }
- }
- }
- }
-
- function addItem(obj)
- {
- var itemStartTime = new Date(obj.time);
- var itemEndTime = new Date(obj.time + obj.length);
- var itemStartDate = new Date(itemStartTime.getFullYear(), itemStartTime.getMonth(), itemStartTime.getDate());
- var itemEndDate = new Date(itemEndTime.getFullYear(), itemEndTime.getMonth(), itemEndTime.getDate());
-
- //Cross date item, cut it to only today's portion and add another item starting tomorrow morning
- if(itemStartDate.valueOf() != itemEndDate.valueOf() && itemEndTime.valueOf() != itemStartDate + dayMillisConst)
- {
- var nextMorning = itemStartDate.valueOf() + dayMillisConst;
- var excess = obj.length - (nextMorning - itemStartTime.valueOf());
- obj.length = nextMorning - itemStartTime.valueOf();
- while(excess > 0)
- {
- var tempLength = excess;
- if(tempLength > dayMillisConst){
- tempLength = dayMillisConst;
- }
-
- var item2 = {scheduleid: obj.scheduleid, time: nextMorning, length: tempLength, projectname: obj.projectname, flowname: obj.flowname, item: obj.item};
- addItem(item2);
- excess -= tempLength;
- nextMorning += dayMillisConst;
- }
- }
-
- //Now the item should be only in one day
- var index = (itemStartDate.valueOf() - firstDay) / dayMillisConst;
- if(index >= 0 && index < numDays)
- {
- //Add the item to the rendering list
- itemByDay[index].push(obj);
- //obj.item.objs.push(obj);
-
- if(!itemByScheduleIdMap[obj.scheduleid]){
- itemByScheduleIdMap[obj.scheduleid] = new Array();
- }
- itemByScheduleIdMap[obj.scheduleid].push(obj);
- }
- }
-
- function handleContextMenu(event) {
- var requestURL = $(this).attr("href");
- var item = $(this).data("item");
- var menu = [
- {title: "Job \"" + item.flowname + "\" From Project \"" + item.projectname + "\""},
- {title: "View Job", callback: function() {window.location.href=requestURL;}},
- {title: "View Job in New Window", callback: function() {window.open(requestURL);}},
- {title: "Hide Job", callback: function() {filterFlow.push(item); renderDays();}},
- {title: "Hide All Jobs From the Same Project", callback: function() {filterProject.push(item); renderDays();}}
- ];
- contextMenuView.show(event, menu);
- event.preventDefault();
- event.stopPropagation();
- return false;
- }
-
- function handleMouseOver(event) {
- //Create the new tooltip
- var requestURL = $(this).attr("href");
- var obj = $(this).data("item");
- var offset = $("svg").offset();
- var thisOffset = $(this).offset();
-
- var tooltip = svg.group({transform: "translate(" + (thisOffset.left - offset.left + 2) + "," + (thisOffset.top - offset.top - 2) + ")"});
- var text = [
- "\"" + obj.flowname + "\" from \"" + obj.projectname + "\"",
- "Repeat: " + formatReadablePeriod(obj.item.period)
- ];
-
- if(obj.item.period == 0){
- text[1] = "";
- if(obj.item.history == true) {
- if(obj.item.status == 50){
- text[1] = "SUCCEEDED";
- }
- else if(obj.item.status == 60){
- text[1] = "KILLED";
- }
- else if(obj.item.status == 70){
- text[1] = "FAILED";
- }
- else if(obj.item.status == 80){
- text[1] = "FAILED_FINISHING";
- }
- else if(obj.item.status == 90){
- text[1] = "SKIPPED";
- }
- }
- }
- var textLength = Math.max(measureText(svg, text[0], {fontSize: "13"}), measureText(svg, text[1], {fontSize: "13"}));
- var rect = svg.rect(tooltip, 0, -40, textLength + 4, 40, {fill : "#FFF", stroke : "none"});
- svg.text(tooltip, 2, -25, text[0], {fontSize: "13", fill : "#000", stroke : "none"});
- svg.text(tooltip, 2, -5, text[1], {fontSize: "13", fill : "#000", stroke : "none"});
-
- //Store tooltip
- $(this).data("tooltip", tooltip);
-
- if(itemByScheduleIdMap[obj.scheduleid]){
- //Item highlight effect
- var arry = itemByScheduleIdMap[obj.scheduleid];
- for(var i = 0; i < arry.length; i++) {
- $(arry[i].rect).attr("fill", "#FF0");
- }
- }
- }
-
- function handleMouseOut(event) {
- //Item highlight effect
- var obj = $(this).data("item");
- //Item highlight effect
- if(itemByScheduleIdMap[obj.scheduleid]){
- var arry = itemByScheduleIdMap[obj.scheduleid];
- for(var i = 0; i < arry.length; i++) {
- var obj2 = obj.item.objs[i];
- $(arry[i].rect).attr("fill", arry[i].item.color);
- }
- }
- //Clear the fade interval
- $($(this).data("tooltip")).fadeOut(250, function(){ svg.remove(this); });
- }
-
- //Asynchronously load data
- var requestURL = contextURL + "/schedule";
- $.ajax({
- type: "GET",
- url: requestURL,
- data: {"ajax": "loadFlow"},
- dataType: "json",
- success: function (data)
- {
- var items = data.items;
-
- //Sort items by day
- for(var i = 0; i < items.length; i++)
- {
- //items[i].length = hourMillisConst; //TODO: Remove this to get the actual length
- items[i].objs = new Array();
- items[i].color = "#69F";
- processItem(items[i], true);
- }
- //Trigger a re-rendering of all the data
- renderDays();
- }
- });
- for(var deltaDay = 0; deltaDay < numDays; deltaDay++) {
- $.ajax({
- type: "GET",
- url: requestURL,
- data: {"ajax": "loadHistory", "startTime": firstDay + deltaDay * dayMillisConst, "loadAll" : 0},
- //dataType: "json",
- success: function (data)
- {
- var items = data.items;
-
- //Sort items by day
- for(var i = 0; i < items.length; i++)
- {
- //if(items[i].length < 5 * 60 * 1000) items[i].length = 5 * 60 * 1000;
- items[i].objs = new Array();
- items[i].color = "#7E7";
- if(items[i].status == 60 || items[i].status == 70 || items[i].status == 80)
- items[i].color = "#E77";
- processItem(items[i], false);
- }
- //Trigger a re-rendering of all the data
- renderDays();
- }
- });
- }
- }
- }, settings : {
- "xmlns" : "http://www.w3.org/2000/svg",
- "xmlns:xlink" : "http://www.w3.org/1999/xlink",
- "shape-rendering" : "optimize-speed",
- "style" : "width:100%;height:" + totalHeight + "px"
- }});
-
- function dayMatch(d1, d2) {
- return d1.getDate() == d2.getDate() && d1.getFullYear() == d2.getFullYear() && d1.getMonth() == d2.getMonth();
- }
-
- function getHourText(hour) {
- return (hour==0 ? "12 AM" : (hour<12 ? hour + " AM" : (hour==12 ? "12 PM" : (hour-12) + " PM" )));
- }
-
- function intersectArray(a, arry) {
- for(var i = 0; i < arry.length; i++) {
- var b = arry[i];
- if(a.time < b.time + b.length && a.time + a.length > b.time) {
- return true;
- }
- }
-
- return false;
- }
-
- function measureText(svg, text, options) {
- var test = svg.text(0, 0, text, options);
- var width = test.getComputedTextLength();
- svg.remove(test);
- return width;
- }
-
- function formatReadablePeriod(period) {
- var days = Math.floor(period / dayMillisConst);
- var hour = period - days * dayMillisConst;
- var hours = Math.floor(hour / hourMillisConst);
- var min = hour - hours * hourMillisConst;
- var mins = Math.floor(min / 60000);
-
- var text = "";
- if(days > 0) text = (days == 1 ? "24 hours" : days.toString() + " days");
- if(hours > 0) text = text + " " + (hours == 1 ? "1 hour" : hours.toString() + " hours");
- if(mins > 0) text = text + " " + (mins == 1 ? "1 minute" : mins.toString() + " minutes");
- return text;
- }
+ var border = 20;
+ var header = 30;
+ var minTimeWidth = 80;
+ var timeWidth = minTimeWidth;
+ var lineHeight = 40;
+ var numDays = 7;
+ var today = new Date();
+ var totalHeight = (border * 2 + header + 24 * lineHeight);
+ var monthConst = ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"];
+ var dayOfWeekConst = ["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday","Sunday"];
+ var hourMillisConst = 3600 * 1000;
+ var dayMillisConst = 24 * hourMillisConst;
+
+ $("#svgDivCustom").svg({onLoad:
+ function (svg) {
+
+ var totalWidth = $("#svgDivCustom").width();
+
+ $("#svgDivCustom").find("svg").eq(0).removeAttr("width");
+
+
+ //Outer g
+ var gMain = svg.group({transform: "translate(" + border + ".5," + border + ".5)", stroke : "#999", strokeWidth: 1});
+ var defaultDate = new Date(today.setDate(today.getDate() - today.getDay()));
+ today = new Date();
+ var svgDate = defaultDate;
+
+ //Load the date from the hash if existing
+ if(window.location.hash) {
+ try {
+ var dateParts = window.location.hash.replace("#", "").split("-");
+ var newDate = new Date(parseInt(dateParts[0]), parseInt(dateParts[1]) - 1, parseInt(dateParts[2]));
+ if(!isNaN(newDate)) {
+ svgDate = newDate;
+ }
+ }
+ catch(err){ }
+ }
+
+ //Used to filter projects or flows out
+ var filterProject = new Array();
+ var filterFlow = new Array();
+
+ $(".nav-prev-week").click(function (event) {
+ svgDate = new Date(svgDate.valueOf() - 7 * dayMillisConst);
+ window.location.hash = "#" + svgDate.getFullYear() + "-" + (svgDate.getMonth() + 1) + "-" + svgDate.getDate();
+ loadSvg(svgDate);
+ event.stopPropagation();
+ });
+ $(".nav-next-week").click(function (event) {
+ svgDate = new Date(svgDate.valueOf() + 7 * dayMillisConst);
+ window.location.hash = "#" + svgDate.getFullYear() + "-" + (svgDate.getMonth() + 1) + "-" + svgDate.getDate();
+ loadSvg(svgDate);
+ event.stopPropagation();
+ });
+ $(".nav-this-week").click(function (event) {
+ svgDate = defaultDate;
+ window.location.hash = "#" + svgDate.getFullYear() + "-" + (svgDate.getMonth() + 1) + "-" + svgDate.getDate();
+ loadSvg(svgDate);
+ event.stopPropagation();
+ });
+
+
+
+ loadSvg(svgDate);
+
+ function loadSvg(firstDay)
+ {
+ //Text to show which month/year it is
+ var monthIndicatorText = monthConst[firstDay.getMonth()] + " " + firstDay.getFullYear().toString();
+ //Measure a good width for the text to display well
+ timeWidth = Math.max(minTimeWidth, measureText(svg, monthIndicatorText, {fontSize: "20", style: "text-anchor: end;"}));
+
+ var dayWidth = Math.floor((totalWidth - 3 * border - timeWidth) / numDays);
+
+ //svg.configure({viewBox: "0 0 " + totalWidth + " " + totalHeight, style: "width:100%"}, true);
+ svg.remove(gMain);
+ gMain = svg.group({transform: "translate(" + border + ".5," + border + ".5)", stroke : "#999", strokeWidth: 1});
+ svg.text(gMain, timeWidth, header - 8, monthIndicatorText, {fontSize: "20", style: "text-anchor: end;", fill : "#F60", stroke : "none"});
+ //time indicator group
+ var gLeft = svg.group(gMain, {transform: "translate(0," + header + ")"});
+ //Draw lines and hours
+ for(var i = 0; i < 24; i++)
+ {
+ svg.line(gLeft, 0, i * lineHeight, timeWidth, i * lineHeight);
+ //Gets the hour text from an integer from 0 to 23
+ var hourText = getHourText(i);
+ //Move text down a bit? TODO: Is there a CSS option for top anchor?
+ svg.text(gLeft, timeWidth, i * lineHeight + 15, hourText, {fontSize: "14", style: "text-anchor: end;", fill : "#333", stroke : "none"});
+ }
+
+ //var firstDay = new Date();//(new Date()).valueOf();
+ firstDay = new Date(firstDay.getFullYear(), firstDay.getMonth(), firstDay.getDate()).valueOf();
+ var isThisWeek = -1;
+ //Draw background
+ for(var deltaDay = 0; deltaDay < numDays; deltaDay++)
+ {
+ //Day group
+ var gDay = svg.group(gMain, {transform: "translate(" + (border + timeWidth + deltaDay * dayWidth) + "," + header + ")"});
+
+ //This is temporary.
+ var date = new Date(firstDay + dayMillisConst * deltaDay);
+ var day = date.getDate();
+
+ //Draw box around
+ var isToday = date.getFullYear() == today.getFullYear() && date.getMonth() == today.getMonth() && date.getDate() == today.getDate();
+ if(isToday)
+ {
+ isThisWeek = deltaDay;
+ }
+ svg.rect(gDay, 0, -header, dayWidth, 24 * lineHeight + header, {fill : "none", stroke : "#F60"});
+ //Draw day title
+ svg.text(gDay, 6, -8, day + " " + dayOfWeekConst[date.getDay()], {fontSize: "20", fill : isToday?"#06C":"#F60", stroke : "none"});
+
+ //Draw horizontal lines
+ for(var i = 0; i < 24; i++)
+ {
+ svg.line(gDay, 0, i * lineHeight, dayWidth, i * lineHeight);
+ }
+ }
+
+ var gDayViewOuterGroup = svg.group(gMain);
+ var gDayView = svg.group(gDayViewOuterGroup, {transform: "translate(" + (border + timeWidth) + "," + header + ")"});
+ if(isThisWeek != -1)
+ {
+ var date = new Date(firstDay + dayMillisConst * isThisWeek);
+ var day = date.getDate();
+ var gDay = svg.group(gMain, {transform: "translate(" + (border + timeWidth + isThisWeek * dayWidth) + "," + header + ")"});
+ svg.rect(gDay, 0, -header, dayWidth, 24 * lineHeight + header, {fill : "none", stroke : "#06F"});
+ var lineY = Math.floor(today.getHours() * lineHeight + today.getMinutes() * lineHeight / 60);
+ svg.line(gDay, 0, lineY, dayWidth, lineY, {fill : "none", stroke : "#06F", strokeWidth : 4});
+ }
+
+ //A list of all items
+ var itemByDay = new Array();
+ for(var deltaDay = 0; deltaDay < numDays; deltaDay++) {
+ itemByDay[deltaDay] = new Array();
+ }
+
+ var itemByScheduleIdMap = {};
+
+ function filterApplies(item) {
+ for(var i = 0; i < filterProject.length; i++) {
+ if(item.projectname == filterProject[i].projectname) {
+ return true;
+ }
+ }
+ for(var i = 0; i < filterFlow.length; i++) {
+ if(item.projectname == filterFlow[i].projectname && item.flowname == filterFlow[i].flowname) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ //Function that re-renders all loaded items
+ function renderDays() {
+ //Clear items inside the day view
+ svg.remove(gDayView);
+ gDayView = svg.group(gDayViewOuterGroup, {transform: "translate(" + (border + timeWidth) + "," + header + ")"});
+
+ //Add day groups
+ for(var deltaDay = 0; deltaDay < numDays; deltaDay++) {
+ var gDay = svg.group(gDayView, {transform: "translate(" + (deltaDay * dayWidth) + ")"});
+ var data = itemByDay[deltaDay];
+ //Sort the arrays to have a better view
+ data.sort(function (a, b){
+ //Smaller time in front
+ var timeDiff = a.time - b.time;
+ if(timeDiff == 0) {
+ //Larger length in front
+ var lengthDiff = b.length - a.length;
+ if(lengthDiff == 0) {
+ //Sort by alphabetical
+ return (a.flowname < b.flowname ? 1 : a.flowname > b.flowname ? -1 : 0);
+ }
+ return lengthDiff;
+ }
+ return timeDiff;
+ });
+ //Sort items to columns
+ var columns = new Array();
+ columns.push(new Array());
+ //Every item is parsed through here into columns
+ for(var i = 0; i < data.length; i++) {
+ //Apply filters here
+ if(filterApplies(data[i])) {
+ continue;
+ }
+
+ var foundColumn = false;
+ //Go through every column until a place can be found
+ for(var j = 0; j < columns.length; j++) {
+ if(!intersectArray(data[i], columns[j])) {
+ //Found a place
+ columns[j].push(data[i]);
+ foundColumn = true;
+ break;
+ }
+ }
+ //No place, create new column
+ if(!foundColumn) {
+ columns.push(new Array());
+ columns[columns.length - 1].push(data[i]);
+ }
+ }
+
+ //Actually drawing them
+ for(var i = 0; i < columns.length; i++) {
+ //Split into columns
+ var gColumn = svg.group(gDay, {transform: "translate(" + Math.floor(i * dayWidth / columns.length) + ")", style: "opacity: 0.8"});
+ for(var j = 0; j < columns[i].length; j++) {
+ //Draw item
+ var item = columns[i][j];
+ var startTime = new Date(item.time);
+ var startY = Math.floor(startTime.getHours() * lineHeight + startTime.getMinutes() * lineHeight / 60);
+ var endTime = new Date(item.time + item.length );
+ var endY = Math.ceil(startY + (item.length * lineHeight) / hourMillisConst);
+ var deltaY = Math.ceil(endY - startY);
+ if(deltaY < 5){
+ deltaY = 5;
+ }
+ //var anchor = svg.a(gColumn);
+ var itemUrl = contextURL + "/manager?project=" + item.projectname + "&flow=" + item.flowname;
+ var gItem = svg.link(gColumn, itemUrl, {transform: "translate(0," + startY + ")"});
+
+ //Pass the item into the DOM data store to be retrieved later on
+ $(gItem).data("item", item);
+
+ //Replace the context handler
+ gItem.addEventListener('contextmenu', handleContextMenu);
+
+ //Add a tooltip on mouse over
+ gItem.addEventListener('mouseover', handleMouseOver);
+ //Remove the tooltip on mouse out
+ gItem.addEventListener('mouseout', handleMouseOut);
+
+ //$(gItem).attr("style","color:red");
+ var rect = svg.rect(gItem, 0, 0, Math.ceil(dayWidth / columns.length), deltaY, 0, 0, {fill : item.item.color, stroke : "#444", strokeWidth : 1});
+
+ item.rect = rect;
+ //Draw text
+ //svg.text(gItem, 6, 16, item.flowname, {fontSize: "13", fill : "#000", stroke : "none"});
+ }
+ }
+ }
+ }
+
+ function processItem(item, scheduled)
+ {
+ var firstTime = item.time;
+ var startTime = firstDay;
+ var endTime = firstDay + numDays * dayMillisConst;
+ var period = item.period;
+ var restrictedStartTime = Math.max(firstDay, today.valueOf());
+ if(!scheduled){
+ restrictedStartTime = firstDay;
+ }
+
+ // Shift time until we're past the start time
+ if (period > 0) {
+ // Calculate next execution time efficiently
+ // Take into account items that ends in the date specified, but does not start on that date
+ var periods = Math.floor((restrictedStartTime - (firstTime)) / period);
+ //Make sure we don't subtract
+ if(periods < 0){
+ periods = 0;
+ }
+ firstTime += period * periods;
+ // Increment in case we haven't arrived yet. This will apply to most of the cases
+ while (firstTime < restrictedStartTime) {
+ firstTime += period;
+ }
+ }
+
+ // Bad or no period
+ if (period <= 0) {
+ // Single instance case
+ if (firstTime >= restrictedStartTime && firstTime < endTime) {
+ addItem({scheduleid: item.scheduleid, flowname : item.flowname, projectname: item.projectname, time: firstTime, length: item.length, item: item});
+ }
+ }
+ else {
+ if(period <= hourMillisConst) {
+ addItem({scheduleid: item.scheduleid, flowname : item.flowname, projectname: item.projectname, time: firstTime, length: endTime - firstTime, item: item});
+ }
+ else{
+ // Repetitive schedule, firstTime is assumed to be after startTime
+ while (firstTime < endTime) {
+ addItem({scheduleid: item.scheduleid, flowname : item.flowname, projectname: item.projectname, time: firstTime, length: item.length, item: item});
+ firstTime += period;
+ }
+ }
+ }
+ }
+
+ function addItem(obj)
+ {
+ var itemStartTime = new Date(obj.time);
+ var itemEndTime = new Date(obj.time + obj.length);
+ var itemStartDate = new Date(itemStartTime.getFullYear(), itemStartTime.getMonth(), itemStartTime.getDate());
+ var itemEndDate = new Date(itemEndTime.getFullYear(), itemEndTime.getMonth(), itemEndTime.getDate());
+
+ //Cross date item, cut it to only today's portion and add another item starting tomorrow morning
+ if(itemStartDate.valueOf() != itemEndDate.valueOf() && itemEndTime.valueOf() != itemStartDate + dayMillisConst)
+ {
+ var nextMorning = itemStartDate.valueOf() + dayMillisConst;
+ var excess = obj.length - (nextMorning - itemStartTime.valueOf());
+ obj.length = nextMorning - itemStartTime.valueOf();
+ while(excess > 0)
+ {
+ var tempLength = excess;
+ if(tempLength > dayMillisConst){
+ tempLength = dayMillisConst;
+ }
+
+ var item2 = {scheduleid: obj.scheduleid, time: nextMorning, length: tempLength, projectname: obj.projectname, flowname: obj.flowname, item: obj.item};
+ addItem(item2);
+ excess -= tempLength;
+ nextMorning += dayMillisConst;
+ }
+ }
+
+ //Now the item should be only in one day
+ var index = (itemStartDate.valueOf() - firstDay) / dayMillisConst;
+ if(index >= 0 && index < numDays)
+ {
+ //Add the item to the rendering list
+ itemByDay[index].push(obj);
+ //obj.item.objs.push(obj);
+
+ if(!itemByScheduleIdMap[obj.scheduleid]){
+ itemByScheduleIdMap[obj.scheduleid] = new Array();
+ }
+ itemByScheduleIdMap[obj.scheduleid].push(obj);
+ }
+ }
+
+ function handleContextMenu(event) {
+ var requestURL = $(this).attr("href");
+ var item = $(this).data("item");
+ var menu = [
+ {title: "Job \"" + item.flowname + "\" From Project \"" + item.projectname + "\""},
+ {title: "View Job", callback: function() {window.location.href=requestURL;}},
+ {title: "View Job in New Window", callback: function() {window.open(requestURL);}},
+ {title: "Hide Job", callback: function() {filterFlow.push(item); renderDays();}},
+ {title: "Hide All Jobs From the Same Project", callback: function() {filterProject.push(item); renderDays();}}
+ ];
+ contextMenuView.show(event, menu);
+ event.preventDefault();
+ event.stopPropagation();
+ return false;
+ }
+
+ function handleMouseOver(event) {
+ //Create the new tooltip
+ var requestURL = $(this).attr("href");
+ var obj = $(this).data("item");
+ var offset = $("svg").offset();
+ var thisOffset = $(this).offset();
+
+ var tooltip = svg.group({transform: "translate(" + (thisOffset.left - offset.left + 2) + "," + (thisOffset.top - offset.top - 2) + ")"});
+ var text = [
+ "\"" + obj.flowname + "\" from \"" + obj.projectname + "\"",
+ "Repeat: " + formatReadablePeriod(obj.item.period)
+ ];
+
+ if(obj.item.period == 0){
+ text[1] = "";
+ if(obj.item.history == true) {
+ if(obj.item.status == 50){
+ text[1] = "SUCCEEDED";
+ }
+ else if(obj.item.status == 60){
+ text[1] = "KILLED";
+ }
+ else if(obj.item.status == 70){
+ text[1] = "FAILED";
+ }
+ else if(obj.item.status == 80){
+ text[1] = "FAILED_FINISHING";
+ }
+ else if(obj.item.status == 90){
+ text[1] = "SKIPPED";
+ }
+ }
+ }
+ var textLength = Math.max(measureText(svg, text[0], {fontSize: "13"}), measureText(svg, text[1], {fontSize: "13"}));
+ var rect = svg.rect(tooltip, 0, -40, textLength + 4, 40, {fill : "#FFF", stroke : "none"});
+ svg.text(tooltip, 2, -25, text[0], {fontSize: "13", fill : "#000", stroke : "none"});
+ svg.text(tooltip, 2, -5, text[1], {fontSize: "13", fill : "#000", stroke : "none"});
+
+ //Store tooltip
+ $(this).data("tooltip", tooltip);
+
+ if(itemByScheduleIdMap[obj.scheduleid]){
+ //Item highlight effect
+ var arry = itemByScheduleIdMap[obj.scheduleid];
+ for(var i = 0; i < arry.length; i++) {
+ $(arry[i].rect).attr("fill", "#FF0");
+ }
+ }
+ }
+
+ function handleMouseOut(event) {
+ //Item highlight effect
+ var obj = $(this).data("item");
+ //Item highlight effect
+ if(itemByScheduleIdMap[obj.scheduleid]){
+ var arry = itemByScheduleIdMap[obj.scheduleid];
+ for(var i = 0; i < arry.length; i++) {
+ var obj2 = obj.item.objs[i];
+ $(arry[i].rect).attr("fill", arry[i].item.color);
+ }
+ }
+ //Clear the fade interval
+ $($(this).data("tooltip")).fadeOut(250, function(){ svg.remove(this); });
+ }
+
+ //Asynchronously load data
+ var requestURL = contextURL + "/schedule";
+ $.ajax({
+ type: "GET",
+ url: requestURL,
+ data: {"ajax": "loadFlow"},
+ dataType: "json",
+ success: function (data)
+ {
+ var items = data.items;
+
+ //Sort items by day
+ for(var i = 0; i < items.length; i++)
+ {
+ //items[i].length = hourMillisConst; //TODO: Remove this to get the actual length
+ items[i].objs = new Array();
+ items[i].color = "#69F";
+ processItem(items[i], true);
+ }
+ //Trigger a re-rendering of all the data
+ renderDays();
+ }
+ });
+ for(var deltaDay = 0; deltaDay < numDays; deltaDay++) {
+ $.ajax({
+ type: "GET",
+ url: requestURL,
+ data: {"ajax": "loadHistory", "startTime": firstDay + deltaDay * dayMillisConst, "loadAll" : 0},
+ //dataType: "json",
+ success: function (data)
+ {
+ var items = data.items;
+
+ //Sort items by day
+ for(var i = 0; i < items.length; i++)
+ {
+ //if(items[i].length < 5 * 60 * 1000) items[i].length = 5 * 60 * 1000;
+ items[i].objs = new Array();
+ items[i].color = "#7E7";
+ if(items[i].status == 60 || items[i].status == 70 || items[i].status == 80)
+ items[i].color = "#E77";
+ processItem(items[i], false);
+ }
+ //Trigger a re-rendering of all the data
+ renderDays();
+ }
+ });
+ }
+ }
+ }, settings : {
+ "xmlns" : "http://www.w3.org/2000/svg",
+ "xmlns:xlink" : "http://www.w3.org/1999/xlink",
+ "shape-rendering" : "optimize-speed",
+ "style" : "width:100%;height:" + totalHeight + "px"
+ }});
+
+ function dayMatch(d1, d2) {
+ return d1.getDate() == d2.getDate() && d1.getFullYear() == d2.getFullYear() && d1.getMonth() == d2.getMonth();
+ }
+
+ function getHourText(hour) {
+ return (hour==0 ? "12 AM" : (hour<12 ? hour + " AM" : (hour==12 ? "12 PM" : (hour-12) + " PM" )));
+ }
+
+ function intersectArray(a, arry) {
+ for(var i = 0; i < arry.length; i++) {
+ var b = arry[i];
+ if(a.time < b.time + b.length && a.time + a.length > b.time) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ function measureText(svg, text, options) {
+ var test = svg.text(0, 0, text, options);
+ var width = test.getComputedTextLength();
+ svg.remove(test);
+ return width;
+ }
+
+ function formatReadablePeriod(period) {
+ var days = Math.floor(period / dayMillisConst);
+ var hour = period - days * dayMillisConst;
+ var hours = Math.floor(hour / hourMillisConst);
+ var min = hour - hours * hourMillisConst;
+ var mins = Math.floor(min / 60000);
+
+ var text = "";
+ if(days > 0) text = (days == 1 ? "24 hours" : days.toString() + " days");
+ if(hours > 0) text = text + " " + (hours == 1 ? "1 hour" : hours.toString() + " hours");
+ if(mins > 0) text = text + " " + (mins == 1 ? "1 minute" : mins.toString() + " minutes");
+ return text;
+ }
});
src/web/js/azkaban/view/svg-graph.js 1312(+656 -656)
diff --git a/src/web/js/azkaban/view/svg-graph.js b/src/web/js/azkaban/view/svg-graph.js
index 7e0d179..8af7abc 100644
--- a/src/web/js/azkaban/view/svg-graph.js
+++ b/src/web/js/azkaban/view/svg-graph.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -21,661 +21,661 @@
$.namespace('azkaban');
azkaban.SvgGraphView = Backbone.View.extend({
- events: {
- },
-
- initialize: function(settings) {
- this.model.bind('change:selected', this.changeSelected, this);
- this.model.bind('centerNode', this.centerNode, this);
- this.model.bind('change:graph', this.render, this);
- this.model.bind('resetPanZoom', this.resetPanZoom, this);
- this.model.bind('change:update', this.handleStatusUpdate, this);
- this.model.bind('change:disabled', this.handleDisabledChange, this);
- this.model.bind('change:updateAll', this.handleUpdateAllStatus, this);
- this.model.bind('expandFlow', this.expandFlow, this);
- this.model.bind('collapseFlow', this.collapseFlow, this);
-
- this.graphMargin = settings.graphMargin ? settings.graphMargin : 25;
- this.svgns = "http://www.w3.org/2000/svg";
- this.xlinksn = "http://www.w3.org/1999/xlink";
-
- var graphDiv = this.el[0];
- var svg = $(this.el).find('svg')[0];
- if (!svg) {
- svg = this.el;
- }
-
- this.svgGraph = svg;
- $(this.svgGraph).svg();
- this.svg = $(svg).svg('get');
-
- $(this.svgGraph).empty();
-
- // Create mainG node
- var gNode = document.createElementNS(this.svgns, 'g');
- gNode.setAttribute("class", "main graph");
- svg.appendChild(gNode);
- this.mainG = gNode;
-
- if (settings.rightClick) {
- this.rightClick = settings.rightClick;
- }
-
- $(svg).svgNavigate();
-
- var self = this;
- if (self.rightClick && self.rightClick.graph) {
- $(svg).on("contextmenu", function(evt) {
- console.log("graph click");
- var currentTarget = evt.currentTarget;
-
- self.rightClick.graph(evt, self.model, currentTarget.data);
- return false;
- });
- }
-
- this.tooltipcontainer = settings.tooltipcontainer ? settings.tooltipcontainer : "body";
- if (settings.render) {
- this.render();
- }
- },
-
- render: function() {
- console.log("graph render");
- $(this.mainG).empty();
-
- this.graphBounds = this.renderGraph(this.model.get("data"), this.mainG);
- this.resetPanZoom(0);
- },
-
- renderGraph: function(data, g) {
- g.data = data;
- var nodes = data.nodes;
- var edges = data.edges;
- var nodeMap = data.nodeMap;
-
- // Create a g node for edges, so that they're forced in the back.
- var edgeG = this.svg.group(g);
- if (nodes.length == 0) {
- console.log("No results");
- return;
- };
-
- // Assign labels
- for (var i = 0; i < nodes.length; ++i) {
- nodes[i].label = nodes[i].id;
- }
-
- var self = this;
- for (var i = 0; i < nodes.length; ++i) {
- this.drawNode(this, nodes[i], g);
- $(nodes[i].gNode).click(function(evt) {
- var selected = self.model.get("selected");
- if (selected == evt.currentTarget.data) {
- self.model.unset("selected");
- }
- else {
- self.model.set({"selected":evt.currentTarget.data});
- }
-
- evt.stopPropagation();
- evt.cancelBubble = true;
- });
- }
-
- // layout
- layoutGraph(nodes, edges, 10);
- var bounds = this.calculateBounds(nodes);
- this.moveNodes(nodes);
-
- for (var i = 0; i < edges.length; ++i) {
- edges[i].toNode = nodeMap[edges[i].to];
- edges[i].fromNode = nodeMap[edges[i].from];
- this.drawEdge(this, edges[i], edgeG);
- }
-
- this.model.set({"flowId":data.flowId, "edges": edges});
-
- var margin = this.graphMargin;
- bounds.minX = bounds.minX ? bounds.minX - margin : -margin;
- bounds.minY = bounds.minY ? bounds.minY - margin : -margin;
- bounds.maxX = bounds.maxX ? bounds.maxX + margin : margin;
- bounds.maxY = bounds.maxY ? bounds.maxY + margin : margin;
-
- this.assignInitialStatus(this, data);
-
- if (self.rightClick) {
- if (self.rightClick.node) {
- // Proper children selectors don't work properly on svg
- for (var i = 0; i < nodes.length; ++i) {
- $(nodes[i].gNode).on("contextmenu", function(evt) {
- console.log("node click");
- var currentTarget = evt.currentTarget;
- self.rightClick.node(evt, self.model, currentTarget.data);
- return false;
- });
- }
- }
- if (this.rightClick.graph) {
- $(g).on("contextmenu", function(evt) {
- console.log("graph click");
- var currentTarget = evt.currentTarget;
-
- self.rightClick.graph(evt, self.model, currentTarget.data);
- return false;
- });
- }
- };
-
- $(".node").each(function(d,i) {
- $(this).tooltip({
- container: self.tooltipcontainer,
- delay: {
- show: 500,
- hide: 100
- }
- });
- });
-
- return bounds;
- },
-
- handleDisabledChange: function(evt) {
- this.changeDisabled(this.model.get('data'));
- },
-
- changeDisabled: function(data) {
- for (var i = 0; i < data.nodes.length; ++i) {
- var node = data.nodes[i];
- if (node.disabled) {
- if (node.gNode) {
- addClass(node.gNode, "nodeDisabled");
- $(node.gNode).attr("title", "DISABLED (" + node.type + ")").tooltip('fixTitle');
- }
- }
- else {
- if (node.gNode) {
- removeClass(node.gNode, "nodeDisabled");
- $(node.gNode).attr("title", node.status + " (" + node.type + ")").tooltip('fixTitle');
- }
- if (node.type=='flow') {
- this.changeDisabled(node);
- }
- }
- }
- },
-
- assignInitialStatus: function(evt, data) {
- for (var i = 0; i < data.nodes.length; ++i) {
- var updateNode = data.nodes[i];
- var g = updateNode.gNode;
- var initialStatus = updateNode.status ? updateNode.status : "READY";
-
- addClass(g, initialStatus);
- var title = initialStatus + " (" + updateNode.type + ")";
-
- if (updateNode.disabled) {
- addClass(g, "nodeDisabled");
- title = "DISABLED (" + updateNode.type + ")";
- }
- $(g).attr("title", title);
- }
- },
-
- changeSelected: function(self) {
- console.log("change selected");
- var selected = this.model.get("selected");
- var previous = this.model.previous("selected");
-
- if (previous) {
- // Unset previous
- removeClass(previous.gNode, "selected");
- }
-
- if (selected) {
- this.propagateExpansion(selected);
- var g = selected.gNode;
- addClass(g, "selected");
-
- console.log(this.model.get("autoPanZoom"));
- if (this.model.get("autoPanZoom")) {
- this.centerNode(selected);
- }
- }
- },
-
- propagateExpansion: function(node) {
- if (node.parent.type) {
- this.propagateExpansion(node.parent);
- this.expandFlow(node.parent);
- }
- },
-
- handleStatusUpdate: function(evt) {
- var updateData = this.model.get("update");
- var data = this.model.get("data");
- this.updateStatusChanges(updateData, data);
- },
-
- updateStatusChanges: function(updateData, data) {
- // Assumes all changes have been applied.
- if (updateData.nodes) {
- var nodeMap = data.nodeMap;
- for (var i = 0; i < updateData.nodes.length; ++i) {
- var node = updateData.nodes[i];
- var nodeToUpdate = nodeMap[node.id];
-
- var g = nodeToUpdate.gNode;
- if (g) {
- this.handleRemoveAllStatus(g);
- addClass(g, nodeToUpdate.status);
-
- var title = nodeToUpdate.status + " (" + nodeToUpdate.type + ")";
- if (nodeToUpdate.disabled) {
- addClass(g, "nodeDisabled");
- title = "DISABLED (" + nodeToUpdate.type + ")";
- }
- $(g).attr("title", title).tooltip('fixTitle');
-
- if (node.nodes) {
- this.updateStatusChanges(node, nodeToUpdate);
- }
- }
- }
- }
- },
+ events: {
+ },
+
+ initialize: function(settings) {
+ this.model.bind('change:selected', this.changeSelected, this);
+ this.model.bind('centerNode', this.centerNode, this);
+ this.model.bind('change:graph', this.render, this);
+ this.model.bind('resetPanZoom', this.resetPanZoom, this);
+ this.model.bind('change:update', this.handleStatusUpdate, this);
+ this.model.bind('change:disabled', this.handleDisabledChange, this);
+ this.model.bind('change:updateAll', this.handleUpdateAllStatus, this);
+ this.model.bind('expandFlow', this.expandFlow, this);
+ this.model.bind('collapseFlow', this.collapseFlow, this);
+
+ this.graphMargin = settings.graphMargin ? settings.graphMargin : 25;
+ this.svgns = "http://www.w3.org/2000/svg";
+ this.xlinksn = "http://www.w3.org/1999/xlink";
+
+ var graphDiv = this.el[0];
+ var svg = $(this.el).find('svg')[0];
+ if (!svg) {
+ svg = this.el;
+ }
+
+ this.svgGraph = svg;
+ $(this.svgGraph).svg();
+ this.svg = $(svg).svg('get');
+
+ $(this.svgGraph).empty();
+
+ // Create mainG node
+ var gNode = document.createElementNS(this.svgns, 'g');
+ gNode.setAttribute("class", "main graph");
+ svg.appendChild(gNode);
+ this.mainG = gNode;
+
+ if (settings.rightClick) {
+ this.rightClick = settings.rightClick;
+ }
+
+ $(svg).svgNavigate();
+
+ var self = this;
+ if (self.rightClick && self.rightClick.graph) {
+ $(svg).on("contextmenu", function(evt) {
+ console.log("graph click");
+ var currentTarget = evt.currentTarget;
+
+ self.rightClick.graph(evt, self.model, currentTarget.data);
+ return false;
+ });
+ }
+
+ this.tooltipcontainer = settings.tooltipcontainer ? settings.tooltipcontainer : "body";
+ if (settings.render) {
+ this.render();
+ }
+ },
+
+ render: function() {
+ console.log("graph render");
+ $(this.mainG).empty();
+
+ this.graphBounds = this.renderGraph(this.model.get("data"), this.mainG);
+ this.resetPanZoom(0);
+ },
+
+ renderGraph: function(data, g) {
+ g.data = data;
+ var nodes = data.nodes;
+ var edges = data.edges;
+ var nodeMap = data.nodeMap;
+
+ // Create a g node for edges, so that they're forced in the back.
+ var edgeG = this.svg.group(g);
+ if (nodes.length == 0) {
+ console.log("No results");
+ return;
+ };
+
+ // Assign labels
+ for (var i = 0; i < nodes.length; ++i) {
+ nodes[i].label = nodes[i].id;
+ }
+
+ var self = this;
+ for (var i = 0; i < nodes.length; ++i) {
+ this.drawNode(this, nodes[i], g);
+ $(nodes[i].gNode).click(function(evt) {
+ var selected = self.model.get("selected");
+ if (selected == evt.currentTarget.data) {
+ self.model.unset("selected");
+ }
+ else {
+ self.model.set({"selected":evt.currentTarget.data});
+ }
+
+ evt.stopPropagation();
+ evt.cancelBubble = true;
+ });
+ }
+
+ // layout
+ layoutGraph(nodes, edges, 10);
+ var bounds = this.calculateBounds(nodes);
+ this.moveNodes(nodes);
+
+ for (var i = 0; i < edges.length; ++i) {
+ edges[i].toNode = nodeMap[edges[i].to];
+ edges[i].fromNode = nodeMap[edges[i].from];
+ this.drawEdge(this, edges[i], edgeG);
+ }
+
+ this.model.set({"flowId":data.flowId, "edges": edges});
+
+ var margin = this.graphMargin;
+ bounds.minX = bounds.minX ? bounds.minX - margin : -margin;
+ bounds.minY = bounds.minY ? bounds.minY - margin : -margin;
+ bounds.maxX = bounds.maxX ? bounds.maxX + margin : margin;
+ bounds.maxY = bounds.maxY ? bounds.maxY + margin : margin;
+
+ this.assignInitialStatus(this, data);
+
+ if (self.rightClick) {
+ if (self.rightClick.node) {
+ // Proper children selectors don't work properly on svg
+ for (var i = 0; i < nodes.length; ++i) {
+ $(nodes[i].gNode).on("contextmenu", function(evt) {
+ console.log("node click");
+ var currentTarget = evt.currentTarget;
+ self.rightClick.node(evt, self.model, currentTarget.data);
+ return false;
+ });
+ }
+ }
+ if (this.rightClick.graph) {
+ $(g).on("contextmenu", function(evt) {
+ console.log("graph click");
+ var currentTarget = evt.currentTarget;
+
+ self.rightClick.graph(evt, self.model, currentTarget.data);
+ return false;
+ });
+ }
+ };
+
+ $(".node").each(function(d,i) {
+ $(this).tooltip({
+ container: self.tooltipcontainer,
+ delay: {
+ show: 500,
+ hide: 100
+ }
+ });
+ });
+
+ return bounds;
+ },
+
+ handleDisabledChange: function(evt) {
+ this.changeDisabled(this.model.get('data'));
+ },
+
+ changeDisabled: function(data) {
+ for (var i = 0; i < data.nodes.length; ++i) {
+ var node = data.nodes[i];
+ if (node.disabled) {
+ if (node.gNode) {
+ addClass(node.gNode, "nodeDisabled");
+ $(node.gNode).attr("title", "DISABLED (" + node.type + ")").tooltip('fixTitle');
+ }
+ }
+ else {
+ if (node.gNode) {
+ removeClass(node.gNode, "nodeDisabled");
+ $(node.gNode).attr("title", node.status + " (" + node.type + ")").tooltip('fixTitle');
+ }
+ if (node.type=='flow') {
+ this.changeDisabled(node);
+ }
+ }
+ }
+ },
+
+ assignInitialStatus: function(evt, data) {
+ for (var i = 0; i < data.nodes.length; ++i) {
+ var updateNode = data.nodes[i];
+ var g = updateNode.gNode;
+ var initialStatus = updateNode.status ? updateNode.status : "READY";
+
+ addClass(g, initialStatus);
+ var title = initialStatus + " (" + updateNode.type + ")";
+
+ if (updateNode.disabled) {
+ addClass(g, "nodeDisabled");
+ title = "DISABLED (" + updateNode.type + ")";
+ }
+ $(g).attr("title", title);
+ }
+ },
+
+ changeSelected: function(self) {
+ console.log("change selected");
+ var selected = this.model.get("selected");
+ var previous = this.model.previous("selected");
+
+ if (previous) {
+ // Unset previous
+ removeClass(previous.gNode, "selected");
+ }
+
+ if (selected) {
+ this.propagateExpansion(selected);
+ var g = selected.gNode;
+ addClass(g, "selected");
+
+ console.log(this.model.get("autoPanZoom"));
+ if (this.model.get("autoPanZoom")) {
+ this.centerNode(selected);
+ }
+ }
+ },
+
+ propagateExpansion: function(node) {
+ if (node.parent.type) {
+ this.propagateExpansion(node.parent);
+ this.expandFlow(node.parent);
+ }
+ },
+
+ handleStatusUpdate: function(evt) {
+ var updateData = this.model.get("update");
+ var data = this.model.get("data");
+ this.updateStatusChanges(updateData, data);
+ },
+
+ updateStatusChanges: function(updateData, data) {
+ // Assumes all changes have been applied.
+ if (updateData.nodes) {
+ var nodeMap = data.nodeMap;
+ for (var i = 0; i < updateData.nodes.length; ++i) {
+ var node = updateData.nodes[i];
+ var nodeToUpdate = nodeMap[node.id];
+
+ var g = nodeToUpdate.gNode;
+ if (g) {
+ this.handleRemoveAllStatus(g);
+ addClass(g, nodeToUpdate.status);
+
+ var title = nodeToUpdate.status + " (" + nodeToUpdate.type + ")";
+ if (nodeToUpdate.disabled) {
+ addClass(g, "nodeDisabled");
+ title = "DISABLED (" + nodeToUpdate.type + ")";
+ }
+ $(g).attr("title", title).tooltip('fixTitle');
+
+ if (node.nodes) {
+ this.updateStatusChanges(node, nodeToUpdate);
+ }
+ }
+ }
+ }
+ },
handleRemoveAllStatus: function(gNode) {
- for (var j = 0; j < statusList.length; ++j) {
- var status = statusList[j];
- removeClass(gNode, status);
- }
- },
+ for (var j = 0; j < statusList.length; ++j) {
+ var status = statusList[j];
+ removeClass(gNode, status);
+ }
+ },
handleRightClick: function(self) {
- if (this.rightClick) {
- var callbacks = this.rightClick;
- var currentTarget = self.currentTarget;
- if (callbacks.node && currentTarget.jobid) {
- callbacks.node(self, this.model, currentTarget.nodeobj);
- }
- else if (callbacks.edge &&
- (currentTarget.nodeName == "polyline" ||
- currentTarget.nodeName == "line")) {
- callbacks.edge(self, this.model);
- }
- else if (callbacks.graph) {
- callbacks.graph(self, this.model);
- }
- return false;
- }
- return true;
- },
-
- drawEdge: function(self, edge, g) {
- var svg = this.svg;
- var svgns = self.svgns;
-
- var startNode = edge.fromNode;
- var endNode = edge.toNode;
-
- var startPointY = startNode.y + startNode.height/2;
- var endPointY = endNode.y - endNode.height/2;
-
- if (edge.guides) {
- // Create guide array
- var pointArray = new Array();
- pointArray.push([startNode.x, startPointY]);
- for (var i = 0; i < edge.guides.length; ++i ) {
- var edgeGuidePoint = edge.guides[i];
- pointArray.push([edgeGuidePoint.x, edgeGuidePoint.y]);
- }
- pointArray.push([endNode.x, endPointY]);
-
- edge.line = svg.polyline(g, pointArray, {class:"edge", fill:"none"});
- edge.line.data = edge;
- edge.oldpoints = pointArray;
- }
- else {
- edge.line = svg.line(g, startNode.x, startPointY, endNode.x, endPointY, {class:"edge"});
- edge.line.data = edge;
- }
- },
-
- drawNode: function(self, node, g) {
- if (node.type == 'flow') {
- this.drawFlowNode(self, node, g);
- }
- else {
- this.drawBoxNode(self, node, g);
- }
- },
-
- moveNodes: function(nodes) {
- var svg = this.svg;
- for (var i = 0; i < nodes.length; ++i) {
- var node = nodes[i];
- var gNode = node.gNode;
-
- svg.change(gNode, {"transform": translateStr(node.x, node.y)});
- }
- },
-
- expandFlow: function(node) {
- var svg = this.svg;
- var gnode = node.gNode;
- node.expanded = true;
-
- var innerG = gnode.innerG;
- var borderRect = innerG.borderRect;
- var labelG = innerG.labelG;
-
- var bbox;
- if (!innerG.expandedFlow) {
- var topmargin= 30, bottommargin=5;
- var hmargin = 10;
-
- var expandedFlow = svg.group(innerG, "", {class: "expandedGraph"});
- this.renderGraph(node, expandedFlow);
- innerG.expandedFlow = expandedFlow;
- removeClass(innerG, "collapsed");
- addClass(innerG, "expanded");
- node.expandedWidth = node.width;
- node.expandedHeight = node.height;
- }
- else {
- $(innerG.expandedFlow).show();
- removeClass(innerG, "collapsed");
- addClass(innerG, "expanded");
- node.width = node.expandedWidth;
- node.height = node.expandedHeight;
- }
-
- this.relayoutFlow(node);
-
- var bounds = this.calculateBounds(this.model.get("data").nodes);
-
- var margin = this.graphMargin;
- bounds.minX = bounds.minX ? bounds.minX - margin : -margin;
- bounds.minY = bounds.minY ? bounds.minY - margin : -margin;
- bounds.maxX = bounds.maxX ? bounds.maxX + margin : margin;
- bounds.maxY = bounds.maxY ? bounds.maxY + margin : margin;
- this.graphBounds = bounds;
- },
-
- collapseFlow: function(node) {
- console.log("Collapsing flow");
- var svg = this.svg;
- var gnode = node.gNode;
- node.expanded = false;
-
- var innerG = gnode.innerG;
- var borderRect = innerG.borderRect;
- var labelG = innerG.labelG;
-
- removeClass(innerG, "expanded");
- addClass(innerG, "collapsed");
-
- node.height = node.collapsedHeight;
- node.width = node.collapsedWidth;
-
- $(innerG.expandedFlow).hide();
- this.relayoutFlow(node);
-
- var bounds = this.calculateBounds(this.model.get("data").nodes);
-
- var margin = this.graphMargin;
- bounds.minX = bounds.minX ? bounds.minX - margin : -margin;
- bounds.minY = bounds.minY ? bounds.minY - margin : -margin;
- bounds.maxX = bounds.maxX ? bounds.maxX + margin : margin;
- bounds.maxY = bounds.maxY ? bounds.maxY + margin : margin;
- this.graphBounds = bounds;
- },
-
- relayoutFlow: function(node) {
- if (node.expanded) {
- this.layoutExpandedFlowNode(node);
- }
-
- var parent = node.parent;
- if (parent) {
- layoutGraph(parent.nodes, parent.edges, 10);
- this.relayoutFlow(parent);
- // Move all points again.
- this.moveNodeEdges(parent.nodes, parent.edges);
- this.animateExpandedFlowNode(node, 250);
- }
- },
-
- moveNodeEdges: function(nodes, edges) {
- var svg = this.svg;
- for (var i = 0; i < nodes.length; ++i) {
- var node = nodes[i];
- var gNode = node.gNode;
-
- $(gNode).animate({"svgTransform": translateStr(node.x, node.y)}, 250);
- }
-
- for (var j = 0; j < edges.length; ++j) {
- var edge = edges[j];
- var startNode = edge.fromNode;
- var endNode = edge.toNode;
- var line = edge.line;
-
- var startPointY = startNode.y + startNode.height/2;
- var endPointY = endNode.y - endNode.height/2;
-
- if (edge.guides) {
- // Create guide array
- var pointArray = new Array();
- pointArray.push([startNode.x, startPointY]);
- for (var i = 0; i < edge.guides.length; ++i ) {
- var edgeGuidePoint = edge.guides[i];
- pointArray.push([edgeGuidePoint.x, edgeGuidePoint.y]);
- }
- pointArray.push([endNode.x, endPointY]);
-
- animatePolylineEdge(svg, edge, pointArray, 250);
- edge.oldpoints = pointArray;
- }
- else {
- $(line).animate({
- svgX1: startNode.x,
- svgY1: startPointY,
- svgX2: endNode.x,
- svgY2: endPointY
- });
- }
- }
- },
-
- calculateBounds: function(nodes) {
- var bounds = {};
- var node = nodes[0];
- bounds.minX = node.x - 10;
- bounds.minY = node.y - 10;
- bounds.maxX = node.x + 10;
- bounds.maxY = node.y + 10;
-
- for (var i = 0; i < nodes.length; ++i) {
- node = nodes[i];
- var centerX = node.width/2;
- var centerY = node.height/2;
-
- var minX = node.x - centerX;
- var minY = node.y - centerY;
- var maxX = node.x + centerX;
- var maxY = node.y + centerY;
-
- bounds.minX = Math.min(bounds.minX, minX);
- bounds.minY = Math.min(bounds.minY, minY);
- bounds.maxX = Math.max(bounds.maxX, maxX);
- bounds.maxY = Math.max(bounds.maxY, maxY);
- }
- bounds.width = bounds.maxX - bounds.minX;
- bounds.height = bounds.maxY - bounds.minY;
-
- return bounds;
- },
-
- drawBoxNode: function(self, node, g) {
- var svg = this.svg;
- var horizontalMargin = 8;
- var verticalMargin = 2;
-
- var nodeG = svg.group(g, "", {class:"node jobnode"});
-
- var innerG = svg.group(nodeG, "", {class:"nodebox"});
- var borderRect = svg.rect(innerG, 0, 0, 10, 10, 3, 3, {class: "border"});
- var jobNameText = svg.text(innerG, horizontalMargin, 16, node.label);
- nodeG.innerG = innerG;
- innerG.borderRect = borderRect;
-
- var labelBBox = jobNameText.getBBox();
- var totalWidth = labelBBox.width + 2*horizontalMargin;
- var totalHeight = labelBBox.height + 2*verticalMargin;
- svg.change(borderRect, {width: totalWidth, height: totalHeight});
- svg.change(jobNameText, {y: (totalHeight + labelBBox.height)/2 - 3});
- svg.change(innerG, {transform: translateStr(-totalWidth/2, -totalHeight/2)});
-
- node.width=totalWidth;
- node.height=totalHeight;
-
- node.gNode = nodeG;
- nodeG.data = node;
- },
-
- drawFlowNode: function(self, node, g) {
- var svg = this.svg;
-
- // Base flow node
- var nodeG = svg.group(g, "", {"class": "node flownode"});
-
- // Create all the elements
- var innerG = svg.group(nodeG, "", {class: "nodebox collapsed"});
- var borderRect = svg.rect(innerG, 0, 0, 10, 10, 3, 3, {class: "flowborder"});
-
- // Create label
- var labelG = svg.group(innerG);
- var iconHeight = 20;
- var iconWidth = 21;
- var textOffset = iconWidth + 4;
- var jobNameText = svg.text(labelG, textOffset, 1, node.label);
- var flowIdText = svg.text(labelG, textOffset, 11, node.flowId, {"font-size": 8})
- var tempLabelG = labelG.getBBox();
- var iconImage = svg.image(
- labelG, 0, -iconHeight/2, iconWidth, iconHeight,
- contextURL + "/images/graph-icon.png", {});
-
- // Assign key values to make searching quicker
- node.gNode=nodeG;
- nodeG.data=node;
-
- // Do this because jquery svg selectors don't work
- nodeG.innerG = innerG;
- innerG.borderRect = borderRect;
- innerG.labelG = labelG;
-
- // Layout everything in the node
- this.layoutFlowNode(self, node);
- },
-
- layoutFlowNode: function(self, node) {
- var svg = this.svg;
- var horizontalMargin = 8;
- var verticalMargin = 2;
-
- var gNode = node.gNode;
- var innerG = gNode.innerG;
- var borderRect = innerG.borderRect;
- var labelG = innerG.labelG;
-
- var labelBBox = labelG.getBBox();
- var totalWidth = labelBBox.width + 2*horizontalMargin;
- var totalHeight = labelBBox.height + 2*verticalMargin;
-
- svg.change(labelG, {transform: translateStr(horizontalMargin, labelBBox.height/2 + verticalMargin)});
- svg.change(innerG, {transform: translateStr(-totalWidth/2, -totalHeight/2)});
- svg.change(borderRect, {width: totalWidth, height: totalHeight});
-
- node.height = totalHeight;
- node.width = totalWidth;
- node.collapsedHeight = totalHeight;
- node.collapsedWidth = totalWidth;
- },
-
- layoutExpandedFlowNode: function(node) {
- var svg = this.svg;
- var topmargin= 30, bottommargin=5;
- var hmargin = 10;
-
- var gNode = node.gNode;
- var innerG = gNode.innerG;
- var borderRect = innerG.borderRect;
- var labelG = innerG.labelG;
- var expandedFlow = innerG.expandedFlow;
-
- var bound = this.calculateBounds(node.nodes);
-
- node.height = bound.height + topmargin + bottommargin;
- node.width = bound.width + hmargin*2;
- svg.change(expandedFlow, {transform: translateStr(-bound.minX + hmargin, -bound.minY + topmargin)});
- //$(innerG).animate({svgTransform: translateStr(-node.width/2, -node.height/2)}, 50);
- //$(borderRect).animate({svgWidth: node.width, svgHeight: node.height}, 50);
- },
-
- animateExpandedFlowNode: function(node, time) {
- var gNode = node.gNode;
- var innerG = gNode.innerG;
- var borderRect = innerG.borderRect;
-
- $(innerG).animate({svgTransform: translateStr(-node.width/2, -node.height/2)}, time);
- $(borderRect).animate({svgWidth: node.width, svgHeight: node.height}, time);
- $(borderRect).animate({svgFill: 'white'}, time);
- },
-
- resetPanZoom: function(duration) {
- var bounds = this.graphBounds;
- var param = {
- x: bounds.minX,
- y: bounds.minY,
- width: (bounds.maxX - bounds.minX),
- height: (bounds.maxY - bounds.minY), duration: duration
- };
-
- this.panZoom(param);
- },
-
- centerNode: function(node) {
- // The magic of affine transformation.
- // Multiply the inverse root matrix with the current matrix to get the node
- // position.
- // Rather do this than to traverse backwards through the scene graph.
- var ctm = node.gNode.getCTM();
- var transform = node.gNode.getTransformToElement();
- var globalCTM = this.mainG.getCTM().inverse();
- var otherTransform = globalCTM.multiply(ctm);
- // Also a beauty of affine transformation. The translate is always the
- // left most column of the matrix.
- var x = otherTransform.e - node.width/2;
- var y = otherTransform.f - node.height/2;
-
- this.panZoom({x: x, y: y, width: node.width, height: node.height});
- },
-
- globalNodePosition: function(gNode) {
- if (node.parent) {
-
- var parentPos = this.globalNodePosition(node.parent);
- return {x: parentPos.x + node.x, y: parentPos.y + node.y};
- }
- else {
- return {x: node.x, y: node.y};
- }
- },
-
- panZoom: function(params) {
- params.maxScale = 2;
- $(this.svgGraph).svgNavigate("transformToBox", params);
- }
+ if (this.rightClick) {
+ var callbacks = this.rightClick;
+ var currentTarget = self.currentTarget;
+ if (callbacks.node && currentTarget.jobid) {
+ callbacks.node(self, this.model, currentTarget.nodeobj);
+ }
+ else if (callbacks.edge &&
+ (currentTarget.nodeName == "polyline" ||
+ currentTarget.nodeName == "line")) {
+ callbacks.edge(self, this.model);
+ }
+ else if (callbacks.graph) {
+ callbacks.graph(self, this.model);
+ }
+ return false;
+ }
+ return true;
+ },
+
+ drawEdge: function(self, edge, g) {
+ var svg = this.svg;
+ var svgns = self.svgns;
+
+ var startNode = edge.fromNode;
+ var endNode = edge.toNode;
+
+ var startPointY = startNode.y + startNode.height/2;
+ var endPointY = endNode.y - endNode.height/2;
+
+ if (edge.guides) {
+ // Create guide array
+ var pointArray = new Array();
+ pointArray.push([startNode.x, startPointY]);
+ for (var i = 0; i < edge.guides.length; ++i ) {
+ var edgeGuidePoint = edge.guides[i];
+ pointArray.push([edgeGuidePoint.x, edgeGuidePoint.y]);
+ }
+ pointArray.push([endNode.x, endPointY]);
+
+ edge.line = svg.polyline(g, pointArray, {class:"edge", fill:"none"});
+ edge.line.data = edge;
+ edge.oldpoints = pointArray;
+ }
+ else {
+ edge.line = svg.line(g, startNode.x, startPointY, endNode.x, endPointY, {class:"edge"});
+ edge.line.data = edge;
+ }
+ },
+
+ drawNode: function(self, node, g) {
+ if (node.type == 'flow') {
+ this.drawFlowNode(self, node, g);
+ }
+ else {
+ this.drawBoxNode(self, node, g);
+ }
+ },
+
+ moveNodes: function(nodes) {
+ var svg = this.svg;
+ for (var i = 0; i < nodes.length; ++i) {
+ var node = nodes[i];
+ var gNode = node.gNode;
+
+ svg.change(gNode, {"transform": translateStr(node.x, node.y)});
+ }
+ },
+
+ expandFlow: function(node) {
+ var svg = this.svg;
+ var gnode = node.gNode;
+ node.expanded = true;
+
+ var innerG = gnode.innerG;
+ var borderRect = innerG.borderRect;
+ var labelG = innerG.labelG;
+
+ var bbox;
+ if (!innerG.expandedFlow) {
+ var topmargin= 30, bottommargin=5;
+ var hmargin = 10;
+
+ var expandedFlow = svg.group(innerG, "", {class: "expandedGraph"});
+ this.renderGraph(node, expandedFlow);
+ innerG.expandedFlow = expandedFlow;
+ removeClass(innerG, "collapsed");
+ addClass(innerG, "expanded");
+ node.expandedWidth = node.width;
+ node.expandedHeight = node.height;
+ }
+ else {
+ $(innerG.expandedFlow).show();
+ removeClass(innerG, "collapsed");
+ addClass(innerG, "expanded");
+ node.width = node.expandedWidth;
+ node.height = node.expandedHeight;
+ }
+
+ this.relayoutFlow(node);
+
+ var bounds = this.calculateBounds(this.model.get("data").nodes);
+
+ var margin = this.graphMargin;
+ bounds.minX = bounds.minX ? bounds.minX - margin : -margin;
+ bounds.minY = bounds.minY ? bounds.minY - margin : -margin;
+ bounds.maxX = bounds.maxX ? bounds.maxX + margin : margin;
+ bounds.maxY = bounds.maxY ? bounds.maxY + margin : margin;
+ this.graphBounds = bounds;
+ },
+
+ collapseFlow: function(node) {
+ console.log("Collapsing flow");
+ var svg = this.svg;
+ var gnode = node.gNode;
+ node.expanded = false;
+
+ var innerG = gnode.innerG;
+ var borderRect = innerG.borderRect;
+ var labelG = innerG.labelG;
+
+ removeClass(innerG, "expanded");
+ addClass(innerG, "collapsed");
+
+ node.height = node.collapsedHeight;
+ node.width = node.collapsedWidth;
+
+ $(innerG.expandedFlow).hide();
+ this.relayoutFlow(node);
+
+ var bounds = this.calculateBounds(this.model.get("data").nodes);
+
+ var margin = this.graphMargin;
+ bounds.minX = bounds.minX ? bounds.minX - margin : -margin;
+ bounds.minY = bounds.minY ? bounds.minY - margin : -margin;
+ bounds.maxX = bounds.maxX ? bounds.maxX + margin : margin;
+ bounds.maxY = bounds.maxY ? bounds.maxY + margin : margin;
+ this.graphBounds = bounds;
+ },
+
+ relayoutFlow: function(node) {
+ if (node.expanded) {
+ this.layoutExpandedFlowNode(node);
+ }
+
+ var parent = node.parent;
+ if (parent) {
+ layoutGraph(parent.nodes, parent.edges, 10);
+ this.relayoutFlow(parent);
+ // Move all points again.
+ this.moveNodeEdges(parent.nodes, parent.edges);
+ this.animateExpandedFlowNode(node, 250);
+ }
+ },
+
+ moveNodeEdges: function(nodes, edges) {
+ var svg = this.svg;
+ for (var i = 0; i < nodes.length; ++i) {
+ var node = nodes[i];
+ var gNode = node.gNode;
+
+ $(gNode).animate({"svgTransform": translateStr(node.x, node.y)}, 250);
+ }
+
+ for (var j = 0; j < edges.length; ++j) {
+ var edge = edges[j];
+ var startNode = edge.fromNode;
+ var endNode = edge.toNode;
+ var line = edge.line;
+
+ var startPointY = startNode.y + startNode.height/2;
+ var endPointY = endNode.y - endNode.height/2;
+
+ if (edge.guides) {
+ // Create guide array
+ var pointArray = new Array();
+ pointArray.push([startNode.x, startPointY]);
+ for (var i = 0; i < edge.guides.length; ++i ) {
+ var edgeGuidePoint = edge.guides[i];
+ pointArray.push([edgeGuidePoint.x, edgeGuidePoint.y]);
+ }
+ pointArray.push([endNode.x, endPointY]);
+
+ animatePolylineEdge(svg, edge, pointArray, 250);
+ edge.oldpoints = pointArray;
+ }
+ else {
+ $(line).animate({
+ svgX1: startNode.x,
+ svgY1: startPointY,
+ svgX2: endNode.x,
+ svgY2: endPointY
+ });
+ }
+ }
+ },
+
+ calculateBounds: function(nodes) {
+ var bounds = {};
+ var node = nodes[0];
+ bounds.minX = node.x - 10;
+ bounds.minY = node.y - 10;
+ bounds.maxX = node.x + 10;
+ bounds.maxY = node.y + 10;
+
+ for (var i = 0; i < nodes.length; ++i) {
+ node = nodes[i];
+ var centerX = node.width/2;
+ var centerY = node.height/2;
+
+ var minX = node.x - centerX;
+ var minY = node.y - centerY;
+ var maxX = node.x + centerX;
+ var maxY = node.y + centerY;
+
+ bounds.minX = Math.min(bounds.minX, minX);
+ bounds.minY = Math.min(bounds.minY, minY);
+ bounds.maxX = Math.max(bounds.maxX, maxX);
+ bounds.maxY = Math.max(bounds.maxY, maxY);
+ }
+ bounds.width = bounds.maxX - bounds.minX;
+ bounds.height = bounds.maxY - bounds.minY;
+
+ return bounds;
+ },
+
+ drawBoxNode: function(self, node, g) {
+ var svg = this.svg;
+ var horizontalMargin = 8;
+ var verticalMargin = 2;
+
+ var nodeG = svg.group(g, "", {class:"node jobnode"});
+
+ var innerG = svg.group(nodeG, "", {class:"nodebox"});
+ var borderRect = svg.rect(innerG, 0, 0, 10, 10, 3, 3, {class: "border"});
+ var jobNameText = svg.text(innerG, horizontalMargin, 16, node.label);
+ nodeG.innerG = innerG;
+ innerG.borderRect = borderRect;
+
+ var labelBBox = jobNameText.getBBox();
+ var totalWidth = labelBBox.width + 2*horizontalMargin;
+ var totalHeight = labelBBox.height + 2*verticalMargin;
+ svg.change(borderRect, {width: totalWidth, height: totalHeight});
+ svg.change(jobNameText, {y: (totalHeight + labelBBox.height)/2 - 3});
+ svg.change(innerG, {transform: translateStr(-totalWidth/2, -totalHeight/2)});
+
+ node.width=totalWidth;
+ node.height=totalHeight;
+
+ node.gNode = nodeG;
+ nodeG.data = node;
+ },
+
+ drawFlowNode: function(self, node, g) {
+ var svg = this.svg;
+
+ // Base flow node
+ var nodeG = svg.group(g, "", {"class": "node flownode"});
+
+ // Create all the elements
+ var innerG = svg.group(nodeG, "", {class: "nodebox collapsed"});
+ var borderRect = svg.rect(innerG, 0, 0, 10, 10, 3, 3, {class: "flowborder"});
+
+ // Create label
+ var labelG = svg.group(innerG);
+ var iconHeight = 20;
+ var iconWidth = 21;
+ var textOffset = iconWidth + 4;
+ var jobNameText = svg.text(labelG, textOffset, 1, node.label);
+ var flowIdText = svg.text(labelG, textOffset, 11, node.flowId, {"font-size": 8});
+ var tempLabelG = labelG.getBBox();
+ var iconImage = svg.image(
+ labelG, 0, -iconHeight/2, iconWidth, iconHeight,
+ contextURL + "/images/graph-icon.png", {});
+
+ // Assign key values to make searching quicker
+ node.gNode=nodeG;
+ nodeG.data=node;
+
+ // Do this because jquery svg selectors don't work
+ nodeG.innerG = innerG;
+ innerG.borderRect = borderRect;
+ innerG.labelG = labelG;
+
+ // Layout everything in the node
+ this.layoutFlowNode(self, node);
+ },
+
+ layoutFlowNode: function(self, node) {
+ var svg = this.svg;
+ var horizontalMargin = 8;
+ var verticalMargin = 2;
+
+ var gNode = node.gNode;
+ var innerG = gNode.innerG;
+ var borderRect = innerG.borderRect;
+ var labelG = innerG.labelG;
+
+ var labelBBox = labelG.getBBox();
+ var totalWidth = labelBBox.width + 2*horizontalMargin;
+ var totalHeight = labelBBox.height + 2*verticalMargin;
+
+ svg.change(labelG, {transform: translateStr(horizontalMargin, labelBBox.height/2 + verticalMargin)});
+ svg.change(innerG, {transform: translateStr(-totalWidth/2, -totalHeight/2)});
+ svg.change(borderRect, {width: totalWidth, height: totalHeight});
+
+ node.height = totalHeight;
+ node.width = totalWidth;
+ node.collapsedHeight = totalHeight;
+ node.collapsedWidth = totalWidth;
+ },
+
+ layoutExpandedFlowNode: function(node) {
+ var svg = this.svg;
+ var topmargin= 30, bottommargin=5;
+ var hmargin = 10;
+
+ var gNode = node.gNode;
+ var innerG = gNode.innerG;
+ var borderRect = innerG.borderRect;
+ var labelG = innerG.labelG;
+ var expandedFlow = innerG.expandedFlow;
+
+ var bound = this.calculateBounds(node.nodes);
+
+ node.height = bound.height + topmargin + bottommargin;
+ node.width = bound.width + hmargin*2;
+ svg.change(expandedFlow, {transform: translateStr(-bound.minX + hmargin, -bound.minY + topmargin)});
+ //$(innerG).animate({svgTransform: translateStr(-node.width/2, -node.height/2)}, 50);
+ //$(borderRect).animate({svgWidth: node.width, svgHeight: node.height}, 50);
+ },
+
+ animateExpandedFlowNode: function(node, time) {
+ var gNode = node.gNode;
+ var innerG = gNode.innerG;
+ var borderRect = innerG.borderRect;
+
+ $(innerG).animate({svgTransform: translateStr(-node.width/2, -node.height/2)}, time);
+ $(borderRect).animate({svgWidth: node.width, svgHeight: node.height}, time);
+ $(borderRect).animate({svgFill: 'white'}, time);
+ },
+
+ resetPanZoom: function(duration) {
+ var bounds = this.graphBounds;
+ var param = {
+ x: bounds.minX,
+ y: bounds.minY,
+ width: (bounds.maxX - bounds.minX),
+ height: (bounds.maxY - bounds.minY), duration: duration
+ };
+
+ this.panZoom(param);
+ },
+
+ centerNode: function(node) {
+ // The magic of affine transformation.
+ // Multiply the inverse root matrix with the current matrix to get the node
+ // position.
+ // Rather do this than to traverse backwards through the scene graph.
+ var ctm = node.gNode.getCTM();
+ var transform = node.gNode.getTransformToElement();
+ var globalCTM = this.mainG.getCTM().inverse();
+ var otherTransform = globalCTM.multiply(ctm);
+ // Also a beauty of affine transformation. The translate is always the
+ // left most column of the matrix.
+ var x = otherTransform.e - node.width/2;
+ var y = otherTransform.f - node.height/2;
+
+ this.panZoom({x: x, y: y, width: node.width, height: node.height});
+ },
+
+ globalNodePosition: function(node) {
+ if (node.parent) {
+
+ var parentPos = this.globalNodePosition(node.parent);
+ return {x: parentPos.x + node.x, y: parentPos.y + node.y};
+ }
+ else {
+ return {x: node.x, y: node.y};
+ }
+ },
+
+ panZoom: function(params) {
+ params.maxScale = 2;
+ $(this.svgGraph).svgNavigate("transformToBox", params);
+ }
});
src/web/js/azkaban/view/table-sort.js 248(+124 -124)
diff --git a/src/web/js/azkaban/view/table-sort.js b/src/web/js/azkaban/view/table-sort.js
index 40f05e8..968b7af 100644
--- a/src/web/js/azkaban/view/table-sort.js
+++ b/src/web/js/azkaban/view/table-sort.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -18,137 +18,137 @@ $.namespace('azkaban');
azkaban.TableSorter = Backbone.View.extend({
events: {
- "click .sortable": "handleClickSort"
+ "click .sortable": "handleClickSort"
},
initialize: function(settings) {
- $(this.el).addClass("sortableTable");
-
- var thead = $(this.el).children("thead");
- var th = $(thead).find("th");
-
- $(th).addClass("sortable");
- $("th.ignoresort").removeClass("sortable");
- var sortDiv = document.createElement("div");
-
- $(sortDiv).addClass("sortIcon");
-
- $(th).append(sortDiv);
-
- var tbody = $(this.el).children("tbody");
- var rows = $(tbody).children("tr");
-
- var row;
- for (var i = 0; i < rows.length; ++i ) {
- var nextRow = rows[i];
- if (row && $(nextRow).hasClass("childrow")) {
- if (!row.childRows) {
- row.childRows = new Array();
- }
- row.childRows.push(nextRow);
- }
- else {
- row = nextRow;
- }
- }
-
- if (settings.initialSort) {
- this.toggleSort(settings.initialSort);
- }
+ $(this.el).addClass("sortableTable");
+
+ var thead = $(this.el).children("thead");
+ var th = $(thead).find("th");
+
+ $(th).addClass("sortable");
+ $("th.ignoresort").removeClass("sortable");
+ var sortDiv = document.createElement("div");
+
+ $(sortDiv).addClass("sortIcon");
+
+ $(th).append(sortDiv);
+
+ var tbody = $(this.el).children("tbody");
+ var rows = $(tbody).children("tr");
+
+ var row;
+ for (var i = 0; i < rows.length; ++i ) {
+ var nextRow = rows[i];
+ if (row && $(nextRow).hasClass("childrow")) {
+ if (!row.childRows) {
+ row.childRows = new Array();
+ }
+ row.childRows.push(nextRow);
+ }
+ else {
+ row = nextRow;
+ }
+ }
+
+ if (settings.initialSort) {
+ this.toggleSort(settings.initialSort);
+ }
},
handleClickSort: function(evt) {
- this.toggleSort(evt.currentTarget);
+ this.toggleSort(evt.currentTarget);
},
-
- toggleSort: function(th) {
- console.log("sorting by index " + $(th).index());
- if ($(th).hasClass("asc")) {
- $(th).removeClass("asc");
- $(th).addClass("desc");
- // Sort to descending
-
- this.sort($(th).index(), true);
- }
- else if ($(th).hasClass("desc")) {
- $(th).removeClass("desc");
- $(th).addClass("asc");
-
- this.sort($(th).index(), false);
- }
- else {
- $(th).parent().children(".sortable").removeClass("asc").removeClass("desc");
- $(th).addClass("asc");
-
- this.sort($(th).index(), false);
- }
+
+ toggleSort: function(th) {
+ console.log("sorting by index " + $(th).index());
+ if ($(th).hasClass("asc")) {
+ $(th).removeClass("asc");
+ $(th).addClass("desc");
+ // Sort to descending
+
+ this.sort($(th).index(), true);
+ }
+ else if ($(th).hasClass("desc")) {
+ $(th).removeClass("desc");
+ $(th).addClass("asc");
+
+ this.sort($(th).index(), false);
+ }
+ else {
+ $(th).parent().children(".sortable").removeClass("asc").removeClass("desc");
+ $(th).addClass("asc");
+
+ this.sort($(th).index(), false);
+ }
},
-
- sort: function(index, desc) {
- var tbody = $(this.el).children("tbody");
- var rows = $(tbody).children("tr");
-
- var tdToSort = new Array();
- for (var i = 0; i < rows.length; ++i) {
- var row = rows[i];
- if (!$(row).hasClass("childrow")) {
- var td = row.children[index];
- tdToSort.push(td);
- }
- }
-
- if (desc) {
- tdToSort.sort(function(a,b) {
- var texta = $(a).text().trim().toLowerCase();
- var textb = $(b).text().trim().toLowerCase();
-
- if (texta < textb) {
- return 1;
- }
- else if (texta > textb) {
- return -1;
- }
- else {
- return 0;
- }
- });
- }
- else {
- tdToSort.sort(function(a,b) {
- var texta = $(a).text().trim().toLowerCase();
- var textb = $(b).text().trim().toLowerCase();
-
- if (texta < textb) {
- return -1;
- }
- else if (texta > textb) {
- return 1;
- }
- else {
- return 0;
- }
- });
- }
-
- var sortedTR = new Array();
- for (var i = 0; i < tdToSort.length; ++i) {
- var tr = $(tdToSort[i]).parent();
- sortedTR.push(tr);
-
- var childRows = tr[0].childRows;
- if (childRows) {
- for(var j=0; j < childRows.length; ++j) {
- sortedTR.push(childRows[j]);
- }
- }
- }
-
- for (var i = 0; i < sortedTR.length; ++i) {
- $(tbody).append(sortedTR[i]);
- }
+
+ sort: function(index, desc) {
+ var tbody = $(this.el).children("tbody");
+ var rows = $(tbody).children("tr");
+
+ var tdToSort = new Array();
+ for (var i = 0; i < rows.length; ++i) {
+ var row = rows[i];
+ if (!$(row).hasClass("childrow")) {
+ var td = row.children[index];
+ tdToSort.push(td);
+ }
+ }
+
+ if (desc) {
+ tdToSort.sort(function(a,b) {
+ var texta = $(a).text().trim().toLowerCase();
+ var textb = $(b).text().trim().toLowerCase();
+
+ if (texta < textb) {
+ return 1;
+ }
+ else if (texta > textb) {
+ return -1;
+ }
+ else {
+ return 0;
+ }
+ });
+ }
+ else {
+ tdToSort.sort(function(a,b) {
+ var texta = $(a).text().trim().toLowerCase();
+ var textb = $(b).text().trim().toLowerCase();
+
+ if (texta < textb) {
+ return -1;
+ }
+ else if (texta > textb) {
+ return 1;
+ }
+ else {
+ return 0;
+ }
+ });
+ }
+
+ var sortedTR = new Array();
+ for (var i = 0; i < tdToSort.length; ++i) {
+ var tr = $(tdToSort[i]).parent();
+ sortedTR.push(tr);
+
+ var childRows = tr[0].childRows;
+ if (childRows) {
+ for(var j=0; j < childRows.length; ++j) {
+ sortedTR.push(childRows[j]);
+ }
+ }
+ }
+
+ for (var i = 0; i < sortedTR.length; ++i) {
+ $(tbody).append(sortedTR[i]);
+ }
},
render: function() {
- console.log("render sorted table");
+ console.log("render sorted table");
}
});
src/web/js/azkaban/view/time-graph.js 32(+16 -16)
diff --git a/src/web/js/azkaban/view/time-graph.js b/src/web/js/azkaban/view/time-graph.js
index 0f53fa4..a74a49c 100644
--- a/src/web/js/azkaban/view/time-graph.js
+++ b/src/web/js/azkaban/view/time-graph.js
@@ -17,19 +17,19 @@
$.namespace('azkaban');
azkaban.TimeGraphView = Backbone.View.extend({
- events: {
- },
+ events: {
+ },
- initialize: function(settings) {
- this.model.bind('render', this.render, this);
- this.model.bind('change:page', this.render, this);
+ initialize: function(settings) {
+ this.model.bind('render', this.render, this);
+ this.model.bind('change:page', this.render, this);
this.modelField = settings.modelField;
this.graphContainer = settings.el;
this.render();
- },
+ },
- render: function(self) {
- var series = this.model.get(this.modelField);
+ render: function(self) {
+ var series = this.model.get(this.modelField);
if (series == null) {
return;
}
@@ -40,7 +40,7 @@ azkaban.TimeGraphView = Backbone.View.extend({
// Map of y value to index for faster look-up in the lineColorsCallback to
// get the status for each point.
var indexMap = {};
- for (var i = 0; i < series.length; ++i) {
+ for (var i = 0; i < series.length; ++i) {
if (series[i].startTime == null || series[i].endTime == null) {
console.log("Each element in series must have startTime and endTime");
return;
@@ -67,13 +67,13 @@ azkaban.TimeGraphView = Backbone.View.extend({
indexMap[endTime.toString()] = i;
}
- if (data.length == 0) {
- $(this.graphContainer).hide();
- return;
- }
+ if (data.length == 0) {
+ $(this.graphContainer).hide();
+ return;
+ }
- var graphDiv = document.createElement('div');
- $(this.graphContainer).html(graphDiv);
+ var graphDiv = document.createElement('div');
+ $(this.graphContainer).html(graphDiv);
var lineColorsCallback = function(row, sidx, type) {
if (type != 'point') {
@@ -127,5 +127,5 @@ azkaban.TimeGraphView = Backbone.View.extend({
yLabelFormat: yLabelFormatCallback,
hoverCallback: hoverCallback
});
- }
+ }
});
src/web/js/azkaban/view/triggers.js 362(+181 -181)
diff --git a/src/web/js/azkaban/view/triggers.js b/src/web/js/azkaban/view/triggers.js
index 83771e9..a9c15a8 100644
--- a/src/web/js/azkaban/view/triggers.js
+++ b/src/web/js/azkaban/view/triggers.js
@@ -1,12 +1,12 @@
/*
* Copyright 2012 LinkedIn Corp.
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -17,10 +17,10 @@
$.namespace('azkaban');
function expireTrigger(triggerId) {
- var triggerURL = contextURL + "/triggers"
- var redirectURL = contextURL + "/triggers"
- var requestData = {"ajax": "expireTrigger", "triggerId": triggerId};
- var successHandler = function(data) {
+ var triggerURL = contextURL + "/triggers"
+ var redirectURL = contextURL + "/triggers"
+ var requestData = {"ajax": "expireTrigger", "triggerId": triggerId};
+ var successHandler = function(data) {
if (data.error) {
//alert(data.error)
$('#errorMsg').text(data.error);
@@ -30,14 +30,14 @@ function expireTrigger(triggerId) {
window.location = redirectURL;
}
};
- $.post(triggerURL, requestData, successHandler, "json");
+ $.post(triggerURL, requestData, successHandler, "json");
}
function removeSched(scheduleId) {
- var scheduleURL = contextURL + "/schedule"
- var redirectURL = contextURL + "/schedule"
- var requestData = {"action": "removeSched", "scheduleId": scheduleId};
- var successHandler = function(data) {
+ var scheduleURL = contextURL + "/schedule"
+ var redirectURL = contextURL + "/schedule"
+ var requestData = {"action": "removeSched", "scheduleId": scheduleId};
+ var successHandler = function(data) {
if (data.error) {
//alert(data.error)
$('#errorMsg').text(data.error);
@@ -47,14 +47,14 @@ function removeSched(scheduleId) {
window.location = redirectURL;
}
};
- $.post(scheduleURL, requestData, successHandler, "json");
+ $.post(scheduleURL, requestData, successHandler, "json");
}
function removeSla(scheduleId) {
- var scheduleURL = contextURL + "/schedule"
- var redirectURL = contextURL + "/schedule"
+ var scheduleURL = contextURL + "/schedule"
+ var redirectURL = contextURL + "/schedule"
var requestData = {"action": "removeSla", "scheduleId": scheduleId};
- var successHandler = function(data) {
+ var successHandler = function(data) {
if (data.error) {
//alert(data.error)
$('#errorMsg').text(data.error)
@@ -64,52 +64,52 @@ function removeSla(scheduleId) {
window.location = redirectURL
}
};
- $.post(scheduleURL, requestData, successHandler, "json");
+ $.post(scheduleURL, requestData, successHandler, "json");
}
azkaban.ChangeSlaView = Backbone.View.extend({
- events: {
- "click" : "closeEditingTarget",
- "click #set-sla-btn": "handleSetSla",
- "click #remove-sla-btn": "handleRemoveSla",
- "click #sla-cancel-btn": "handleSlaCancel",
- "click .modal-close": "handleSlaCancel",
- "click #addRow": "handleAddRow"
- },
-
+ events: {
+ "click" : "closeEditingTarget",
+ "click #set-sla-btn": "handleSetSla",
+ "click #remove-sla-btn": "handleRemoveSla",
+ "click #sla-cancel-btn": "handleSlaCancel",
+ "click .modal-close": "handleSlaCancel",
+ "click #addRow": "handleAddRow"
+ },
+
initialize: function(setting) {
- },
-
+ },
+
handleSlaCancel: function(evt) {
- console.log("Clicked cancel button");
- var scheduleURL = contextURL + "/schedule";
-
- $('#slaModalBackground').hide();
- $('#sla-options').hide();
-
- var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
- var rows = tFlowRules.rows;
- var rowLength = rows.length
- for (var i = 0; i < rowLength-1; i++) {
- tFlowRules.deleteRow(0);
- }
- },
-
- initFromSched: function(scheduleId, flowName) {
- this.scheduleId = scheduleId;
-
- var scheduleURL = contextURL + "/schedule"
- this.scheduleURL = scheduleURL;
- var indexToName = {};
- var nameToIndex = {};
- var indexToText = {};
- this.indexToName = indexToName;
- this.nameToIndex = nameToIndex;
- this.indexToText = indexToText;
- var ruleBoxOptions = ["SUCCESS", "FINISH"];
- this.ruleBoxOptions = ruleBoxOptions;
-
- var fetchScheduleData = {"scheduleId": this.scheduleId, "ajax": "slaInfo"};
+ console.log("Clicked cancel button");
+ var scheduleURL = contextURL + "/schedule";
+
+ $('#slaModalBackground').hide();
+ $('#sla-options').hide();
+
+ var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
+ var rows = tFlowRules.rows;
+ var rowLength = rows.length
+ for (var i = 0; i < rowLength-1; i++) {
+ tFlowRules.deleteRow(0);
+ }
+ },
+
+ initFromSched: function(scheduleId, flowName) {
+ this.scheduleId = scheduleId;
+
+ var scheduleURL = contextURL + "/schedule"
+ this.scheduleURL = scheduleURL;
+ var indexToName = {};
+ var nameToIndex = {};
+ var indexToText = {};
+ this.indexToName = indexToName;
+ this.nameToIndex = nameToIndex;
+ this.indexToText = indexToText;
+ var ruleBoxOptions = ["SUCCESS", "FINISH"];
+ this.ruleBoxOptions = ruleBoxOptions;
+
+ var fetchScheduleData = {"scheduleId": this.scheduleId, "ajax": "slaInfo"};
var successHandler = function(data) {
if (data.error) {
alert(data.error);
@@ -118,9 +118,9 @@ azkaban.ChangeSlaView = Backbone.View.extend({
if (data.slaEmails) {
$('#slaEmails').val(data.slaEmails.join());
}
-
+
var allJobNames = data.allJobNames;
-
+
indexToName[0] = "";
nameToIndex[flowName] = 0;
indexToText[0] = "flow " + flowName;
@@ -129,7 +129,7 @@ azkaban.ChangeSlaView = Backbone.View.extend({
nameToIndex[allJobNames[i-1]] = i;
indexToText[i] = "job " + allJobNames[i-1];
}
-
+
// populate with existing settings
if (data.settings) {
$('.durationpick').timepicker({hourMax: 99});
@@ -139,7 +139,7 @@ azkaban.ChangeSlaView = Backbone.View.extend({
var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
for (var setting in data.settings) {
var rFlowRule = tFlowRules.insertRow(0);
-
+
var cId = rFlowRule.insertCell(-1);
var idSelect = document.createElement("select");
for (var i in indexToName) {
@@ -147,9 +147,9 @@ azkaban.ChangeSlaView = Backbone.View.extend({
if (data.settings[setting].id == indexToName[i]) {
idSelect.options[i].selected = true;
}
- }
+ }
cId.appendChild(idSelect);
-
+
var cRule = rFlowRule.insertCell(-1);
var ruleSelect = document.createElement("select");
for (var i in ruleBoxOptions) {
@@ -159,7 +159,7 @@ azkaban.ChangeSlaView = Backbone.View.extend({
}
}
cRule.appendChild(ruleSelect);
-
+
var cDuration = rFlowRule.insertCell(-1);
var duration = document.createElement("input");
duration.type = "text";
@@ -181,7 +181,7 @@ azkaban.ChangeSlaView = Backbone.View.extend({
}
}
cEmail.appendChild(emailCheck);
-
+
var cKill = rFlowRule.insertCell(-1);
var killCheck = document.createElement("input");
killCheck.type = "checkbox";
@@ -195,19 +195,19 @@ azkaban.ChangeSlaView = Backbone.View.extend({
}
$('.durationpick').timepicker({hourMax: 99});
};
-
- $.get(this.scheduleURL, fetchScheduleData, successHandler, "json");
- $('#slaModalBackground').show();
- $('#sla-options').show();
-
+
+ $.get(this.scheduleURL, fetchScheduleData, successHandler, "json");
+ $('#slaModalBackground').show();
+ $('#sla-options').show();
+
//this.schedFlowOptions = sched.flowOptions
- console.log("Loaded schedule info. Ready to set SLA.");
- },
-
+ console.log("Loaded schedule info. Ready to set SLA.");
+ },
+
handleRemoveSla: function(evt) {
- console.log("Clicked remove sla button");
- var scheduleURL = this.scheduleURL;
- var redirectURL = this.scheduleURL;
+ console.log("Clicked remove sla button");
+ var scheduleURL = this.scheduleURL;
+ var redirectURL = this.scheduleURL;
var requestData = {"action": "removeSla", "scheduleId": this.scheduleId};
var successHandler = function(data) {
if (data.error) {
@@ -217,32 +217,32 @@ azkaban.ChangeSlaView = Backbone.View.extend({
window.location = redirectURL
}
};
- $.post(scheduleURL, requestData, successHandler, "json");
- },
-
+ $.post(scheduleURL, requestData, successHandler, "json");
+ },
+
handleSetSla: function(evt) {
- var slaEmails = $('#slaEmails').val();
- var settings = {};
-
- var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
- for (var row = 0; row < tFlowRules.rows.length - 1; row++) {
- var rFlowRule = tFlowRules.rows[row];
- var id = rFlowRule.cells[0].firstChild.value;
- var rule = rFlowRule.cells[1].firstChild.value;
- var duration = rFlowRule.cells[2].firstChild.value;
- var email = rFlowRule.cells[3].firstChild.checked;
- var kill = rFlowRule.cells[4].firstChild.checked;
- settings[row] = id + "," + rule + "," + duration + "," + email + "," + kill;
- }
-
- var slaData = {
- scheduleId: this.scheduleId,
- ajax: "setSla",
- slaEmails: slaEmails,
- settings: settings
- };
-
- var scheduleURL = this.scheduleURL;
+ var slaEmails = $('#slaEmails').val();
+ var settings = {};
+
+ var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
+ for (var row = 0; row < tFlowRules.rows.length - 1; row++) {
+ var rFlowRule = tFlowRules.rows[row];
+ var id = rFlowRule.cells[0].firstChild.value;
+ var rule = rFlowRule.cells[1].firstChild.value;
+ var duration = rFlowRule.cells[2].firstChild.value;
+ var email = rFlowRule.cells[3].firstChild.checked;
+ var kill = rFlowRule.cells[4].firstChild.checked;
+ settings[row] = id + "," + rule + "," + duration + "," + email + "," + kill;
+ }
+
+ var slaData = {
+ scheduleId: this.scheduleId,
+ ajax: "setSla",
+ slaEmails: slaEmails,
+ settings: settings
+ };
+
+ var scheduleURL = this.scheduleURL;
var successHandler = function(data) {
if (data.error) {
alert(data.error);
@@ -252,96 +252,96 @@ azkaban.ChangeSlaView = Backbone.View.extend({
window.location = scheduleURL;
}
};
-
- $.post(scheduleURL, slaData, successHandler, "json");
- },
-
+
+ $.post(scheduleURL, slaData, successHandler, "json");
+ },
+
handleAddRow: function(evt) {
- var indexToName = this.indexToName;
- var nameToIndex = this.nameToIndex;
- var indexToText = this.indexToText;
- var ruleBoxOptions = this.ruleBoxOptions;
-
- var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
- var rFlowRule = tFlowRules.insertRow(tFlowRules.rows.length-1);
-
- var cId = rFlowRule.insertCell(-1);
- var idSelect = document.createElement("select");
- for (var i in indexToName) {
- idSelect.options[i] = new Option(indexToText[i], indexToName[i]);
- }
-
- cId.appendChild(idSelect);
-
- var cRule = rFlowRule.insertCell(-1);
- var ruleSelect = document.createElement("select");
- for (var i in ruleBoxOptions) {
- ruleSelect.options[i] = new Option(ruleBoxOptions[i], ruleBoxOptions[i]);
- }
- cRule.appendChild(ruleSelect);
-
- var cDuration = rFlowRule.insertCell(-1);
- var duration = document.createElement("input");
- duration.type = "text";
- duration.setAttribute("class", "durationpick");
- cDuration.appendChild(duration);
-
- var cEmail = rFlowRule.insertCell(-1);
- var emailCheck = document.createElement("input");
- emailCheck.type = "checkbox";
- cEmail.appendChild(emailCheck);
-
- var cKill = rFlowRule.insertCell(-1);
- var killCheck = document.createElement("input");
- killCheck.type = "checkbox";
- cKill.appendChild(killCheck);
-
- $('.durationpick').timepicker({hourMax: 99});
- return rFlowRule;
- },
-
- handleEditColumn: function(evt) {
- var curTarget = evt.currentTarget;
-
- if (this.editingTarget != curTarget) {
- this.closeEditingTarget();
-
- var text = $(curTarget).children(".spanValue").text();
- $(curTarget).empty();
-
- var input = document.createElement("input");
- $(input).attr("type", "text");
- $(input).css("width", "100%");
- $(input).val(text);
- $(curTarget).addClass("editing");
- $(curTarget).append(input);
- $(input).focus();
- this.editingTarget = curTarget;
- }
- },
-
- handleRemoveColumn: function(evt) {
- var curTarget = evt.currentTarget;
- // Should be the table
- var row = curTarget.parentElement.parentElement;
- $(row).remove();
- },
-
- closeEditingTarget: function(evt) {
- }
+ var indexToName = this.indexToName;
+ var nameToIndex = this.nameToIndex;
+ var indexToText = this.indexToText;
+ var ruleBoxOptions = this.ruleBoxOptions;
+
+ var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
+ var rFlowRule = tFlowRules.insertRow(tFlowRules.rows.length-1);
+
+ var cId = rFlowRule.insertCell(-1);
+ var idSelect = document.createElement("select");
+ for (var i in indexToName) {
+ idSelect.options[i] = new Option(indexToText[i], indexToName[i]);
+ }
+
+ cId.appendChild(idSelect);
+
+ var cRule = rFlowRule.insertCell(-1);
+ var ruleSelect = document.createElement("select");
+ for (var i in ruleBoxOptions) {
+ ruleSelect.options[i] = new Option(ruleBoxOptions[i], ruleBoxOptions[i]);
+ }
+ cRule.appendChild(ruleSelect);
+
+ var cDuration = rFlowRule.insertCell(-1);
+ var duration = document.createElement("input");
+ duration.type = "text";
+ duration.setAttribute("class", "durationpick");
+ cDuration.appendChild(duration);
+
+ var cEmail = rFlowRule.insertCell(-1);
+ var emailCheck = document.createElement("input");
+ emailCheck.type = "checkbox";
+ cEmail.appendChild(emailCheck);
+
+ var cKill = rFlowRule.insertCell(-1);
+ var killCheck = document.createElement("input");
+ killCheck.type = "checkbox";
+ cKill.appendChild(killCheck);
+
+ $('.durationpick').timepicker({hourMax: 99});
+ return rFlowRule;
+ },
+
+ handleEditColumn: function(evt) {
+ var curTarget = evt.currentTarget;
+
+ if (this.editingTarget != curTarget) {
+ this.closeEditingTarget();
+
+ var text = $(curTarget).children(".spanValue").text();
+ $(curTarget).empty();
+
+ var input = document.createElement("input");
+ $(input).attr("type", "text");
+ $(input).css("width", "100%");
+ $(input).val(text);
+ $(curTarget).addClass("editing");
+ $(curTarget).append(input);
+ $(input).focus();
+ this.editingTarget = curTarget;
+ }
+ },
+
+ handleRemoveColumn: function(evt) {
+ var curTarget = evt.currentTarget;
+ // Should be the table
+ var row = curTarget.parentElement.parentElement;
+ $(row).remove();
+ },
+
+ closeEditingTarget: function(evt) {
+ }
});
var slaView;
var tableSorterView;
$(function() {
- var selected;
+ var selected;
- slaView = new azkaban.ChangeSlaView({el:$('#sla-options')});
- tableSorterView = new azkaban.TableSorter({el:$('#scheduledFlowsTbl')});
+ slaView = new azkaban.ChangeSlaView({el:$('#sla-options')});
+ tableSorterView = new azkaban.TableSorter({el:$('#scheduledFlowsTbl')});
/*
var requestURL = contextURL + "/manager";
- // Set up the Flow options view. Create a new one every time :p
+ // Set up the Flow options view. Create a new one every time :p
$('#addSlaBtn').click( function() {
slaView.show();
});
unit/java/azkaban/Scrubber.java 20(+10 -10)
diff --git a/unit/java/azkaban/Scrubber.java b/unit/java/azkaban/Scrubber.java
index 17c7d65..077bebf 100644
--- a/unit/java/azkaban/Scrubber.java
+++ b/unit/java/azkaban/Scrubber.java
@@ -7,14 +7,14 @@ import org.apache.log4j.Logger;
import azkaban.utils.DirectoryFlowLoader;
public class Scrubber {
- private static Logger logger = Logger.getLogger(Scrubber.class);
-
- public static void main(String[] args) {
- DirectoryFlowLoader loader = new DirectoryFlowLoader(logger);
-
- File baseDir = new File(args[0]);
- loader.loadProjectFlow(baseDir);
-
- loader.getFlowMap();
- }
+ private static Logger logger = Logger.getLogger(Scrubber.class);
+
+ public static void main(String[] args) {
+ DirectoryFlowLoader loader = new DirectoryFlowLoader(logger);
+
+ File baseDir = new File(args[0]);
+ loader.loadProjectFlow(baseDir);
+
+ loader.getFlowMap();
+ }
}
\ No newline at end of file
diff --git a/unit/java/azkaban/test/database/AzkabanDatabaseSetupTest.java b/unit/java/azkaban/test/database/AzkabanDatabaseSetupTest.java
index 0b2bd48..0774ee7 100644
--- a/unit/java/azkaban/test/database/AzkabanDatabaseSetupTest.java
+++ b/unit/java/azkaban/test/database/AzkabanDatabaseSetupTest.java
@@ -18,111 +18,113 @@ import azkaban.database.DataSourceUtils;
import azkaban.utils.Props;
public class AzkabanDatabaseSetupTest {
- @BeforeClass
- public static void setupDB() throws IOException, SQLException {
- File dbDir = new File("h2dbtest");
- if (dbDir.exists()) {
- FileUtils.deleteDirectory(dbDir);
- }
-
- dbDir.mkdir();
-
- clearUnitTestDB();
- }
-
- @AfterClass
- public static void teardownDB() {
- }
-
- @Test
- public void testH2Query() throws Exception {
- Props h2Props = getH2Props();
- AzkabanDatabaseSetup setup = new AzkabanDatabaseSetup(h2Props);
-
- // First time will create the tables
- setup.loadTableInfo();
- setup.printUpgradePlan();
- setup.updateDatabase(true, true);
- Assert.assertTrue(setup.needsUpdating());
-
- // Second time will update some tables. This is only for testing purpose and obviously we
- // wouldn't set things up this way.
- setup.loadTableInfo();
- setup.printUpgradePlan();
- setup.updateDatabase(true, true);
- Assert.assertTrue(setup.needsUpdating());
-
- // Nothing to be done
- setup.loadTableInfo();
- setup.printUpgradePlan();
- Assert.assertFalse(setup.needsUpdating());
- }
-
- @Test
- public void testMySQLQuery() throws Exception {
- Props mysqlProps = getMySQLProps();
- AzkabanDatabaseSetup setup = new AzkabanDatabaseSetup(mysqlProps);
-
- // First time will create the tables
- setup.loadTableInfo();
- setup.printUpgradePlan();
- setup.updateDatabase(true, true);
- Assert.assertTrue(setup.needsUpdating());
-
- // Second time will update some tables. This is only for testing purpose and obviously we
- // wouldn't set things up this way.
- setup.loadTableInfo();
- setup.printUpgradePlan();
- setup.updateDatabase(true, true);
- Assert.assertTrue(setup.needsUpdating());
-
- // Nothing to be done
- setup.loadTableInfo();
- setup.printUpgradePlan();
- Assert.assertFalse(setup.needsUpdating());
- }
-
- private static Props getH2Props() {
- Props props = new Props();
- props.put("database.type", "h2");
- props.put("h2.path", "h2dbtest/h2db");
- props.put("database.sql.scripts.dir", "unit/sql");
-
- return props;
- }
-
- private static Props getMySQLProps() {
- Props props = new Props();
-
- props.put("database.type", "mysql");
- props.put("mysql.port", "3306");
- props.put("mysql.host", "localhost");
- props.put("mysql.database", "azkabanunittest");
- props.put("mysql.user", "root");
- props.put("database.sql.scripts.dir", "unit/sql");
- props.put("mysql.password", "");
- props.put("mysql.numconnections", 10);
-
- return props;
- }
-
- private static void clearUnitTestDB() throws SQLException {
- Props props = new Props();
-
- props.put("database.type", "mysql");
- props.put("mysql.host", "localhost");
- props.put("mysql.port", "3306");
- props.put("mysql.database", "");
- props.put("mysql.user", "root");
- props.put("mysql.password", "");
- props.put("mysql.numconnections", 10);
-
- DataSource datasource = DataSourceUtils.getDataSource(props);
- QueryRunner runner = new QueryRunner(datasource);
- try {
- runner.update("drop database azkabanunittest");
- } catch (SQLException e) {
- }
- runner.update("create database azkabanunittest");
- }
+ @BeforeClass
+ public static void setupDB() throws IOException, SQLException {
+ File dbDir = new File("h2dbtest");
+ if (dbDir.exists()) {
+ FileUtils.deleteDirectory(dbDir);
+ }
+
+ dbDir.mkdir();
+
+ clearUnitTestDB();
+ }
+
+ @AfterClass
+ public static void teardownDB() {
+ }
+
+ @Test
+ public void testH2Query() throws Exception {
+ Props h2Props = getH2Props();
+ AzkabanDatabaseSetup setup = new AzkabanDatabaseSetup(h2Props);
+
+ // First time will create the tables
+ setup.loadTableInfo();
+ setup.printUpgradePlan();
+ setup.updateDatabase(true, true);
+ Assert.assertTrue(setup.needsUpdating());
+
+ // Second time will update some tables. This is only for testing purpose and
+ // obviously we
+ // wouldn't set things up this way.
+ setup.loadTableInfo();
+ setup.printUpgradePlan();
+ setup.updateDatabase(true, true);
+ Assert.assertTrue(setup.needsUpdating());
+
+ // Nothing to be done
+ setup.loadTableInfo();
+ setup.printUpgradePlan();
+ Assert.assertFalse(setup.needsUpdating());
+ }
+
+ @Test
+ public void testMySQLQuery() throws Exception {
+ Props mysqlProps = getMySQLProps();
+ AzkabanDatabaseSetup setup = new AzkabanDatabaseSetup(mysqlProps);
+
+ // First time will create the tables
+ setup.loadTableInfo();
+ setup.printUpgradePlan();
+ setup.updateDatabase(true, true);
+ Assert.assertTrue(setup.needsUpdating());
+
+ // Second time will update some tables. This is only for testing purpose and
+ // obviously we
+ // wouldn't set things up this way.
+ setup.loadTableInfo();
+ setup.printUpgradePlan();
+ setup.updateDatabase(true, true);
+ Assert.assertTrue(setup.needsUpdating());
+
+ // Nothing to be done
+ setup.loadTableInfo();
+ setup.printUpgradePlan();
+ Assert.assertFalse(setup.needsUpdating());
+ }
+
+ private static Props getH2Props() {
+ Props props = new Props();
+ props.put("database.type", "h2");
+ props.put("h2.path", "h2dbtest/h2db");
+ props.put("database.sql.scripts.dir", "unit/sql");
+
+ return props;
+ }
+
+ private static Props getMySQLProps() {
+ Props props = new Props();
+
+ props.put("database.type", "mysql");
+ props.put("mysql.port", "3306");
+ props.put("mysql.host", "localhost");
+ props.put("mysql.database", "azkabanunittest");
+ props.put("mysql.user", "root");
+ props.put("database.sql.scripts.dir", "unit/sql");
+ props.put("mysql.password", "");
+ props.put("mysql.numconnections", 10);
+
+ return props;
+ }
+
+ private static void clearUnitTestDB() throws SQLException {
+ Props props = new Props();
+
+ props.put("database.type", "mysql");
+ props.put("mysql.host", "localhost");
+ props.put("mysql.port", "3306");
+ props.put("mysql.database", "");
+ props.put("mysql.user", "root");
+ props.put("mysql.password", "");
+ props.put("mysql.numconnections", 10);
+
+ DataSource datasource = DataSourceUtils.getDataSource(props);
+ QueryRunner runner = new QueryRunner(datasource);
+ try {
+ runner.update("drop database azkabanunittest");
+ } catch (SQLException e) {
+ }
+ runner.update("create database azkabanunittest");
+ }
}
\ No newline at end of file
diff --git a/unit/java/azkaban/test/database/AzkabanDatabaseUpdaterTest.java b/unit/java/azkaban/test/database/AzkabanDatabaseUpdaterTest.java
index 9948db3..59b9061 100644
--- a/unit/java/azkaban/test/database/AzkabanDatabaseUpdaterTest.java
+++ b/unit/java/azkaban/test/database/AzkabanDatabaseUpdaterTest.java
@@ -17,77 +17,77 @@ import azkaban.database.DataSourceUtils;
import azkaban.utils.Props;
public class AzkabanDatabaseUpdaterTest {
- @BeforeClass
- public static void setupDB() throws IOException, SQLException {
- File dbDir = new File("h2dbtest");
- if (dbDir.exists()) {
- FileUtils.deleteDirectory(dbDir);
- }
-
- dbDir.mkdir();
-
- clearUnitTestDB();
- }
-
- @AfterClass
- public static void teardownDB() {
- }
-
- @Test
- public void testMySQLAutoCreate() throws Exception {
- String confDir = "unit/conf/dbtestmysql";
- System.out.println("1.***Now testing check");
- AzkabanDatabaseUpdater.main(new String[]{"-c",confDir});
-
- System.out.println("2.***Now testing update");
- AzkabanDatabaseUpdater.main(new String[]{ "-u", "-c",confDir});
-
- System.out.println("3.***Now testing check again");
- AzkabanDatabaseUpdater.main(new String[]{ "-c",confDir});
-
- System.out.println("4.***Now testing update again");
- AzkabanDatabaseUpdater.main(new String[]{ "-c",confDir, "-u"});
-
- System.out.println("5.***Now testing check again");
- AzkabanDatabaseUpdater.main(new String[]{ "-c",confDir});
- }
-
- @Test
- public void testH2AutoCreate() throws Exception {
- String confDir = "unit/conf/dbtesth2";
- System.out.println("1.***Now testing check");
- AzkabanDatabaseUpdater.main(new String[]{"-c",confDir});
-
- System.out.println("2.***Now testing update");
- AzkabanDatabaseUpdater.main(new String[]{ "-u", "-c",confDir});
-
- System.out.println("3.***Now testing check again");
- AzkabanDatabaseUpdater.main(new String[]{ "-c",confDir});
-
- System.out.println("4.***Now testing update again");
- AzkabanDatabaseUpdater.main(new String[]{ "-c",confDir, "-u"});
-
- System.out.println("5.***Now testing check again");
- AzkabanDatabaseUpdater.main(new String[]{ "-c",confDir});
- }
-
- private static void clearUnitTestDB() throws SQLException {
- Props props = new Props();
-
- props.put("database.type", "mysql");
- props.put("mysql.host", "localhost");
- props.put("mysql.port", "3306");
- props.put("mysql.database", "");
- props.put("mysql.user", "root");
- props.put("mysql.password", "");
- props.put("mysql.numconnections", 10);
-
- DataSource datasource = DataSourceUtils.getDataSource(props);
- QueryRunner runner = new QueryRunner(datasource);
- try {
- runner.update("drop database azkabanunittest");
- } catch (SQLException e) {
- }
- runner.update("create database azkabanunittest");
- }
+ @BeforeClass
+ public static void setupDB() throws IOException, SQLException {
+ File dbDir = new File("h2dbtest");
+ if (dbDir.exists()) {
+ FileUtils.deleteDirectory(dbDir);
+ }
+
+ dbDir.mkdir();
+
+ clearUnitTestDB();
+ }
+
+ @AfterClass
+ public static void teardownDB() {
+ }
+
+ @Test
+ public void testMySQLAutoCreate() throws Exception {
+ String confDir = "unit/conf/dbtestmysql";
+ System.out.println("1.***Now testing check");
+ AzkabanDatabaseUpdater.main(new String[] { "-c", confDir });
+
+ System.out.println("2.***Now testing update");
+ AzkabanDatabaseUpdater.main(new String[] { "-u", "-c", confDir });
+
+ System.out.println("3.***Now testing check again");
+ AzkabanDatabaseUpdater.main(new String[] { "-c", confDir });
+
+ System.out.println("4.***Now testing update again");
+ AzkabanDatabaseUpdater.main(new String[] { "-c", confDir, "-u" });
+
+ System.out.println("5.***Now testing check again");
+ AzkabanDatabaseUpdater.main(new String[] { "-c", confDir });
+ }
+
+ @Test
+ public void testH2AutoCreate() throws Exception {
+ String confDir = "unit/conf/dbtesth2";
+ System.out.println("1.***Now testing check");
+ AzkabanDatabaseUpdater.main(new String[] { "-c", confDir });
+
+ System.out.println("2.***Now testing update");
+ AzkabanDatabaseUpdater.main(new String[] { "-u", "-c", confDir });
+
+ System.out.println("3.***Now testing check again");
+ AzkabanDatabaseUpdater.main(new String[] { "-c", confDir });
+
+ System.out.println("4.***Now testing update again");
+ AzkabanDatabaseUpdater.main(new String[] { "-c", confDir, "-u" });
+
+ System.out.println("5.***Now testing check again");
+ AzkabanDatabaseUpdater.main(new String[] { "-c", confDir });
+ }
+
+ private static void clearUnitTestDB() throws SQLException {
+ Props props = new Props();
+
+ props.put("database.type", "mysql");
+ props.put("mysql.host", "localhost");
+ props.put("mysql.port", "3306");
+ props.put("mysql.database", "");
+ props.put("mysql.user", "root");
+ props.put("mysql.password", "");
+ props.put("mysql.numconnections", 10);
+
+ DataSource datasource = DataSourceUtils.getDataSource(props);
+ QueryRunner runner = new QueryRunner(datasource);
+ try {
+ runner.update("drop database azkabanunittest");
+ } catch (SQLException e) {
+ }
+ runner.update("create database azkabanunittest");
+ }
}
unit/java/azkaban/test/execapp/event/BlockingStatusTest.java 207(+104 -103)
diff --git a/unit/java/azkaban/test/execapp/event/BlockingStatusTest.java b/unit/java/azkaban/test/execapp/event/BlockingStatusTest.java
index 124f9a0..f764198 100644
--- a/unit/java/azkaban/test/execapp/event/BlockingStatusTest.java
+++ b/unit/java/azkaban/test/execapp/event/BlockingStatusTest.java
@@ -8,107 +8,108 @@ import azkaban.executor.Status;
public class BlockingStatusTest {
- public class WatchingThread extends Thread {
- private BlockingStatus status;
- private long diff = 0;
- public WatchingThread(BlockingStatus status) {
- this.status = status;
- }
-
- public void run() {
- long startTime = System.currentTimeMillis();
- status.blockOnFinishedStatus();
- diff = System.currentTimeMillis() - startTime;
- }
-
- public long getDiff() {
- return diff;
- }
- }
-
- @Test
- public void testFinishedBlock() {
- BlockingStatus status = new BlockingStatus(1, "test", Status.SKIPPED);
-
- WatchingThread thread = new WatchingThread(status);
- thread.start();
- try {
- thread.join();
- } catch (InterruptedException e) {
- e.printStackTrace();
- }
- System.out.println("Diff " + thread.getDiff());
- Assert.assertTrue(thread.getDiff() < 100);
- }
-
- @Test
- public void testUnfinishedBlock() throws InterruptedException {
- BlockingStatus status = new BlockingStatus(1, "test", Status.QUEUED);
-
- WatchingThread thread = new WatchingThread(status);
- thread.start();
-
- synchronized(this) {
- wait(3000);
- }
-
- status.changeStatus(Status.SUCCEEDED);
- thread.join();
-
- System.out.println("Diff " + thread.getDiff());
- Assert.assertTrue(thread.getDiff() >= 3000 && thread.getDiff() < 3100);
- }
-
- @Test
- public void testUnfinishedBlockSeveralChanges() throws InterruptedException {
- BlockingStatus status = new BlockingStatus(1, "test", Status.QUEUED);
-
- WatchingThread thread = new WatchingThread(status);
- thread.start();
-
- synchronized(this) {
- wait(3000);
- }
-
- status.changeStatus(Status.PAUSED);
-
- synchronized(this) {
- wait(1000);
- }
-
- status.changeStatus(Status.FAILED);
-
- thread.join(1000);
-
- System.out.println("Diff " + thread.getDiff());
- Assert.assertTrue(thread.getDiff() >= 4000 && thread.getDiff() < 4100);
- }
-
- @Test
- public void testMultipleWatchers() throws InterruptedException {
- BlockingStatus status = new BlockingStatus(1, "test", Status.QUEUED);
-
- WatchingThread thread1 = new WatchingThread(status);
- thread1.start();
-
- synchronized(this) {
- wait(2000);
- }
-
- WatchingThread thread2 = new WatchingThread(status);
- thread2.start();
-
- synchronized(this) {
- wait(2000);
- }
-
- status.changeStatus(Status.FAILED);
- thread2.join(1000);
- thread1.join(1000);
-
- System.out.println("Diff thread 1 " + thread1.getDiff());
- System.out.println("Diff thread 2 " + thread2.getDiff());
- Assert.assertTrue(thread1.getDiff() >= 4000 && thread1.getDiff() < 4100);
- Assert.assertTrue(thread2.getDiff() >= 2000 && thread2.getDiff() < 2100);
- }
+ public class WatchingThread extends Thread {
+ private BlockingStatus status;
+ private long diff = 0;
+
+ public WatchingThread(BlockingStatus status) {
+ this.status = status;
+ }
+
+ public void run() {
+ long startTime = System.currentTimeMillis();
+ status.blockOnFinishedStatus();
+ diff = System.currentTimeMillis() - startTime;
+ }
+
+ public long getDiff() {
+ return diff;
+ }
+ }
+
+ @Test
+ public void testFinishedBlock() {
+ BlockingStatus status = new BlockingStatus(1, "test", Status.SKIPPED);
+
+ WatchingThread thread = new WatchingThread(status);
+ thread.start();
+ try {
+ thread.join();
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ System.out.println("Diff " + thread.getDiff());
+ Assert.assertTrue(thread.getDiff() < 100);
+ }
+
+ @Test
+ public void testUnfinishedBlock() throws InterruptedException {
+ BlockingStatus status = new BlockingStatus(1, "test", Status.QUEUED);
+
+ WatchingThread thread = new WatchingThread(status);
+ thread.start();
+
+ synchronized (this) {
+ wait(3000);
+ }
+
+ status.changeStatus(Status.SUCCEEDED);
+ thread.join();
+
+ System.out.println("Diff " + thread.getDiff());
+ Assert.assertTrue(thread.getDiff() >= 3000 && thread.getDiff() < 3100);
+ }
+
+ @Test
+ public void testUnfinishedBlockSeveralChanges() throws InterruptedException {
+ BlockingStatus status = new BlockingStatus(1, "test", Status.QUEUED);
+
+ WatchingThread thread = new WatchingThread(status);
+ thread.start();
+
+ synchronized (this) {
+ wait(3000);
+ }
+
+ status.changeStatus(Status.PAUSED);
+
+ synchronized (this) {
+ wait(1000);
+ }
+
+ status.changeStatus(Status.FAILED);
+
+ thread.join(1000);
+
+ System.out.println("Diff " + thread.getDiff());
+ Assert.assertTrue(thread.getDiff() >= 4000 && thread.getDiff() < 4100);
+ }
+
+ @Test
+ public void testMultipleWatchers() throws InterruptedException {
+ BlockingStatus status = new BlockingStatus(1, "test", Status.QUEUED);
+
+ WatchingThread thread1 = new WatchingThread(status);
+ thread1.start();
+
+ synchronized (this) {
+ wait(2000);
+ }
+
+ WatchingThread thread2 = new WatchingThread(status);
+ thread2.start();
+
+ synchronized (this) {
+ wait(2000);
+ }
+
+ status.changeStatus(Status.FAILED);
+ thread2.join(1000);
+ thread1.join(1000);
+
+ System.out.println("Diff thread 1 " + thread1.getDiff());
+ System.out.println("Diff thread 2 " + thread2.getDiff());
+ Assert.assertTrue(thread1.getDiff() >= 4000 && thread1.getDiff() < 4100);
+ Assert.assertTrue(thread2.getDiff() >= 2000 && thread2.getDiff() < 2100);
+ }
}
diff --git a/unit/java/azkaban/test/execapp/event/LocalFlowWatcherTest.java b/unit/java/azkaban/test/execapp/event/LocalFlowWatcherTest.java
index fb13d04..2b07630 100644
--- a/unit/java/azkaban/test/execapp/event/LocalFlowWatcherTest.java
+++ b/unit/java/azkaban/test/execapp/event/LocalFlowWatcherTest.java
@@ -29,212 +29,232 @@ import azkaban.test.executor.JavaJob;
import azkaban.utils.JSONUtils;
public class LocalFlowWatcherTest {
- private File workingDir;
- private JobTypeManager jobtypeManager;
- private ProjectLoader fakeProjectLoader;
- private int dirVal= 0;
-
- @Before
- public void setUp() throws Exception {
- jobtypeManager = new JobTypeManager(null, null, this.getClass().getClassLoader());
- jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
- fakeProjectLoader = new MockProjectLoader(workingDir);
- }
-
- @After
- public void tearDown() throws IOException {
- }
-
- public File setupDirectory() throws IOException {
- System.out.println("Create temp dir");
- File workingDir = new File("_AzkabanTestDir_" + dirVal );
- if (workingDir.exists()) {
- FileUtils.deleteDirectory(workingDir);
- }
- workingDir.mkdirs();
- dirVal++;
-
- return workingDir;
- }
-
- @Test
- public void testBasicLocalFlowWatcher() throws Exception {
- MockExecutorLoader loader = new MockExecutorLoader();
-
- EventCollectorListener eventCollector = new EventCollectorListener();
-
- File workingDir1 = setupDirectory();
- FlowRunner runner1 = createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null, null);
- Thread runner1Thread = new Thread(runner1);
-
- File workingDir2 = setupDirectory();
- LocalFlowWatcher watcher = new LocalFlowWatcher(runner1);
- FlowRunner runner2 = createFlowRunner(workingDir2, loader, eventCollector, "exec1", 2, watcher, 2);
- Thread runner2Thread = new Thread(runner2);
-
- runner1Thread.start();
- runner2Thread.start();
- runner2Thread.join();
-
- FileUtils.deleteDirectory(workingDir1);
- FileUtils.deleteDirectory(workingDir2);
-
- testPipelineLevel2(runner1.getExecutableFlow(), runner2.getExecutableFlow());
- }
-
- @Test
- public void testLevel1LocalFlowWatcher() throws Exception {
- MockExecutorLoader loader = new MockExecutorLoader();
-
- EventCollectorListener eventCollector = new EventCollectorListener();
-
- File workingDir1 = setupDirectory();
- FlowRunner runner1 = createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null, null);
- Thread runner1Thread = new Thread(runner1);
-
- File workingDir2 = setupDirectory();
- LocalFlowWatcher watcher = new LocalFlowWatcher(runner1);
- FlowRunner runner2 = createFlowRunner(workingDir2, loader, eventCollector, "exec1", 2, watcher, 1);
- Thread runner2Thread = new Thread(runner2);
-
- runner1Thread.start();
- runner2Thread.start();
- runner2Thread.join();
-
- FileUtils.deleteDirectory(workingDir1);
- FileUtils.deleteDirectory(workingDir2);
-
- testPipelineLevel1(runner1.getExecutableFlow(), runner2.getExecutableFlow());
- }
-
- @Test
- public void testLevel2DiffLocalFlowWatcher() throws Exception {
- MockExecutorLoader loader = new MockExecutorLoader();
-
- EventCollectorListener eventCollector = new EventCollectorListener();
-
- File workingDir1 = setupDirectory();
- FlowRunner runner1 = createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null, null);
- Thread runner1Thread = new Thread(runner1);
-
- File workingDir2 = setupDirectory();
- LocalFlowWatcher watcher = new LocalFlowWatcher(runner1);
- FlowRunner runner2 = createFlowRunner(workingDir2, loader, eventCollector, "exec1-mod", 2, watcher, 1);
- Thread runner2Thread = new Thread(runner2);
-
- runner1Thread.start();
- runner2Thread.start();
- runner2Thread.join();
-
- FileUtils.deleteDirectory(workingDir1);
- FileUtils.deleteDirectory(workingDir2);
-
- testPipelineLevel1(runner1.getExecutableFlow(), runner2.getExecutableFlow());
- }
-
- private void testPipelineLevel1(ExecutableFlow first, ExecutableFlow second) {
- for (ExecutableNode node: second.getExecutableNodes()) {
- Assert.assertEquals(node.getStatus(), Status.SUCCEEDED);
-
- // check it's start time is after the first's children.
- ExecutableNode watchedNode = first.getExecutableNode(node.getId());
- if (watchedNode == null) {
- continue;
- }
- Assert.assertEquals(watchedNode.getStatus(), Status.SUCCEEDED);
-
- System.out.println("Node " + node.getId() +
- " start: " + node.getStartTime() +
- " dependent on " + watchedNode.getId() +
- " " + watchedNode.getEndTime() +
- " diff: " + (node.getStartTime() - watchedNode.getEndTime()));
-
- Assert.assertTrue(node.getStartTime() >= watchedNode.getEndTime());
-
- long minParentDiff = 0;
- if (node.getInNodes().size() > 0) {
- minParentDiff = Long.MAX_VALUE;
- for (String dependency: node.getInNodes()) {
- ExecutableNode parent = second.getExecutableNode(dependency);
- long diff = node.getStartTime() - parent.getEndTime();
- minParentDiff = Math.min(minParentDiff, diff);
- }
- }
- long diff = node.getStartTime() - watchedNode.getEndTime();
- System.out.println(" minPipelineTimeDiff:" + diff + " minDependencyTimeDiff:" + minParentDiff);
- Assert.assertTrue(minParentDiff < 100 || diff < 100);
- }
- }
-
- private void testPipelineLevel2(ExecutableFlow first, ExecutableFlow second) {
- for (ExecutableNode node: second.getExecutableNodes()) {
- Assert.assertEquals(node.getStatus(), Status.SUCCEEDED);
-
- // check it's start time is after the first's children.
- ExecutableNode watchedNode = first.getExecutableNode(node.getId());
- if (watchedNode == null) {
- continue;
- }
- Assert.assertEquals(watchedNode.getStatus(), Status.SUCCEEDED);
-
- long minDiff = Long.MAX_VALUE;
- for (String watchedChild: watchedNode.getOutNodes()) {
- ExecutableNode child = first.getExecutableNode(watchedChild);
- if (child == null) {
- continue;
- }
- Assert.assertEquals(child.getStatus(), Status.SUCCEEDED);
- long diff = node.getStartTime() - child.getEndTime();
- minDiff = Math.min(minDiff, diff);
- System.out.println("Node " + node.getId() +
- " start: " + node.getStartTime() +
- " dependent on " + watchedChild + " " + child.getEndTime() +
- " diff: " + diff);
-
- Assert.assertTrue(node.getStartTime() >= child.getEndTime());
- }
-
- long minParentDiff = Long.MAX_VALUE;
- for (String dependency: node.getInNodes()) {
- ExecutableNode parent = second.getExecutableNode(dependency);
- long diff = node.getStartTime() - parent.getEndTime();
- minParentDiff = Math.min(minParentDiff, diff);
- }
- System.out.println(" minPipelineTimeDiff:" + minDiff + " minDependencyTimeDiff:" + minParentDiff);
- Assert.assertTrue(minParentDiff < 100 || minDiff < 100);
- }
- }
-
- private FlowRunner createFlowRunner(File workingDir, ExecutorLoader loader, EventCollectorListener eventCollector, String flowName, int execId, FlowWatcher watcher, Integer pipeline) throws Exception {
- File testDir = new File("unit/executions/exectest1");
- ExecutableFlow exFlow = prepareExecDir(workingDir, testDir, flowName, execId);
- ExecutionOptions option = exFlow.getExecutionOptions();
- if (watcher != null) {
- option.setPipelineLevel(pipeline);
- option.setPipelineExecutionId(watcher.getExecId());
- }
- //MockProjectLoader projectLoader = new MockProjectLoader(new File(exFlow.getExecutionPath()));
-
- loader.uploadExecutableFlow(exFlow);
- FlowRunner runner = new FlowRunner(exFlow, loader, fakeProjectLoader, jobtypeManager);
- runner.setFlowWatcher(watcher);
- runner.addListener(eventCollector);
-
- return runner;
- }
-
- private ExecutableFlow prepareExecDir(File workingDir, File execDir, String flowName, int execId) throws IOException {
- FileUtils.copyDirectory(execDir, workingDir);
-
- File jsonFlowFile = new File(workingDir, flowName + ".flow");
- @SuppressWarnings("unchecked")
- HashMap<String, Object> flowObj = (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
-
- Project project = new Project(1, "test");
- Flow flow = Flow.flowFromObject(flowObj);
- ExecutableFlow execFlow = new ExecutableFlow(project, flow);
- execFlow.setExecutionId(execId);
- execFlow.setExecutionPath(workingDir.getPath());
- return execFlow;
- }
+ private File workingDir;
+ private JobTypeManager jobtypeManager;
+ private ProjectLoader fakeProjectLoader;
+ private int dirVal = 0;
+
+ @Before
+ public void setUp() throws Exception {
+ jobtypeManager =
+ new JobTypeManager(null, null, this.getClass().getClassLoader());
+ jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
+ fakeProjectLoader = new MockProjectLoader(workingDir);
+ }
+
+ @After
+ public void tearDown() throws IOException {
+ }
+
+ public File setupDirectory() throws IOException {
+ System.out.println("Create temp dir");
+ File workingDir = new File("_AzkabanTestDir_" + dirVal);
+ if (workingDir.exists()) {
+ FileUtils.deleteDirectory(workingDir);
+ }
+ workingDir.mkdirs();
+ dirVal++;
+
+ return workingDir;
+ }
+
+ @Test
+ public void testBasicLocalFlowWatcher() throws Exception {
+ MockExecutorLoader loader = new MockExecutorLoader();
+
+ EventCollectorListener eventCollector = new EventCollectorListener();
+
+ File workingDir1 = setupDirectory();
+ FlowRunner runner1 =
+ createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null,
+ null);
+ Thread runner1Thread = new Thread(runner1);
+
+ File workingDir2 = setupDirectory();
+ LocalFlowWatcher watcher = new LocalFlowWatcher(runner1);
+ FlowRunner runner2 =
+ createFlowRunner(workingDir2, loader, eventCollector, "exec1", 2,
+ watcher, 2);
+ Thread runner2Thread = new Thread(runner2);
+
+ runner1Thread.start();
+ runner2Thread.start();
+ runner2Thread.join();
+
+ FileUtils.deleteDirectory(workingDir1);
+ FileUtils.deleteDirectory(workingDir2);
+
+ testPipelineLevel2(runner1.getExecutableFlow(), runner2.getExecutableFlow());
+ }
+
+ @Test
+ public void testLevel1LocalFlowWatcher() throws Exception {
+ MockExecutorLoader loader = new MockExecutorLoader();
+
+ EventCollectorListener eventCollector = new EventCollectorListener();
+
+ File workingDir1 = setupDirectory();
+ FlowRunner runner1 =
+ createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null,
+ null);
+ Thread runner1Thread = new Thread(runner1);
+
+ File workingDir2 = setupDirectory();
+ LocalFlowWatcher watcher = new LocalFlowWatcher(runner1);
+ FlowRunner runner2 =
+ createFlowRunner(workingDir2, loader, eventCollector, "exec1", 2,
+ watcher, 1);
+ Thread runner2Thread = new Thread(runner2);
+
+ runner1Thread.start();
+ runner2Thread.start();
+ runner2Thread.join();
+
+ FileUtils.deleteDirectory(workingDir1);
+ FileUtils.deleteDirectory(workingDir2);
+
+ testPipelineLevel1(runner1.getExecutableFlow(), runner2.getExecutableFlow());
+ }
+
+ @Test
+ public void testLevel2DiffLocalFlowWatcher() throws Exception {
+ MockExecutorLoader loader = new MockExecutorLoader();
+
+ EventCollectorListener eventCollector = new EventCollectorListener();
+
+ File workingDir1 = setupDirectory();
+ FlowRunner runner1 =
+ createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null,
+ null);
+ Thread runner1Thread = new Thread(runner1);
+
+ File workingDir2 = setupDirectory();
+ LocalFlowWatcher watcher = new LocalFlowWatcher(runner1);
+ FlowRunner runner2 =
+ createFlowRunner(workingDir2, loader, eventCollector, "exec1-mod", 2,
+ watcher, 1);
+ Thread runner2Thread = new Thread(runner2);
+
+ runner1Thread.start();
+ runner2Thread.start();
+ runner2Thread.join();
+
+ FileUtils.deleteDirectory(workingDir1);
+ FileUtils.deleteDirectory(workingDir2);
+
+ testPipelineLevel1(runner1.getExecutableFlow(), runner2.getExecutableFlow());
+ }
+
+ private void testPipelineLevel1(ExecutableFlow first, ExecutableFlow second) {
+ for (ExecutableNode node : second.getExecutableNodes()) {
+ Assert.assertEquals(node.getStatus(), Status.SUCCEEDED);
+
+ // check it's start time is after the first's children.
+ ExecutableNode watchedNode = first.getExecutableNode(node.getId());
+ if (watchedNode == null) {
+ continue;
+ }
+ Assert.assertEquals(watchedNode.getStatus(), Status.SUCCEEDED);
+
+ System.out.println("Node " + node.getId() + " start: "
+ + node.getStartTime() + " dependent on " + watchedNode.getId() + " "
+ + watchedNode.getEndTime() + " diff: "
+ + (node.getStartTime() - watchedNode.getEndTime()));
+
+ Assert.assertTrue(node.getStartTime() >= watchedNode.getEndTime());
+
+ long minParentDiff = 0;
+ if (node.getInNodes().size() > 0) {
+ minParentDiff = Long.MAX_VALUE;
+ for (String dependency : node.getInNodes()) {
+ ExecutableNode parent = second.getExecutableNode(dependency);
+ long diff = node.getStartTime() - parent.getEndTime();
+ minParentDiff = Math.min(minParentDiff, diff);
+ }
+ }
+ long diff = node.getStartTime() - watchedNode.getEndTime();
+ System.out.println(" minPipelineTimeDiff:" + diff
+ + " minDependencyTimeDiff:" + minParentDiff);
+ Assert.assertTrue(minParentDiff < 100 || diff < 100);
+ }
+ }
+
+ private void testPipelineLevel2(ExecutableFlow first, ExecutableFlow second) {
+ for (ExecutableNode node : second.getExecutableNodes()) {
+ Assert.assertEquals(node.getStatus(), Status.SUCCEEDED);
+
+ // check it's start time is after the first's children.
+ ExecutableNode watchedNode = first.getExecutableNode(node.getId());
+ if (watchedNode == null) {
+ continue;
+ }
+ Assert.assertEquals(watchedNode.getStatus(), Status.SUCCEEDED);
+
+ long minDiff = Long.MAX_VALUE;
+ for (String watchedChild : watchedNode.getOutNodes()) {
+ ExecutableNode child = first.getExecutableNode(watchedChild);
+ if (child == null) {
+ continue;
+ }
+ Assert.assertEquals(child.getStatus(), Status.SUCCEEDED);
+ long diff = node.getStartTime() - child.getEndTime();
+ minDiff = Math.min(minDiff, diff);
+ System.out.println("Node " + node.getId() + " start: "
+ + node.getStartTime() + " dependent on " + watchedChild + " "
+ + child.getEndTime() + " diff: " + diff);
+
+ Assert.assertTrue(node.getStartTime() >= child.getEndTime());
+ }
+
+ long minParentDiff = Long.MAX_VALUE;
+ for (String dependency : node.getInNodes()) {
+ ExecutableNode parent = second.getExecutableNode(dependency);
+ long diff = node.getStartTime() - parent.getEndTime();
+ minParentDiff = Math.min(minParentDiff, diff);
+ }
+ System.out.println(" minPipelineTimeDiff:" + minDiff
+ + " minDependencyTimeDiff:" + minParentDiff);
+ Assert.assertTrue(minParentDiff < 100 || minDiff < 100);
+ }
+ }
+
+ private FlowRunner createFlowRunner(File workingDir, ExecutorLoader loader,
+ EventCollectorListener eventCollector, String flowName, int execId,
+ FlowWatcher watcher, Integer pipeline) throws Exception {
+ File testDir = new File("unit/executions/exectest1");
+ ExecutableFlow exFlow =
+ prepareExecDir(workingDir, testDir, flowName, execId);
+ ExecutionOptions option = exFlow.getExecutionOptions();
+ if (watcher != null) {
+ option.setPipelineLevel(pipeline);
+ option.setPipelineExecutionId(watcher.getExecId());
+ }
+ // MockProjectLoader projectLoader = new MockProjectLoader(new
+ // File(exFlow.getExecutionPath()));
+
+ loader.uploadExecutableFlow(exFlow);
+ FlowRunner runner =
+ new FlowRunner(exFlow, loader, fakeProjectLoader, jobtypeManager);
+ runner.setFlowWatcher(watcher);
+ runner.addListener(eventCollector);
+
+ return runner;
+ }
+
+ private ExecutableFlow prepareExecDir(File workingDir, File execDir,
+ String flowName, int execId) throws IOException {
+ FileUtils.copyDirectory(execDir, workingDir);
+
+ File jsonFlowFile = new File(workingDir, flowName + ".flow");
+ @SuppressWarnings("unchecked")
+ HashMap<String, Object> flowObj =
+ (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
+
+ Project project = new Project(1, "test");
+ Flow flow = Flow.flowFromObject(flowObj);
+ ExecutableFlow execFlow = new ExecutableFlow(project, flow);
+ execFlow.setExecutionId(execId);
+ execFlow.setExecutionPath(workingDir.getPath());
+ return execFlow;
+ }
}
diff --git a/unit/java/azkaban/test/execapp/event/RemoteFlowWatcherTest.java b/unit/java/azkaban/test/execapp/event/RemoteFlowWatcherTest.java
index 3764a81..27ede22 100644
--- a/unit/java/azkaban/test/execapp/event/RemoteFlowWatcherTest.java
+++ b/unit/java/azkaban/test/execapp/event/RemoteFlowWatcherTest.java
@@ -30,223 +30,243 @@ import azkaban.test.executor.JavaJob;
import azkaban.utils.JSONUtils;
public class RemoteFlowWatcherTest {
- private File workingDir;
- private JobTypeManager jobtypeManager;
- private ProjectLoader fakeProjectLoader;
- private int dirVal= 0;
-
- @Before
- public void setUp() throws Exception {
- jobtypeManager = new JobTypeManager(null, null, this.getClass().getClassLoader());
- jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
- fakeProjectLoader = new MockProjectLoader(workingDir);
- }
-
- @After
- public void tearDown() throws IOException {
- }
-
- public File setupDirectory() throws IOException {
- System.out.println("Create temp dir");
- File workingDir = new File("_AzkabanTestDir_" + dirVal );
- if (workingDir.exists()) {
- FileUtils.deleteDirectory(workingDir);
- }
- workingDir.mkdirs();
- dirVal++;
-
- return workingDir;
- }
-
- @Test
- public void testBasicRemoteFlowWatcher() throws Exception {
- MockExecutorLoader loader = new MockExecutorLoader();
-
- EventCollectorListener eventCollector = new EventCollectorListener();
-
- File workingDir1 = setupDirectory();
- FlowRunner runner1 = createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null, null);
- Thread runner1Thread = new Thread(runner1);
-
- File workingDir2 = setupDirectory();
- RemoteFlowWatcher watcher = new RemoteFlowWatcher(1, loader, 100);
- FlowRunner runner2 = createFlowRunner(workingDir2, loader, eventCollector, "exec1", 2, watcher, 2);
- Thread runner2Thread = new Thread(runner2);
-
- printCurrentState("runner1 ", runner1.getExecutableFlow());
- runner1Thread.start();
- runner2Thread.start();
-
- runner2Thread.join();
-
- FileUtils.deleteDirectory(workingDir1);
- FileUtils.deleteDirectory(workingDir2);
-
- testPipelineLevel2(runner1.getExecutableFlow(), runner2.getExecutableFlow());
- }
-
- @Test
- public void testLevel1RemoteFlowWatcher() throws Exception {
- MockExecutorLoader loader = new MockExecutorLoader();
-
- EventCollectorListener eventCollector = new EventCollectorListener();
-
- File workingDir1 = setupDirectory();
- FlowRunner runner1 = createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null, null);
- Thread runner1Thread = new Thread(runner1);
-
- File workingDir2 = setupDirectory();
- RemoteFlowWatcher watcher = new RemoteFlowWatcher(1, loader, 100);
- FlowRunner runner2 = createFlowRunner(workingDir2, loader, eventCollector, "exec1", 2, watcher, 1);
- Thread runner2Thread = new Thread(runner2);
-
- runner1Thread.start();
- runner2Thread.start();
- runner2Thread.join();
-
- FileUtils.deleteDirectory(workingDir1);
- FileUtils.deleteDirectory(workingDir2);
-
- testPipelineLevel1(runner1.getExecutableFlow(), runner2.getExecutableFlow());
- }
-
- @Test
- public void testLevel2DiffRemoteFlowWatcher() throws Exception {
- MockExecutorLoader loader = new MockExecutorLoader();
-
- EventCollectorListener eventCollector = new EventCollectorListener();
-
- File workingDir1 = setupDirectory();
- FlowRunner runner1 = createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null, null);
- Thread runner1Thread = new Thread(runner1);
-
- File workingDir2 = setupDirectory();
-
- RemoteFlowWatcher watcher = new RemoteFlowWatcher(1, loader, 100);
- FlowRunner runner2 = createFlowRunner(workingDir2, loader, eventCollector, "exec1-mod", 2, watcher, 1);
- Thread runner2Thread = new Thread(runner2);
-
- runner1Thread.start();
- runner2Thread.start();
- runner2Thread.join();
-
- FileUtils.deleteDirectory(workingDir1);
- FileUtils.deleteDirectory(workingDir2);
-
- testPipelineLevel1(runner1.getExecutableFlow(), runner2.getExecutableFlow());
- }
-
- private void testPipelineLevel1(ExecutableFlow first, ExecutableFlow second) {
- for (ExecutableNode node: second.getExecutableNodes()) {
- Assert.assertEquals(node.getStatus(), Status.SUCCEEDED);
-
- // check it's start time is after the first's children.
- ExecutableNode watchedNode = first.getExecutableNode(node.getId());
- if (watchedNode == null) {
- continue;
- }
- Assert.assertEquals(watchedNode.getStatus(), Status.SUCCEEDED);
-
- System.out.println("Node " + node.getId() +
- " start: " + node.getStartTime() +
- " dependent on " + watchedNode.getId() +
- " " + watchedNode.getEndTime() +
- " diff: " + (node.getStartTime() - watchedNode.getEndTime()));
-
- Assert.assertTrue(node.getStartTime() >= watchedNode.getEndTime());
-
- long minParentDiff = 0;
- if (node.getInNodes().size() > 0) {
- minParentDiff = Long.MAX_VALUE;
- for (String dependency: node.getInNodes()) {
- ExecutableNode parent = second.getExecutableNode(dependency);
- long diff = node.getStartTime() - parent.getEndTime();
- minParentDiff = Math.min(minParentDiff, diff);
- }
- }
- long diff = node.getStartTime() - watchedNode.getEndTime();
- Assert.assertTrue(minParentDiff < 500 || diff < 500);
- }
- }
-
- private void testPipelineLevel2(ExecutableFlow first, ExecutableFlow second) {
- for (ExecutableNode node: second.getExecutableNodes()) {
- Assert.assertEquals(node.getStatus(), Status.SUCCEEDED);
-
- // check it's start time is after the first's children.
- ExecutableNode watchedNode = first.getExecutableNode(node.getId());
- if (watchedNode == null) {
- continue;
- }
- Assert.assertEquals(watchedNode.getStatus(), Status.SUCCEEDED);
-
- long minDiff = Long.MAX_VALUE;
- for (String watchedChild: watchedNode.getOutNodes()) {
- ExecutableNode child = first.getExecutableNode(watchedChild);
- if (child == null) {
- continue;
- }
- Assert.assertEquals(child.getStatus(), Status.SUCCEEDED);
- long diff = node.getStartTime() - child.getEndTime();
- minDiff = Math.min(minDiff, diff);
- System.out.println("Node " + node.getId() +
- " start: " + node.getStartTime() +
- " dependent on " + watchedChild + " " + child.getEndTime() +
- " diff: " + diff);
- Assert.assertTrue(node.getStartTime() >= child.getEndTime());
- }
-
- long minParentDiff = Long.MAX_VALUE;
- for (String dependency: node.getInNodes()) {
- ExecutableNode parent = second.getExecutableNode(dependency);
- long diff = node.getStartTime() - parent.getEndTime();
- minParentDiff = Math.min(minParentDiff, diff);
- }
- System.out.println(" minPipelineTimeDiff:" + minDiff + " minDependencyTimeDiff:" + minParentDiff);
- Assert.assertTrue(minParentDiff < 500 || minDiff < 500);
- }
- }
-
- private FlowRunner createFlowRunner(File workingDir, ExecutorLoader loader, EventCollectorListener eventCollector, String flowName, int execId, FlowWatcher watcher, Integer pipeline) throws Exception {
- File testDir = new File("unit/executions/exectest1");
- ExecutableFlow exFlow = prepareExecDir(workingDir, testDir, flowName, execId);
- ExecutionOptions options = exFlow.getExecutionOptions();
- if (watcher != null) {
- options.setPipelineLevel(pipeline);
- options.setPipelineExecutionId(watcher.getExecId());
- }
- //MockProjectLoader projectLoader = new MockProjectLoader(new File(exFlow.getExecutionPath()));
-
- loader.uploadExecutableFlow(exFlow);
- FlowRunner runner = new FlowRunner(exFlow, loader, fakeProjectLoader, jobtypeManager);
- runner.setFlowWatcher(watcher);
- runner.addListener(eventCollector);
-
- return runner;
- }
-
- private void printCurrentState(String prefix, ExecutableFlowBase flow) {
- for(ExecutableNode node: flow.getExecutableNodes()) {
-
- System.err.println(prefix + node.getNestedId() + "->" + node.getStatus().name());
- if (node instanceof ExecutableFlowBase) {
- printCurrentState(prefix, (ExecutableFlowBase)node);
- }
- }
- }
-
- private ExecutableFlow prepareExecDir(File workingDir, File execDir, String flowName, int execId) throws IOException {
- FileUtils.copyDirectory(execDir, workingDir);
-
- File jsonFlowFile = new File(workingDir, flowName + ".flow");
- @SuppressWarnings("unchecked")
- HashMap<String, Object> flowObj = (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
-
- Project project = new Project(1, "test");
- Flow flow = Flow.flowFromObject(flowObj);
- ExecutableFlow execFlow = new ExecutableFlow(project, flow);
- execFlow.setExecutionId(execId);
- execFlow.setExecutionPath(workingDir.getPath());
- return execFlow;
- }
+ private File workingDir;
+ private JobTypeManager jobtypeManager;
+ private ProjectLoader fakeProjectLoader;
+ private int dirVal = 0;
+
+ @Before
+ public void setUp() throws Exception {
+ jobtypeManager =
+ new JobTypeManager(null, null, this.getClass().getClassLoader());
+ jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
+ fakeProjectLoader = new MockProjectLoader(workingDir);
+ }
+
+ @After
+ public void tearDown() throws IOException {
+ }
+
+ public File setupDirectory() throws IOException {
+ System.out.println("Create temp dir");
+ File workingDir = new File("_AzkabanTestDir_" + dirVal);
+ if (workingDir.exists()) {
+ FileUtils.deleteDirectory(workingDir);
+ }
+ workingDir.mkdirs();
+ dirVal++;
+
+ return workingDir;
+ }
+
+ @Test
+ public void testBasicRemoteFlowWatcher() throws Exception {
+ MockExecutorLoader loader = new MockExecutorLoader();
+
+ EventCollectorListener eventCollector = new EventCollectorListener();
+
+ File workingDir1 = setupDirectory();
+ FlowRunner runner1 =
+ createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null,
+ null);
+ Thread runner1Thread = new Thread(runner1);
+
+ File workingDir2 = setupDirectory();
+ RemoteFlowWatcher watcher = new RemoteFlowWatcher(1, loader, 100);
+ FlowRunner runner2 =
+ createFlowRunner(workingDir2, loader, eventCollector, "exec1", 2,
+ watcher, 2);
+ Thread runner2Thread = new Thread(runner2);
+
+ printCurrentState("runner1 ", runner1.getExecutableFlow());
+ runner1Thread.start();
+ runner2Thread.start();
+
+ runner2Thread.join();
+
+ FileUtils.deleteDirectory(workingDir1);
+ FileUtils.deleteDirectory(workingDir2);
+
+ testPipelineLevel2(runner1.getExecutableFlow(), runner2.getExecutableFlow());
+ }
+
+ @Test
+ public void testLevel1RemoteFlowWatcher() throws Exception {
+ MockExecutorLoader loader = new MockExecutorLoader();
+
+ EventCollectorListener eventCollector = new EventCollectorListener();
+
+ File workingDir1 = setupDirectory();
+ FlowRunner runner1 =
+ createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null,
+ null);
+ Thread runner1Thread = new Thread(runner1);
+
+ File workingDir2 = setupDirectory();
+ RemoteFlowWatcher watcher = new RemoteFlowWatcher(1, loader, 100);
+ FlowRunner runner2 =
+ createFlowRunner(workingDir2, loader, eventCollector, "exec1", 2,
+ watcher, 1);
+ Thread runner2Thread = new Thread(runner2);
+
+ runner1Thread.start();
+ runner2Thread.start();
+ runner2Thread.join();
+
+ FileUtils.deleteDirectory(workingDir1);
+ FileUtils.deleteDirectory(workingDir2);
+
+ testPipelineLevel1(runner1.getExecutableFlow(), runner2.getExecutableFlow());
+ }
+
+ @Test
+ public void testLevel2DiffRemoteFlowWatcher() throws Exception {
+ MockExecutorLoader loader = new MockExecutorLoader();
+
+ EventCollectorListener eventCollector = new EventCollectorListener();
+
+ File workingDir1 = setupDirectory();
+ FlowRunner runner1 =
+ createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null,
+ null);
+ Thread runner1Thread = new Thread(runner1);
+
+ File workingDir2 = setupDirectory();
+
+ RemoteFlowWatcher watcher = new RemoteFlowWatcher(1, loader, 100);
+ FlowRunner runner2 =
+ createFlowRunner(workingDir2, loader, eventCollector, "exec1-mod", 2,
+ watcher, 1);
+ Thread runner2Thread = new Thread(runner2);
+
+ runner1Thread.start();
+ runner2Thread.start();
+ runner2Thread.join();
+
+ FileUtils.deleteDirectory(workingDir1);
+ FileUtils.deleteDirectory(workingDir2);
+
+ testPipelineLevel1(runner1.getExecutableFlow(), runner2.getExecutableFlow());
+ }
+
+ private void testPipelineLevel1(ExecutableFlow first, ExecutableFlow second) {
+ for (ExecutableNode node : second.getExecutableNodes()) {
+ Assert.assertEquals(node.getStatus(), Status.SUCCEEDED);
+
+ // check it's start time is after the first's children.
+ ExecutableNode watchedNode = first.getExecutableNode(node.getId());
+ if (watchedNode == null) {
+ continue;
+ }
+ Assert.assertEquals(watchedNode.getStatus(), Status.SUCCEEDED);
+
+ System.out.println("Node " + node.getId() + " start: "
+ + node.getStartTime() + " dependent on " + watchedNode.getId() + " "
+ + watchedNode.getEndTime() + " diff: "
+ + (node.getStartTime() - watchedNode.getEndTime()));
+
+ Assert.assertTrue(node.getStartTime() >= watchedNode.getEndTime());
+
+ long minParentDiff = 0;
+ if (node.getInNodes().size() > 0) {
+ minParentDiff = Long.MAX_VALUE;
+ for (String dependency : node.getInNodes()) {
+ ExecutableNode parent = second.getExecutableNode(dependency);
+ long diff = node.getStartTime() - parent.getEndTime();
+ minParentDiff = Math.min(minParentDiff, diff);
+ }
+ }
+ long diff = node.getStartTime() - watchedNode.getEndTime();
+ Assert.assertTrue(minParentDiff < 500 || diff < 500);
+ }
+ }
+
+ private void testPipelineLevel2(ExecutableFlow first, ExecutableFlow second) {
+ for (ExecutableNode node : second.getExecutableNodes()) {
+ Assert.assertEquals(node.getStatus(), Status.SUCCEEDED);
+
+ // check it's start time is after the first's children.
+ ExecutableNode watchedNode = first.getExecutableNode(node.getId());
+ if (watchedNode == null) {
+ continue;
+ }
+ Assert.assertEquals(watchedNode.getStatus(), Status.SUCCEEDED);
+
+ long minDiff = Long.MAX_VALUE;
+ for (String watchedChild : watchedNode.getOutNodes()) {
+ ExecutableNode child = first.getExecutableNode(watchedChild);
+ if (child == null) {
+ continue;
+ }
+ Assert.assertEquals(child.getStatus(), Status.SUCCEEDED);
+ long diff = node.getStartTime() - child.getEndTime();
+ minDiff = Math.min(minDiff, diff);
+ System.out.println("Node " + node.getId() + " start: "
+ + node.getStartTime() + " dependent on " + watchedChild + " "
+ + child.getEndTime() + " diff: " + diff);
+ Assert.assertTrue(node.getStartTime() >= child.getEndTime());
+ }
+
+ long minParentDiff = Long.MAX_VALUE;
+ for (String dependency : node.getInNodes()) {
+ ExecutableNode parent = second.getExecutableNode(dependency);
+ long diff = node.getStartTime() - parent.getEndTime();
+ minParentDiff = Math.min(minParentDiff, diff);
+ }
+ System.out.println(" minPipelineTimeDiff:" + minDiff
+ + " minDependencyTimeDiff:" + minParentDiff);
+ Assert.assertTrue(minParentDiff < 500 || minDiff < 500);
+ }
+ }
+
+ private FlowRunner createFlowRunner(File workingDir, ExecutorLoader loader,
+ EventCollectorListener eventCollector, String flowName, int execId,
+ FlowWatcher watcher, Integer pipeline) throws Exception {
+ File testDir = new File("unit/executions/exectest1");
+ ExecutableFlow exFlow =
+ prepareExecDir(workingDir, testDir, flowName, execId);
+ ExecutionOptions options = exFlow.getExecutionOptions();
+ if (watcher != null) {
+ options.setPipelineLevel(pipeline);
+ options.setPipelineExecutionId(watcher.getExecId());
+ }
+ // MockProjectLoader projectLoader = new MockProjectLoader(new
+ // File(exFlow.getExecutionPath()));
+
+ loader.uploadExecutableFlow(exFlow);
+ FlowRunner runner =
+ new FlowRunner(exFlow, loader, fakeProjectLoader, jobtypeManager);
+ runner.setFlowWatcher(watcher);
+ runner.addListener(eventCollector);
+
+ return runner;
+ }
+
+ private void printCurrentState(String prefix, ExecutableFlowBase flow) {
+ for (ExecutableNode node : flow.getExecutableNodes()) {
+
+ System.err.println(prefix + node.getNestedId() + "->"
+ + node.getStatus().name());
+ if (node instanceof ExecutableFlowBase) {
+ printCurrentState(prefix, (ExecutableFlowBase) node);
+ }
+ }
+ }
+
+ private ExecutableFlow prepareExecDir(File workingDir, File execDir,
+ String flowName, int execId) throws IOException {
+ FileUtils.copyDirectory(execDir, workingDir);
+
+ File jsonFlowFile = new File(workingDir, flowName + ".flow");
+ @SuppressWarnings("unchecked")
+ HashMap<String, Object> flowObj =
+ (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
+
+ Project project = new Project(1, "test");
+ Flow flow = Flow.flowFromObject(flowObj);
+ ExecutableFlow execFlow = new ExecutableFlow(project, flow);
+ execFlow.setExecutionId(execId);
+ execFlow.setExecutionPath(workingDir.getPath());
+ return execFlow;
+ }
}
diff --git a/unit/java/azkaban/test/execapp/EventCollectorListener.java b/unit/java/azkaban/test/execapp/EventCollectorListener.java
index ba763d1..14399e5 100644
--- a/unit/java/azkaban/test/execapp/EventCollectorListener.java
+++ b/unit/java/azkaban/test/execapp/EventCollectorListener.java
@@ -9,61 +9,63 @@ import azkaban.execapp.event.Event.Type;
import azkaban.execapp.event.EventListener;
public class EventCollectorListener implements EventListener {
- private ArrayList<Event> eventList = new ArrayList<Event>();
- private HashSet<Event.Type> filterOutTypes = new HashSet<Event.Type>();
-
- public void setEventFilterOut(Event.Type ... types) {
- filterOutTypes.addAll(Arrays.asList(types));
- }
-
- @Override
- public void handleEvent(Event event) {
- if (!filterOutTypes.contains(event.getType())) {
- eventList.add(event);
- }
- }
+ private ArrayList<Event> eventList = new ArrayList<Event>();
+ private HashSet<Event.Type> filterOutTypes = new HashSet<Event.Type>();
- public ArrayList<Event> getEventList() {
- return eventList;
- }
-
- public void writeAllEvents() {
- for (Event event: eventList) {
- System.out.print(event.getType());
- System.out.print(",");
- }
- }
-
- public boolean checkOrdering() {
- long time = 0;
- for (Event event: eventList) {
- if (time > event.getTime()) {
- return false;
- }
- }
-
- return true;
- }
+ public void setEventFilterOut(Event.Type... types) {
+ filterOutTypes.addAll(Arrays.asList(types));
+ }
- public void checkEventExists(Type[] types) {
- int index = 0;
- for (Event event: eventList) {
- if (event.getRunner() == null) {
- continue;
- }
-
- if (index >= types.length) {
- throw new RuntimeException("More events than expected. Got " + event.getType());
- }
- Type type = types[index++];
+ @Override
+ public void handleEvent(Event event) {
+ if (!filterOutTypes.contains(event.getType())) {
+ eventList.add(event);
+ }
+ }
- if (type != event.getType()) {
- throw new RuntimeException("Got " + event.getType() + ", expected " + type + " index:" + index);
- }
- }
-
- if (types.length != index) {
- throw new RuntimeException("Not enough events.");
- }
- }
+ public ArrayList<Event> getEventList() {
+ return eventList;
+ }
+
+ public void writeAllEvents() {
+ for (Event event : eventList) {
+ System.out.print(event.getType());
+ System.out.print(",");
+ }
+ }
+
+ public boolean checkOrdering() {
+ long time = 0;
+ for (Event event : eventList) {
+ if (time > event.getTime()) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ public void checkEventExists(Type[] types) {
+ int index = 0;
+ for (Event event : eventList) {
+ if (event.getRunner() == null) {
+ continue;
+ }
+
+ if (index >= types.length) {
+ throw new RuntimeException("More events than expected. Got "
+ + event.getType());
+ }
+ Type type = types[index++];
+
+ if (type != event.getType()) {
+ throw new RuntimeException("Got " + event.getType() + ", expected "
+ + type + " index:" + index);
+ }
+ }
+
+ if (types.length != index) {
+ throw new RuntimeException("Not enough events.");
+ }
+ }
}
unit/java/azkaban/test/execapp/FlowRunnerPipelineTest.java 1199(+633 -566)
diff --git a/unit/java/azkaban/test/execapp/FlowRunnerPipelineTest.java b/unit/java/azkaban/test/execapp/FlowRunnerPipelineTest.java
index 49bb4b5..090ba60 100644
--- a/unit/java/azkaban/test/execapp/FlowRunnerPipelineTest.java
+++ b/unit/java/azkaban/test/execapp/FlowRunnerPipelineTest.java
@@ -50,573 +50,640 @@ import azkaban.utils.DirectoryFlowLoader;
* jobd=innerFlow2
* innerFlow2->innerJobA
* @author rpark
- *
*/
public class FlowRunnerPipelineTest {
- private File workingDir;
- private JobTypeManager jobtypeManager;
- private ProjectLoader fakeProjectLoader;
- private ExecutorLoader fakeExecutorLoader;
- private Logger logger = Logger.getLogger(FlowRunnerTest2.class);
- private Project project;
- private Map<String, Flow> flowMap;
- private static int id=101;
-
- public FlowRunnerPipelineTest() {
- }
-
- @Before
- public void setUp() throws Exception {
- System.out.println("Create temp dir");
- workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
- if (workingDir.exists()) {
- FileUtils.deleteDirectory(workingDir);
- }
- workingDir.mkdirs();
- jobtypeManager = new JobTypeManager(null, null, this.getClass().getClassLoader());
- JobTypePluginSet pluginSet = jobtypeManager.getJobTypePluginSet();
-
- pluginSet.addPluginClass("java", JavaJob.class);
- pluginSet.addPluginClass("test", InteractiveTestJob.class);
- fakeProjectLoader = new MockProjectLoader(workingDir);
- fakeExecutorLoader = new MockExecutorLoader();
- project = new Project(1, "testProject");
-
- File dir = new File("unit/executions/embedded2");
- prepareProject(dir);
-
- InteractiveTestJob.clearTestJobs();
- }
-
- @After
- public void tearDown() throws IOException {
- System.out.println("Teardown temp dir");
- if (workingDir != null) {
- FileUtils.deleteDirectory(workingDir);
- workingDir = null;
- }
- }
-
- @Test
- public void testBasicPipelineLevel1Run() throws Exception {
- EventCollectorListener eventCollector = new EventCollectorListener();
- FlowRunner previousRunner = createFlowRunner(eventCollector, "jobf", "prev");
-
- ExecutionOptions options = new ExecutionOptions();
- options.setPipelineExecutionId(previousRunner.getExecutableFlow().getExecutionId());
- options.setPipelineLevel(1);
- FlowWatcher watcher = new LocalFlowWatcher(previousRunner);
- FlowRunner pipelineRunner = createFlowRunner(eventCollector, "jobf", "pipe", options);
- pipelineRunner.setFlowWatcher(watcher);
-
- Map<String, Status> previousExpectedStateMap = new HashMap<String, Status>();
- Map<String, Status> pipelineExpectedStateMap = new HashMap<String, Status>();
- Map<String, ExecutableNode> previousNodeMap = new HashMap<String, ExecutableNode>();
- Map<String, ExecutableNode> pipelineNodeMap = new HashMap<String, ExecutableNode>();
-
- // 1. START FLOW
- ExecutableFlow pipelineFlow = pipelineRunner.getExecutableFlow();
- ExecutableFlow previousFlow = previousRunner.getExecutableFlow();
- createExpectedStateMap(previousFlow, previousExpectedStateMap, previousNodeMap);
- createExpectedStateMap(pipelineFlow, pipelineExpectedStateMap, pipelineNodeMap);
-
- Thread thread1 = runFlowRunnerInThread(previousRunner);
- pause(250);
- Thread thread2 = runFlowRunnerInThread(pipelineRunner);
- pause(500);
-
- previousExpectedStateMap.put("joba", Status.RUNNING);
- previousExpectedStateMap.put("joba1", Status.RUNNING);
- pipelineExpectedStateMap.put("joba", Status.QUEUED);
- pipelineExpectedStateMap.put("joba1", Status.QUEUED);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("prev:joba").succeedJob();
- pause(250);
- previousExpectedStateMap.put("joba", Status.SUCCEEDED);
- previousExpectedStateMap.put("jobb", Status.RUNNING);
- previousExpectedStateMap.put("jobb:innerJobA", Status.RUNNING);
- previousExpectedStateMap.put("jobd", Status.RUNNING);
- previousExpectedStateMap.put("jobc", Status.RUNNING);
- previousExpectedStateMap.put("jobd:innerJobA", Status.RUNNING);
- pipelineExpectedStateMap.put("joba", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("prev:jobb:innerJobA").succeedJob();
- pause(250);
- previousExpectedStateMap.put("jobb:innerJobA", Status.SUCCEEDED);
- previousExpectedStateMap.put("jobb:innerJobB", Status.RUNNING);
- previousExpectedStateMap.put("jobb:innerJobC", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:joba").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("joba", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("jobb", Status.RUNNING);
- pipelineExpectedStateMap.put("jobd", Status.RUNNING);
- pipelineExpectedStateMap.put("jobc", Status.QUEUED);
- pipelineExpectedStateMap.put("jobd:innerJobA", Status.QUEUED);
- pipelineExpectedStateMap.put("jobb:innerJobA", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("prev:jobd:innerJobA").succeedJob();
- pause(250);
- previousExpectedStateMap.put("jobd:innerJobA", Status.SUCCEEDED);
- previousExpectedStateMap.put("jobd:innerFlow2", Status.RUNNING);
- pipelineExpectedStateMap.put("jobd:innerJobA", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- // Finish the previous d side
- InteractiveTestJob.getTestJob("prev:jobd:innerFlow2").succeedJob();
- pause(250);
- previousExpectedStateMap.put("jobd:innerFlow2", Status.SUCCEEDED);
- previousExpectedStateMap.put("jobd", Status.SUCCEEDED);
- compareStates(previousExpectedStateMap, previousNodeMap);
-
- InteractiveTestJob.getTestJob("prev:jobb:innerJobB").succeedJob();
- InteractiveTestJob.getTestJob("prev:jobb:innerJobC").succeedJob();
- InteractiveTestJob.getTestJob("prev:jobc").succeedJob();
- pause(250);
- InteractiveTestJob.getTestJob("pipe:jobb:innerJobA").succeedJob();
- pause(250);
- previousExpectedStateMap.put("jobb:innerJobB", Status.SUCCEEDED);
- previousExpectedStateMap.put("jobb:innerJobC", Status.SUCCEEDED);
- previousExpectedStateMap.put("jobb:innerFlow", Status.RUNNING);
- previousExpectedStateMap.put("jobc", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("jobb:innerJobA", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("jobc", Status.RUNNING);
- pipelineExpectedStateMap.put("jobb:innerJobB", Status.RUNNING);
- pipelineExpectedStateMap.put("jobb:innerJobC", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("prev:jobb:innerFlow").succeedJob();
- InteractiveTestJob.getTestJob("pipe:jobc").succeedJob();
- pause(250);
- previousExpectedStateMap.put("jobb:innerFlow", Status.SUCCEEDED);
- previousExpectedStateMap.put("jobb", Status.SUCCEEDED);
- previousExpectedStateMap.put("jobe", Status.RUNNING);
- pipelineExpectedStateMap.put("jobc", Status.SUCCEEDED);
-
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:jobb:innerJobB").succeedJob();
- InteractiveTestJob.getTestJob("pipe:jobb:innerJobC").succeedJob();
- InteractiveTestJob.getTestJob("prev:jobe").succeedJob();
- pause(250);
- previousExpectedStateMap.put("jobe", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("jobb:innerJobB", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("jobb:innerJobC", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("jobb:innerFlow", Status.RUNNING);
-
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:jobd:innerJobA").succeedJob();
- InteractiveTestJob.getTestJob("pipe:jobb:innerFlow").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("jobb", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("jobd:innerJobA", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("jobb:innerFlow", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("jobd:innerFlow2", Status.RUNNING);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:jobd:innerFlow2").succeedJob();
- InteractiveTestJob.getTestJob("prev:joba1").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("jobd:innerFlow2", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("jobd", Status.SUCCEEDED);
- previousExpectedStateMap.put("jobf", Status.RUNNING);
- previousExpectedStateMap.put("joba1", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("joba1", Status.RUNNING);
- pipelineExpectedStateMap.put("jobe", Status.RUNNING);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
- compareStates(previousExpectedStateMap, previousNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:jobe").succeedJob();
- InteractiveTestJob.getTestJob("prev:jobf").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("jobe", Status.SUCCEEDED);
- previousExpectedStateMap.put("jobf", Status.SUCCEEDED);
- Assert.assertEquals(Status.SUCCEEDED, previousFlow.getStatus());
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:joba1").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("joba1", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("jobf", Status.RUNNING);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:jobf").succeedJob();
- pause(250);
- Assert.assertEquals(Status.SUCCEEDED, pipelineFlow.getStatus());
- Assert.assertFalse(thread1.isAlive());
- Assert.assertFalse(thread2.isAlive());
- }
-
- @Test
- public void testBasicPipelineLevel2Run() throws Exception {
- EventCollectorListener eventCollector = new EventCollectorListener();
- FlowRunner previousRunner = createFlowRunner(eventCollector, "pipelineFlow", "prev");
-
- ExecutionOptions options = new ExecutionOptions();
- options.setPipelineExecutionId(previousRunner.getExecutableFlow().getExecutionId());
- options.setPipelineLevel(2);
- FlowWatcher watcher = new LocalFlowWatcher(previousRunner);
- FlowRunner pipelineRunner = createFlowRunner(eventCollector, "pipelineFlow", "pipe", options);
- pipelineRunner.setFlowWatcher(watcher);
-
- Map<String, Status> previousExpectedStateMap = new HashMap<String, Status>();
- Map<String, Status> pipelineExpectedStateMap = new HashMap<String, Status>();
- Map<String, ExecutableNode> previousNodeMap = new HashMap<String, ExecutableNode>();
- Map<String, ExecutableNode> pipelineNodeMap = new HashMap<String, ExecutableNode>();
-
- // 1. START FLOW
- ExecutableFlow pipelineFlow = pipelineRunner.getExecutableFlow();
- ExecutableFlow previousFlow = previousRunner.getExecutableFlow();
- createExpectedStateMap(previousFlow, previousExpectedStateMap, previousNodeMap);
- createExpectedStateMap(pipelineFlow, pipelineExpectedStateMap, pipelineNodeMap);
-
- Thread thread1 = runFlowRunnerInThread(previousRunner);
- pause(250);
- Thread thread2 = runFlowRunnerInThread(pipelineRunner);
- pause(250);
-
- previousExpectedStateMap.put("pipeline1", Status.RUNNING);
- pipelineExpectedStateMap.put("pipeline1", Status.QUEUED);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("prev:pipeline1").succeedJob();
- pause(250);
- previousExpectedStateMap.put("pipeline1", Status.SUCCEEDED);
- previousExpectedStateMap.put("pipeline2", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("prev:pipeline2").succeedJob();
- pause(250);
- previousExpectedStateMap.put("pipeline2", Status.SUCCEEDED);
- previousExpectedStateMap.put("pipelineEmbeddedFlow3", Status.RUNNING);
- previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobA", Status.RUNNING);
- pipelineExpectedStateMap.put("pipeline1", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:pipeline1").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("pipeline1", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("pipeline2", Status.QUEUED);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("prev:pipelineEmbeddedFlow3:innerJobA").succeedJob();
- pause(250);
- previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobA", Status.SUCCEEDED);
- previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobB", Status.RUNNING);
- previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobC", Status.RUNNING);
- pipelineExpectedStateMap.put("pipeline2", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:pipeline2").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("pipeline2", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("pipelineEmbeddedFlow3", Status.RUNNING);
- pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobA", Status.QUEUED);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("prev:pipelineEmbeddedFlow3:innerJobB").succeedJob();
- pause(250);
- previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobB", Status.SUCCEEDED);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("prev:pipelineEmbeddedFlow3:innerJobC").succeedJob();
- pause(250);
- previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerFlow", Status.RUNNING);
- previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobC", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobA", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:pipelineEmbeddedFlow3:innerJobA").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobA", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobC", Status.QUEUED);
- pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobB", Status.QUEUED);
- previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerFlow", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("prev:pipelineEmbeddedFlow3:innerFlow").succeedJob();
- pause(250);
- previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerFlow", Status.SUCCEEDED);
- previousExpectedStateMap.put("pipelineEmbeddedFlow3", Status.SUCCEEDED);
- previousExpectedStateMap.put("pipeline4", Status.RUNNING);
- pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobC", Status.RUNNING);
- pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobB", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:pipelineEmbeddedFlow3:innerJobB").succeedJob();
- InteractiveTestJob.getTestJob("pipe:pipelineEmbeddedFlow3:innerJobC").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobC", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobB", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerFlow", Status.QUEUED);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("prev:pipeline4").succeedJob();
- pause(250);
- previousExpectedStateMap.put("pipeline4", Status.SUCCEEDED);
- previousExpectedStateMap.put("pipelineFlow", Status.RUNNING);
- pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerFlow", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("prev:pipelineFlow").succeedJob();
- pause(250);
- previousExpectedStateMap.put("pipelineFlow", Status.SUCCEEDED);
- Assert.assertEquals(Status.SUCCEEDED, previousFlow.getStatus());
- Assert.assertFalse(thread1.isAlive());
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:pipelineEmbeddedFlow3:innerFlow").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerFlow", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("pipelineEmbeddedFlow3", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("pipeline4", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:pipeline4").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("pipeline4", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("pipelineFlow", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:pipelineFlow").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("pipelineFlow", Status.SUCCEEDED);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
- Assert.assertEquals(Status.SUCCEEDED, pipelineFlow.getStatus());
- Assert.assertFalse(thread2.isAlive());
- }
-
- @Test
- public void testBasicPipelineLevel2Run2() throws Exception {
- EventCollectorListener eventCollector = new EventCollectorListener();
- FlowRunner previousRunner = createFlowRunner(eventCollector, "pipeline1_2", "prev");
-
- ExecutionOptions options = new ExecutionOptions();
- options.setPipelineExecutionId(previousRunner.getExecutableFlow().getExecutionId());
- options.setPipelineLevel(2);
- FlowWatcher watcher = new LocalFlowWatcher(previousRunner);
- FlowRunner pipelineRunner = createFlowRunner(eventCollector, "pipeline1_2", "pipe", options);
- pipelineRunner.setFlowWatcher(watcher);
-
- Map<String, Status> previousExpectedStateMap = new HashMap<String, Status>();
- Map<String, Status> pipelineExpectedStateMap = new HashMap<String, Status>();
- Map<String, ExecutableNode> previousNodeMap = new HashMap<String, ExecutableNode>();
- Map<String, ExecutableNode> pipelineNodeMap = new HashMap<String, ExecutableNode>();
-
- // 1. START FLOW
- ExecutableFlow pipelineFlow = pipelineRunner.getExecutableFlow();
- ExecutableFlow previousFlow = previousRunner.getExecutableFlow();
- createExpectedStateMap(previousFlow, previousExpectedStateMap, previousNodeMap);
- createExpectedStateMap(pipelineFlow, pipelineExpectedStateMap, pipelineNodeMap);
-
- Thread thread1 = runFlowRunnerInThread(previousRunner);
- pause(250);
- Thread thread2 = runFlowRunnerInThread(pipelineRunner);
- pause(250);
-
- previousExpectedStateMap.put("pipeline1_1", Status.RUNNING);
- previousExpectedStateMap.put("pipeline1_1:innerJobA", Status.RUNNING);
- pipelineExpectedStateMap.put("pipeline1_1", Status.RUNNING);
- pipelineExpectedStateMap.put("pipeline1_1:innerJobA", Status.QUEUED);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("prev:pipeline1_1:innerJobA").succeedJob();
- pause(250);
- previousExpectedStateMap.put("pipeline1_1:innerJobA", Status.SUCCEEDED);
- previousExpectedStateMap.put("pipeline1_1:innerFlow2", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("prev:pipeline1_1:innerFlow2").succeedJob();
- pause(250);
- previousExpectedStateMap.put("pipeline1_1", Status.SUCCEEDED);
- previousExpectedStateMap.put("pipeline1_1:innerFlow2", Status.SUCCEEDED);
- previousExpectedStateMap.put("pipeline1_2", Status.RUNNING);
- previousExpectedStateMap.put("pipeline1_2:innerJobA", Status.RUNNING);
- pipelineExpectedStateMap.put("pipeline1_1:innerJobA", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:pipeline1_1:innerJobA").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("pipeline1_1:innerJobA", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("pipeline1_1:innerFlow2", Status.QUEUED);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("prev:pipeline1_2:innerJobA").succeedJob();
- pause(250);
- previousExpectedStateMap.put("pipeline1_2:innerJobA", Status.SUCCEEDED);
- previousExpectedStateMap.put("pipeline1_2:innerFlow2", Status.RUNNING);
- pipelineExpectedStateMap.put("pipeline1_1:innerFlow2", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:pipeline1_1:innerFlow2").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("pipeline1_1", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("pipeline1_1:innerFlow2", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("pipeline1_2", Status.RUNNING);
- pipelineExpectedStateMap.put("pipeline1_2:innerJobA", Status.QUEUED);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:pipeline1_1:innerFlow2").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("pipeline1_1", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("pipeline1_1:innerFlow2", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("pipeline1_2", Status.RUNNING);
- pipelineExpectedStateMap.put("pipeline1_2:innerJobA", Status.QUEUED);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("prev:pipeline1_2:innerFlow2").succeedJob();
- pause(250);
- previousExpectedStateMap.put("pipeline1_2:innerFlow2", Status.SUCCEEDED);
- previousExpectedStateMap.put("pipeline1_2", Status.SUCCEEDED);
- Assert.assertEquals(Status.SUCCEEDED, previousFlow.getStatus());
- Assert.assertFalse(thread1.isAlive());
- pipelineExpectedStateMap.put("pipeline1_2:innerJobA", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:pipeline1_2:innerJobA").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("pipeline1_2:innerJobA", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("pipeline1_2:innerFlow2", Status.RUNNING);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
-
- InteractiveTestJob.getTestJob("pipe:pipeline1_2:innerFlow2").succeedJob();
- pause(250);
- pipelineExpectedStateMap.put("pipeline1_2", Status.SUCCEEDED);
- pipelineExpectedStateMap.put("pipeline1_2:innerFlow2", Status.SUCCEEDED);
- compareStates(previousExpectedStateMap, previousNodeMap);
- compareStates(pipelineExpectedStateMap, pipelineNodeMap);
- Assert.assertEquals(Status.SUCCEEDED, pipelineFlow.getStatus());
- Assert.assertFalse(thread2.isAlive());
- }
-
- private Thread runFlowRunnerInThread(FlowRunner runner) {
- Thread thread = new Thread(runner);
- thread.start();
- return thread;
- }
-
- private void pause(long millisec) {
- synchronized(this) {
- try {
- wait(millisec);
- }
- catch (InterruptedException e) {
- }
- }
- }
-
- private void createExpectedStateMap(ExecutableFlowBase flow, Map<String, Status> expectedStateMap, Map<String, ExecutableNode> nodeMap) {
- for (ExecutableNode node: flow.getExecutableNodes()) {
- expectedStateMap.put(node.getNestedId(), node.getStatus());
- nodeMap.put(node.getNestedId(), node);
-
- if (node instanceof ExecutableFlowBase) {
- createExpectedStateMap((ExecutableFlowBase)node, expectedStateMap, nodeMap);
- }
- }
- }
-
- private void compareStates(Map<String, Status> expectedStateMap, Map<String, ExecutableNode> nodeMap) {
- for (String printedId: expectedStateMap.keySet()) {
- Status expectedStatus = expectedStateMap.get(printedId);
- ExecutableNode node = nodeMap.get(printedId);
- if (node == null) {
- System.out.println("id node: " + printedId + " doesn't exist.");
- }
- if (expectedStatus != node.getStatus()) {
- Assert.fail("Expected values do not match for " + printedId + ". Expected " + expectedStatus + ", instead received " + node.getStatus());
- }
- }
- }
-
- private void prepareProject(File directory) throws ProjectManagerException, IOException {
- DirectoryFlowLoader loader = new DirectoryFlowLoader(logger);
- loader.loadProjectFlow(directory);
- if (!loader.getErrors().isEmpty()) {
- for (String error: loader.getErrors()) {
- System.out.println(error);
- }
-
- throw new RuntimeException("Errors found in setup");
- }
-
- flowMap = loader.getFlowMap();
- project.setFlows(flowMap);
- FileUtils.copyDirectory(directory, workingDir);
- }
-
-// private void printCurrentState(String prefix, ExecutableFlowBase flow) {
-// for(ExecutableNode node: flow.getExecutableNodes()) {
-//
-// System.err.println(prefix + node.getNestedId() + "->" + node.getStatus().name());
-// if (node instanceof ExecutableFlowBase) {
-// printCurrentState(prefix, (ExecutableFlowBase)node);
-// }
-// }
-// }
-//
- private FlowRunner createFlowRunner(EventCollectorListener eventCollector, String flowName, String groupName) throws Exception {
- return createFlowRunner(eventCollector, flowName, groupName, new ExecutionOptions());
- }
-
- private FlowRunner createFlowRunner(EventCollectorListener eventCollector, String flowName, String groupName, ExecutionOptions options) throws Exception {
- Flow flow = flowMap.get(flowName);
-
- int exId = id++;
- ExecutableFlow exFlow = new ExecutableFlow(project, flow);
- exFlow.setExecutionPath(workingDir.getPath());
- exFlow.setExecutionId(exId);
-
- Map<String, String> flowParam = new HashMap<String, String>();
- flowParam.put("group", groupName);
- options.addAllFlowParameters(flowParam);
- exFlow.setExecutionOptions(options);
- fakeExecutorLoader.uploadExecutableFlow(exFlow);
-
- FlowRunner runner = new FlowRunner(fakeExecutorLoader.fetchExecutableFlow(exId), fakeExecutorLoader, fakeProjectLoader, jobtypeManager);
-
- runner.addListener(eventCollector);
-
- return runner;
- }
+ private File workingDir;
+ private JobTypeManager jobtypeManager;
+ private ProjectLoader fakeProjectLoader;
+ private ExecutorLoader fakeExecutorLoader;
+ private Logger logger = Logger.getLogger(FlowRunnerTest2.class);
+ private Project project;
+ private Map<String, Flow> flowMap;
+ private static int id = 101;
+
+ public FlowRunnerPipelineTest() {
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ System.out.println("Create temp dir");
+ workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
+ if (workingDir.exists()) {
+ FileUtils.deleteDirectory(workingDir);
+ }
+ workingDir.mkdirs();
+ jobtypeManager =
+ new JobTypeManager(null, null, this.getClass().getClassLoader());
+ JobTypePluginSet pluginSet = jobtypeManager.getJobTypePluginSet();
+
+ pluginSet.addPluginClass("java", JavaJob.class);
+ pluginSet.addPluginClass("test", InteractiveTestJob.class);
+ fakeProjectLoader = new MockProjectLoader(workingDir);
+ fakeExecutorLoader = new MockExecutorLoader();
+ project = new Project(1, "testProject");
+
+ File dir = new File("unit/executions/embedded2");
+ prepareProject(dir);
+
+ InteractiveTestJob.clearTestJobs();
+ }
+
+ @After
+ public void tearDown() throws IOException {
+ System.out.println("Teardown temp dir");
+ if (workingDir != null) {
+ FileUtils.deleteDirectory(workingDir);
+ workingDir = null;
+ }
+ }
+
+  /**
+   * Pipelining at level 1: each job in the pipelined ("pipe") execution of
+   * flow "jobf" stays QUEUED until the job with the same nested id in the
+   * previous ("prev") execution has succeeded, after which it runs normally.
+   * The test drives both executions to completion one job at a time.
+   */
+  @Test
+  public void testBasicPipelineLevel1Run() throws Exception {
+    EventCollectorListener eventCollector = new EventCollectorListener();
+    FlowRunner previousRunner =
+        createFlowRunner(eventCollector, "jobf", "prev");
+
+    ExecutionOptions options = new ExecutionOptions();
+    options.setPipelineExecutionId(previousRunner.getExecutableFlow()
+        .getExecutionId());
+    options.setPipelineLevel(1);
+    FlowWatcher watcher = new LocalFlowWatcher(previousRunner);
+    FlowRunner pipelineRunner =
+        createFlowRunner(eventCollector, "jobf", "pipe", options);
+    pipelineRunner.setFlowWatcher(watcher);
+
+    // Expected status per nested node id, and the node objects themselves,
+    // for each of the two executions.
+    Map<String, Status> previousExpectedStateMap =
+        new HashMap<String, Status>();
+    Map<String, Status> pipelineExpectedStateMap =
+        new HashMap<String, Status>();
+    Map<String, ExecutableNode> previousNodeMap =
+        new HashMap<String, ExecutableNode>();
+    Map<String, ExecutableNode> pipelineNodeMap =
+        new HashMap<String, ExecutableNode>();
+
+    // 1. START FLOW
+    ExecutableFlow pipelineFlow = pipelineRunner.getExecutableFlow();
+    ExecutableFlow previousFlow = previousRunner.getExecutableFlow();
+    createExpectedStateMap(previousFlow, previousExpectedStateMap,
+        previousNodeMap);
+    createExpectedStateMap(pipelineFlow, pipelineExpectedStateMap,
+        pipelineNodeMap);
+
+    Thread thread1 = runFlowRunnerInThread(previousRunner);
+    pause(250);
+    Thread thread2 = runFlowRunnerInThread(pipelineRunner);
+    pause(500);
+
+    // Pipelined start jobs queue behind their "prev" counterparts.
+    previousExpectedStateMap.put("joba", Status.RUNNING);
+    previousExpectedStateMap.put("joba1", Status.RUNNING);
+    pipelineExpectedStateMap.put("joba", Status.QUEUED);
+    pipelineExpectedStateMap.put("joba1", Status.QUEUED);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    // Succeeding prev:joba releases pipe:joba and starts prev's successors.
+    InteractiveTestJob.getTestJob("prev:joba").succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("joba", Status.SUCCEEDED);
+    previousExpectedStateMap.put("jobb", Status.RUNNING);
+    previousExpectedStateMap.put("jobb:innerJobA", Status.RUNNING);
+    previousExpectedStateMap.put("jobd", Status.RUNNING);
+    previousExpectedStateMap.put("jobc", Status.RUNNING);
+    previousExpectedStateMap.put("jobd:innerJobA", Status.RUNNING);
+    pipelineExpectedStateMap.put("joba", Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("prev:jobb:innerJobA").succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("jobb:innerJobA", Status.SUCCEEDED);
+    previousExpectedStateMap.put("jobb:innerJobB", Status.RUNNING);
+    previousExpectedStateMap.put("jobb:innerJobC", Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:joba").succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("joba", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("jobb", Status.RUNNING);
+    pipelineExpectedStateMap.put("jobd", Status.RUNNING);
+    pipelineExpectedStateMap.put("jobc", Status.QUEUED);
+    pipelineExpectedStateMap.put("jobd:innerJobA", Status.QUEUED);
+    pipelineExpectedStateMap.put("jobb:innerJobA", Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("prev:jobd:innerJobA").succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("jobd:innerJobA", Status.SUCCEEDED);
+    previousExpectedStateMap.put("jobd:innerFlow2", Status.RUNNING);
+    pipelineExpectedStateMap.put("jobd:innerJobA", Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    // Finish the previous d side
+    InteractiveTestJob.getTestJob("prev:jobd:innerFlow2").succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("jobd:innerFlow2", Status.SUCCEEDED);
+    previousExpectedStateMap.put("jobd", Status.SUCCEEDED);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+
+    InteractiveTestJob.getTestJob("prev:jobb:innerJobB").succeedJob();
+    InteractiveTestJob.getTestJob("prev:jobb:innerJobC").succeedJob();
+    InteractiveTestJob.getTestJob("prev:jobc").succeedJob();
+    pause(250);
+    InteractiveTestJob.getTestJob("pipe:jobb:innerJobA").succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("jobb:innerJobB", Status.SUCCEEDED);
+    previousExpectedStateMap.put("jobb:innerJobC", Status.SUCCEEDED);
+    previousExpectedStateMap.put("jobb:innerFlow", Status.RUNNING);
+    previousExpectedStateMap.put("jobc", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("jobb:innerJobA", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("jobc", Status.RUNNING);
+    pipelineExpectedStateMap.put("jobb:innerJobB", Status.RUNNING);
+    pipelineExpectedStateMap.put("jobb:innerJobC", Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("prev:jobb:innerFlow").succeedJob();
+    InteractiveTestJob.getTestJob("pipe:jobc").succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("jobb:innerFlow", Status.SUCCEEDED);
+    previousExpectedStateMap.put("jobb", Status.SUCCEEDED);
+    previousExpectedStateMap.put("jobe", Status.RUNNING);
+    pipelineExpectedStateMap.put("jobc", Status.SUCCEEDED);
+
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:jobb:innerJobB").succeedJob();
+    InteractiveTestJob.getTestJob("pipe:jobb:innerJobC").succeedJob();
+    InteractiveTestJob.getTestJob("prev:jobe").succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("jobe", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("jobb:innerJobB", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("jobb:innerJobC", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("jobb:innerFlow", Status.RUNNING);
+
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:jobd:innerJobA").succeedJob();
+    InteractiveTestJob.getTestJob("pipe:jobb:innerFlow").succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("jobb", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("jobd:innerJobA", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("jobb:innerFlow", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("jobd:innerFlow2", Status.RUNNING);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:jobd:innerFlow2").succeedJob();
+    InteractiveTestJob.getTestJob("prev:joba1").succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("jobd:innerFlow2", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("jobd", Status.SUCCEEDED);
+    previousExpectedStateMap.put("jobf", Status.RUNNING);
+    previousExpectedStateMap.put("joba1", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("joba1", Status.RUNNING);
+    pipelineExpectedStateMap.put("jobe", Status.RUNNING);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+
+    // The previous execution completes first; the pipelined one follows.
+    InteractiveTestJob.getTestJob("pipe:jobe").succeedJob();
+    InteractiveTestJob.getTestJob("prev:jobf").succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("jobe", Status.SUCCEEDED);
+    previousExpectedStateMap.put("jobf", Status.SUCCEEDED);
+    Assert.assertEquals(Status.SUCCEEDED, previousFlow.getStatus());
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:joba1").succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("joba1", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("jobf", Status.RUNNING);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:jobf").succeedJob();
+    pause(250);
+    Assert.assertEquals(Status.SUCCEEDED, pipelineFlow.getStatus());
+    // Both runner threads must have exited once their flows finished.
+    Assert.assertFalse(thread1.isAlive());
+    Assert.assertFalse(thread2.isAlive());
+  }
+
+  /**
+   * Pipelining at level 2 on flow "pipelineFlow": a pipelined job waits not
+   * just for its own counterpart in the previous execution but for that
+   * counterpart's children as well, so the pipelined run trails the previous
+   * one by two levels.
+   */
+  @Test
+  public void testBasicPipelineLevel2Run() throws Exception {
+    EventCollectorListener eventCollector = new EventCollectorListener();
+    FlowRunner previousRunner =
+        createFlowRunner(eventCollector, "pipelineFlow", "prev");
+
+    ExecutionOptions options = new ExecutionOptions();
+    options.setPipelineExecutionId(previousRunner.getExecutableFlow()
+        .getExecutionId());
+    options.setPipelineLevel(2);
+    FlowWatcher watcher = new LocalFlowWatcher(previousRunner);
+    FlowRunner pipelineRunner =
+        createFlowRunner(eventCollector, "pipelineFlow", "pipe", options);
+    pipelineRunner.setFlowWatcher(watcher);
+
+    // Expected status per nested node id, and the node objects themselves,
+    // for each of the two executions.
+    Map<String, Status> previousExpectedStateMap =
+        new HashMap<String, Status>();
+    Map<String, Status> pipelineExpectedStateMap =
+        new HashMap<String, Status>();
+    Map<String, ExecutableNode> previousNodeMap =
+        new HashMap<String, ExecutableNode>();
+    Map<String, ExecutableNode> pipelineNodeMap =
+        new HashMap<String, ExecutableNode>();
+
+    // 1. START FLOW
+    ExecutableFlow pipelineFlow = pipelineRunner.getExecutableFlow();
+    ExecutableFlow previousFlow = previousRunner.getExecutableFlow();
+    createExpectedStateMap(previousFlow, previousExpectedStateMap,
+        previousNodeMap);
+    createExpectedStateMap(pipelineFlow, pipelineExpectedStateMap,
+        pipelineNodeMap);
+
+    Thread thread1 = runFlowRunnerInThread(previousRunner);
+    pause(250);
+    Thread thread2 = runFlowRunnerInThread(pipelineRunner);
+    pause(250);
+
+    previousExpectedStateMap.put("pipeline1", Status.RUNNING);
+    pipelineExpectedStateMap.put("pipeline1", Status.QUEUED);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    // At level 2, pipe:pipeline1 stays queued until prev:pipeline2 (the
+    // child of its counterpart) is running.
+    InteractiveTestJob.getTestJob("prev:pipeline1").succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("pipeline1", Status.SUCCEEDED);
+    previousExpectedStateMap.put("pipeline2", Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("prev:pipeline2").succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("pipeline2", Status.SUCCEEDED);
+    previousExpectedStateMap.put("pipelineEmbeddedFlow3", Status.RUNNING);
+    previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobA",
+        Status.RUNNING);
+    pipelineExpectedStateMap.put("pipeline1", Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:pipeline1").succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("pipeline1", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("pipeline2", Status.QUEUED);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("prev:pipelineEmbeddedFlow3:innerJobA")
+        .succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobA",
+        Status.SUCCEEDED);
+    previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobB",
+        Status.RUNNING);
+    previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobC",
+        Status.RUNNING);
+    pipelineExpectedStateMap.put("pipeline2", Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:pipeline2").succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("pipeline2", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("pipelineEmbeddedFlow3", Status.RUNNING);
+    pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobA",
+        Status.QUEUED);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("prev:pipelineEmbeddedFlow3:innerJobB")
+        .succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobB",
+        Status.SUCCEEDED);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("prev:pipelineEmbeddedFlow3:innerJobC")
+        .succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerFlow",
+        Status.RUNNING);
+    previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobC",
+        Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobA",
+        Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:pipelineEmbeddedFlow3:innerJobA")
+        .succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobA",
+        Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobC",
+        Status.QUEUED);
+    pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobB",
+        Status.QUEUED);
+    previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerFlow",
+        Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("prev:pipelineEmbeddedFlow3:innerFlow")
+        .succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("pipelineEmbeddedFlow3:innerFlow",
+        Status.SUCCEEDED);
+    previousExpectedStateMap.put("pipelineEmbeddedFlow3", Status.SUCCEEDED);
+    previousExpectedStateMap.put("pipeline4", Status.RUNNING);
+    pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobC",
+        Status.RUNNING);
+    pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobB",
+        Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:pipelineEmbeddedFlow3:innerJobB")
+        .succeedJob();
+    InteractiveTestJob.getTestJob("pipe:pipelineEmbeddedFlow3:innerJobC")
+        .succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobC",
+        Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerJobB",
+        Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerFlow",
+        Status.QUEUED);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("prev:pipeline4").succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("pipeline4", Status.SUCCEEDED);
+    previousExpectedStateMap.put("pipelineFlow", Status.RUNNING);
+    pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerFlow",
+        Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    // Previous execution finishes; its runner thread must exit.
+    InteractiveTestJob.getTestJob("prev:pipelineFlow").succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("pipelineFlow", Status.SUCCEEDED);
+    Assert.assertEquals(Status.SUCCEEDED, previousFlow.getStatus());
+    Assert.assertFalse(thread1.isAlive());
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:pipelineEmbeddedFlow3:innerFlow")
+        .succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("pipelineEmbeddedFlow3:innerFlow",
+        Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("pipelineEmbeddedFlow3", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("pipeline4", Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:pipeline4").succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("pipeline4", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("pipelineFlow", Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:pipelineFlow").succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("pipelineFlow", Status.SUCCEEDED);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+    Assert.assertEquals(Status.SUCCEEDED, pipelineFlow.getStatus());
+    Assert.assertFalse(thread2.isAlive());
+  }
+
+  /**
+   * Pipelining at level 2 on flow "pipeline1_2", which contains embedded
+   * sub-flows: verifies that queue/release decisions follow the previous
+   * execution's progress through the nested flows.
+   */
+  @Test
+  public void testBasicPipelineLevel2Run2() throws Exception {
+    EventCollectorListener eventCollector = new EventCollectorListener();
+    FlowRunner previousRunner =
+        createFlowRunner(eventCollector, "pipeline1_2", "prev");
+
+    ExecutionOptions options = new ExecutionOptions();
+    options.setPipelineExecutionId(previousRunner.getExecutableFlow()
+        .getExecutionId());
+    options.setPipelineLevel(2);
+    FlowWatcher watcher = new LocalFlowWatcher(previousRunner);
+    FlowRunner pipelineRunner =
+        createFlowRunner(eventCollector, "pipeline1_2", "pipe", options);
+    pipelineRunner.setFlowWatcher(watcher);
+
+    // Expected status per nested node id, and the node objects themselves,
+    // for each of the two executions.
+    Map<String, Status> previousExpectedStateMap =
+        new HashMap<String, Status>();
+    Map<String, Status> pipelineExpectedStateMap =
+        new HashMap<String, Status>();
+    Map<String, ExecutableNode> previousNodeMap =
+        new HashMap<String, ExecutableNode>();
+    Map<String, ExecutableNode> pipelineNodeMap =
+        new HashMap<String, ExecutableNode>();
+
+    // 1. START FLOW
+    ExecutableFlow pipelineFlow = pipelineRunner.getExecutableFlow();
+    ExecutableFlow previousFlow = previousRunner.getExecutableFlow();
+    createExpectedStateMap(previousFlow, previousExpectedStateMap,
+        previousNodeMap);
+    createExpectedStateMap(pipelineFlow, pipelineExpectedStateMap,
+        pipelineNodeMap);
+
+    Thread thread1 = runFlowRunnerInThread(previousRunner);
+    pause(250);
+    Thread thread2 = runFlowRunnerInThread(pipelineRunner);
+    pause(250);
+
+    previousExpectedStateMap.put("pipeline1_1", Status.RUNNING);
+    previousExpectedStateMap.put("pipeline1_1:innerJobA", Status.RUNNING);
+    pipelineExpectedStateMap.put("pipeline1_1", Status.RUNNING);
+    pipelineExpectedStateMap.put("pipeline1_1:innerJobA", Status.QUEUED);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("prev:pipeline1_1:innerJobA").succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("pipeline1_1:innerJobA", Status.SUCCEEDED);
+    previousExpectedStateMap.put("pipeline1_1:innerFlow2", Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("prev:pipeline1_1:innerFlow2").succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("pipeline1_1", Status.SUCCEEDED);
+    previousExpectedStateMap.put("pipeline1_1:innerFlow2", Status.SUCCEEDED);
+    previousExpectedStateMap.put("pipeline1_2", Status.RUNNING);
+    previousExpectedStateMap.put("pipeline1_2:innerJobA", Status.RUNNING);
+    pipelineExpectedStateMap.put("pipeline1_1:innerJobA", Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:pipeline1_1:innerJobA").succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("pipeline1_1:innerJobA", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("pipeline1_1:innerFlow2", Status.QUEUED);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("prev:pipeline1_2:innerJobA").succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("pipeline1_2:innerJobA", Status.SUCCEEDED);
+    previousExpectedStateMap.put("pipeline1_2:innerFlow2", Status.RUNNING);
+    pipelineExpectedStateMap.put("pipeline1_1:innerFlow2", Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:pipeline1_1:innerFlow2").succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("pipeline1_1", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("pipeline1_1:innerFlow2", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("pipeline1_2", Status.RUNNING);
+    pipelineExpectedStateMap.put("pipeline1_2:innerJobA", Status.QUEUED);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    // NOTE(review): this block is an exact duplicate of the preceding
+    // succeedJob/assert block for "pipe:pipeline1_1:innerFlow2" — likely a
+    // copy-paste; verify whether the second succeedJob call is intentional.
+    InteractiveTestJob.getTestJob("pipe:pipeline1_1:innerFlow2").succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("pipeline1_1", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("pipeline1_1:innerFlow2", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("pipeline1_2", Status.RUNNING);
+    pipelineExpectedStateMap.put("pipeline1_2:innerJobA", Status.QUEUED);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("prev:pipeline1_2:innerFlow2").succeedJob();
+    pause(250);
+    previousExpectedStateMap.put("pipeline1_2:innerFlow2", Status.SUCCEEDED);
+    previousExpectedStateMap.put("pipeline1_2", Status.SUCCEEDED);
+    Assert.assertEquals(Status.SUCCEEDED, previousFlow.getStatus());
+    Assert.assertFalse(thread1.isAlive());
+    pipelineExpectedStateMap.put("pipeline1_2:innerJobA", Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:pipeline1_2:innerJobA").succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("pipeline1_2:innerJobA", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("pipeline1_2:innerFlow2", Status.RUNNING);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+
+    InteractiveTestJob.getTestJob("pipe:pipeline1_2:innerFlow2").succeedJob();
+    pause(250);
+    pipelineExpectedStateMap.put("pipeline1_2", Status.SUCCEEDED);
+    pipelineExpectedStateMap.put("pipeline1_2:innerFlow2", Status.SUCCEEDED);
+    compareStates(previousExpectedStateMap, previousNodeMap);
+    compareStates(pipelineExpectedStateMap, pipelineNodeMap);
+    Assert.assertEquals(Status.SUCCEEDED, pipelineFlow.getStatus());
+    Assert.assertFalse(thread2.isAlive());
+  }
+
+  /**
+   * Starts the given runner on its own thread so the test can drive job
+   * completion concurrently, returning the thread for liveness checks.
+   */
+  private Thread runFlowRunnerInThread(FlowRunner runner) {
+    Thread flowThread = new Thread(runner);
+    flowThread.start();
+    return flowThread;
+  }
+
+  /**
+   * Blocks the test thread for roughly the given number of milliseconds so
+   * the asynchronous flow-runner threads can make progress. Implemented as a
+   * timed wait() on this object; nothing ever notifies, so it simply times
+   * out.
+   */
+  private void pause(long millisec) {
+    synchronized (this) {
+      try {
+        wait(millisec);
+      } catch (InterruptedException e) {
+        // Restore the interrupt status rather than silently swallowing it,
+        // so callers (and the JUnit runner) can still observe interruption.
+        Thread.currentThread().interrupt();
+      }
+    }
+  }
+
+  /**
+   * Recursively records, for every node in the given flow (including nodes
+   * inside embedded sub-flows), its current status and the node object
+   * itself, keyed by the node's nested id.
+   */
+  private void createExpectedStateMap(ExecutableFlowBase flow,
+      Map<String, Status> expectedStateMap, Map<String, ExecutableNode> nodeMap) {
+    for (ExecutableNode current : flow.getExecutableNodes()) {
+      String nestedId = current.getNestedId();
+      expectedStateMap.put(nestedId, current.getStatus());
+      nodeMap.put(nestedId, current);
+
+      // Descend into embedded flows so their children are tracked as well.
+      if (current instanceof ExecutableFlowBase) {
+        ExecutableFlowBase embedded = (ExecutableFlowBase) current;
+        createExpectedStateMap(embedded, expectedStateMap, nodeMap);
+      }
+    }
+  }
+
+  /**
+   * Asserts that every node listed in expectedStateMap currently has the
+   * expected status. A missing node id now fails the test outright; the old
+   * code only printed a message and then dereferenced the null node, turning
+   * the failure into a NullPointerException.
+   */
+  private void compareStates(Map<String, Status> expectedStateMap,
+      Map<String, ExecutableNode> nodeMap) {
+    for (Map.Entry<String, Status> entry : expectedStateMap.entrySet()) {
+      String printedId = entry.getKey();
+      Status expectedStatus = entry.getValue();
+      ExecutableNode node = nodeMap.get(printedId);
+      if (node == null) {
+        Assert.fail("id node: " + printedId + " doesn't exist.");
+      }
+      if (expectedStatus != node.getStatus()) {
+        Assert.fail("Expected values do not match for " + printedId
+            + ". Expected " + expectedStatus + ", instead received "
+            + node.getStatus());
+      }
+    }
+  }
+
+  /**
+   * Loads the flows under the given directory into the test project and
+   * copies the project files into the working directory.
+   *
+   * @throws RuntimeException if the flow loader reports any errors
+   */
+  private void prepareProject(File directory) throws ProjectManagerException,
+      IOException {
+    DirectoryFlowLoader loader = new DirectoryFlowLoader(logger);
+    loader.loadProjectFlow(directory);
+    if (!loader.getErrors().isEmpty()) {
+      for (String error : loader.getErrors()) {
+        System.out.println(error);
+      }
+
+      // Include the errors in the exception message so a setup failure is
+      // diagnosable from the stack trace alone, not just from stdout.
+      throw new RuntimeException("Errors found in setup: " + loader.getErrors());
+    }
+
+    flowMap = loader.getFlowMap();
+    project.setFlows(flowMap);
+    FileUtils.copyDirectory(directory, workingDir);
+  }
+
+ // private void printCurrentState(String prefix, ExecutableFlowBase flow) {
+ // for (ExecutableNode node: flow.getExecutableNodes()) {
+ // System.err.println(prefix + node.getNestedId() + "->" +
+ // node.getStatus().name());
+ // if (node instanceof ExecutableFlowBase) {
+ // printCurrentState(prefix, (ExecutableFlowBase)node);
+ // }
+ // }
+ // }
+ //
+  /** Convenience overload that runs the flow with default execution options. */
+  private FlowRunner createFlowRunner(EventCollectorListener eventCollector,
+      String flowName, String groupName) throws Exception {
+    ExecutionOptions defaultOptions = new ExecutionOptions();
+    return createFlowRunner(eventCollector, flowName, groupName, defaultOptions);
+  }
+
+  /**
+   * Builds a FlowRunner for the named flow, tags the execution with the given
+   * group name via the "group" flow parameter (used to prefix registered
+   * test-job ids, e.g. "prev:joba"), registers the execution with the mock
+   * executor loader, and attaches the event collector.
+   */
+  private FlowRunner createFlowRunner(EventCollectorListener eventCollector,
+      String flowName, String groupName, ExecutionOptions options)
+      throws Exception {
+    Flow flow = flowMap.get(flowName);
+
+    // Hand out a unique execution id for this runner.
+    int execId = id++;
+    ExecutableFlow executableFlow = new ExecutableFlow(project, flow);
+    executableFlow.setExecutionPath(workingDir.getPath());
+    executableFlow.setExecutionId(execId);
+
+    Map<String, String> groupParams = new HashMap<String, String>();
+    groupParams.put("group", groupName);
+    options.addAllFlowParameters(groupParams);
+    executableFlow.setExecutionOptions(options);
+    fakeExecutorLoader.uploadExecutableFlow(executableFlow);
+
+    // Fetch the flow back through the loader so the runner sees the same
+    // instance the mock loader tracks.
+    FlowRunner runner =
+        new FlowRunner(fakeExecutorLoader.fetchExecutableFlow(execId),
+            fakeExecutorLoader, fakeProjectLoader, jobtypeManager);
+    runner.addListener(eventCollector);
+
+    return runner;
+  }
}
diff --git a/unit/java/azkaban/test/execapp/FlowRunnerPropertyResolutionTest.java b/unit/java/azkaban/test/execapp/FlowRunnerPropertyResolutionTest.java
index 04adf21..7ef430d 100644
--- a/unit/java/azkaban/test/execapp/FlowRunnerPropertyResolutionTest.java
+++ b/unit/java/azkaban/test/execapp/FlowRunnerPropertyResolutionTest.java
@@ -63,190 +63,203 @@ import azkaban.utils.Props;
* job2 and 4 are in nested directories so should have different shared properties than other jobs.
*/
public class FlowRunnerPropertyResolutionTest {
- private File workingDir;
- private JobTypeManager jobtypeManager;
- private ProjectLoader fakeProjectLoader;
- private ExecutorLoader fakeExecutorLoader;
- private Logger logger = Logger.getLogger(FlowRunnerTest2.class);
- private Project project;
- private Map<String, Flow> flowMap;
- private static int id=101;
-
- @Before
- public void setUp() throws Exception {
- System.out.println("Create temp dir");
- workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
- if (workingDir.exists()) {
- FileUtils.deleteDirectory(workingDir);
- }
- workingDir.mkdirs();
- jobtypeManager = new JobTypeManager(null, null, this.getClass().getClassLoader());
- jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
- jobtypeManager.getJobTypePluginSet().addPluginClass("test", InteractiveTestJob.class);
- fakeProjectLoader = new MockProjectLoader(workingDir);
- fakeExecutorLoader = new MockExecutorLoader();
- project = new Project(1, "testProject");
-
- File dir = new File("unit/executions/execpropstest");
- prepareProject(dir);
-
- InteractiveTestJob.clearTestJobs();
- }
-
- @After
- public void tearDown() throws IOException {
- System.out.println("Teardown temp dir");
- if (workingDir != null) {
- FileUtils.deleteDirectory(workingDir);
- workingDir = null;
- }
- }
-
- /**
- * Tests the basic flow resolution. Flow is defined in execpropstest
- * @throws Exception
- */
- @Test
- public void testPropertyResolution() throws Exception {
- HashMap<String, String> flowProps = new HashMap<String,String>();
- flowProps.put("props7", "flow7");
- flowProps.put("props6", "flow6");
- flowProps.put("props5", "flow5");
- FlowRunner runner = createFlowRunner("job3", flowProps);
- Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
- createNodeMap(runner.getExecutableFlow(), nodeMap);
-
- // 1. Start flow. Job 2 should start
- Thread thread = runFlowRunnerInThread(runner);
- pause(250);
-
- // Job 2 is a normal job.
- // Only the flow overrides and the shared properties matter
- ExecutableNode node = nodeMap.get("job2");
- Props job2Props = node.getInputProps();
- Assert.assertEquals("shared1", job2Props.get("props1"));
- Assert.assertEquals("job2", job2Props.get("props2"));
- Assert.assertEquals("moo3", job2Props.get("props3"));
- Assert.assertEquals("job7", job2Props.get("props7"));
- Assert.assertEquals("flow5", job2Props.get("props5"));
- Assert.assertEquals("flow6", job2Props.get("props6"));
- Assert.assertEquals("shared4", job2Props.get("props4"));
- Assert.assertEquals("shared8", job2Props.get("props8"));
-
- // Job 1 is inside another flow, and is nested in a different directory
- // The priority order should be: job1->innerflow->job2.output->flow.overrides->job1 shared props
- Props job2Generated = new Props();
- job2Generated.put("props6","gjob6");
- job2Generated.put("props9","gjob9");
- job2Generated.put("props10","gjob10");
- InteractiveTestJob.getTestJob("job2").succeedJob(job2Generated);
- pause(250);
- node = nodeMap.get("innerflow:job1");
- Props job1Props = node.getInputProps();
- Assert.assertEquals("job1", job1Props.get("props1"));
- Assert.assertEquals("job2", job1Props.get("props2"));
- Assert.assertEquals("job8", job1Props.get("props8"));
- Assert.assertEquals("gjob9", job1Props.get("props9"));
- Assert.assertEquals("gjob10", job1Props.get("props10"));
- Assert.assertEquals("innerflow6", job1Props.get("props6"));
- Assert.assertEquals("innerflow5", job1Props.get("props5"));
- Assert.assertEquals("flow7", job1Props.get("props7"));
- Assert.assertEquals("moo3", job1Props.get("props3"));
- Assert.assertEquals("moo4", job1Props.get("props4"));
-
- // Job 4 is inside another flow and takes output from job 1
- // The priority order should be: job4->job1.output->innerflow->job2.output->flow.overrides->job4 shared props
- Props job1GeneratedProps = new Props();
- job1GeneratedProps.put("props9", "g2job9");
- job1GeneratedProps.put("props7", "g2job7");
- InteractiveTestJob.getTestJob("innerflow:job1").succeedJob(job1GeneratedProps);
- pause(250);
- node = nodeMap.get("innerflow:job4");
- Props job4Props = node.getInputProps();
- Assert.assertEquals("job8", job4Props.get("props8"));
- Assert.assertEquals("job9", job4Props.get("props9"));
- Assert.assertEquals("g2job7", job4Props.get("props7"));
- Assert.assertEquals("innerflow5", job4Props.get("props5"));
- Assert.assertEquals("innerflow6", job4Props.get("props6"));
- Assert.assertEquals("gjob10", job4Props.get("props10"));
- Assert.assertEquals("shared4", job4Props.get("props4"));
- Assert.assertEquals("shared1", job4Props.get("props1"));
- Assert.assertEquals("shared2", job4Props.get("props2"));
- Assert.assertEquals("moo3", job4Props.get("props3"));
-
- // Job 3 is a normal job taking props from an embedded flow
- // The priority order should be: job3->innerflow.output->flow.overrides->job3.sharedprops
- Props job4GeneratedProps = new Props();
- job4GeneratedProps.put("props9", "g4job9");
- job4GeneratedProps.put("props6", "g4job6");
- InteractiveTestJob.getTestJob("innerflow:job4").succeedJob(job4GeneratedProps);
- pause(250);
- node = nodeMap.get("job3");
- Props job3Props = node.getInputProps();
- Assert.assertEquals("job3", job3Props.get("props3"));
- Assert.assertEquals("g4job6", job3Props.get("props6"));
- Assert.assertEquals("g4job9", job3Props.get("props9"));
- Assert.assertEquals("flow7", job3Props.get("props7"));
- Assert.assertEquals("flow5", job3Props.get("props5"));
- Assert.assertEquals("shared1", job3Props.get("props1"));
- Assert.assertEquals("shared2", job3Props.get("props2"));
- Assert.assertEquals("moo4", job3Props.get("props4"));
- }
-
- private void prepareProject(File directory) throws ProjectManagerException, IOException {
- DirectoryFlowLoader loader = new DirectoryFlowLoader(logger);
- loader.loadProjectFlow(directory);
- if (!loader.getErrors().isEmpty()) {
- for (String error: loader.getErrors()) {
- System.out.println(error);
- }
-
- throw new RuntimeException("Errors found in setup");
- }
-
- flowMap = loader.getFlowMap();
- project.setFlows(flowMap);
- FileUtils.copyDirectory(directory, workingDir);
- }
-
- private FlowRunner createFlowRunner(String flowName, HashMap<String, String> flowParams) throws Exception {
- Flow flow = flowMap.get(flowName);
-
- int exId = id++;
- ExecutableFlow exFlow = new ExecutableFlow(project, flow);
- exFlow.setExecutionPath(workingDir.getPath());
- exFlow.setExecutionId(exId);
-
- exFlow.getExecutionOptions().addAllFlowParameters(flowParams);
- fakeExecutorLoader.uploadExecutableFlow(exFlow);
-
- FlowRunner runner = new FlowRunner(fakeExecutorLoader.fetchExecutableFlow(exId), fakeExecutorLoader, fakeProjectLoader, jobtypeManager);
- return runner;
- }
-
- private void createNodeMap(ExecutableFlowBase flow, Map<String, ExecutableNode> nodeMap) {
- for (ExecutableNode node: flow.getExecutableNodes()) {
- nodeMap.put(node.getNestedId(), node);
-
- if (node instanceof ExecutableFlowBase) {
- createNodeMap((ExecutableFlowBase)node, nodeMap);
- }
- }
- }
-
- private Thread runFlowRunnerInThread(FlowRunner runner) {
- Thread thread = new Thread(runner);
- thread.start();
- return thread;
- }
-
- private void pause(long millisec) {
- synchronized(this) {
- try {
- wait(millisec);
- }
- catch (InterruptedException e) {
- }
- }
- }
+ private File workingDir;
+ private JobTypeManager jobtypeManager;
+ private ProjectLoader fakeProjectLoader;
+ private ExecutorLoader fakeExecutorLoader;
+ private Logger logger = Logger.getLogger(FlowRunnerTest2.class);
+ private Project project;
+ private Map<String, Flow> flowMap;
+ private static int id = 101;
+
+ @Before
+ public void setUp() throws Exception {
+ System.out.println("Create temp dir");
+ workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
+ if (workingDir.exists()) {
+ FileUtils.deleteDirectory(workingDir);
+ }
+ workingDir.mkdirs();
+ jobtypeManager =
+ new JobTypeManager(null, null, this.getClass().getClassLoader());
+ jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
+ jobtypeManager.getJobTypePluginSet().addPluginClass("test",
+ InteractiveTestJob.class);
+ fakeProjectLoader = new MockProjectLoader(workingDir);
+ fakeExecutorLoader = new MockExecutorLoader();
+ project = new Project(1, "testProject");
+
+ File dir = new File("unit/executions/execpropstest");
+ prepareProject(dir);
+
+ InteractiveTestJob.clearTestJobs();
+ }
+
+ @After
+ public void tearDown() throws IOException {
+ System.out.println("Teardown temp dir");
+ if (workingDir != null) {
+ FileUtils.deleteDirectory(workingDir);
+ workingDir = null;
+ }
+ }
+
+ /**
+ * Tests the basic flow resolution. Flow is defined in execpropstest
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testPropertyResolution() throws Exception {
+ HashMap<String, String> flowProps = new HashMap<String, String>();
+ flowProps.put("props7", "flow7");
+ flowProps.put("props6", "flow6");
+ flowProps.put("props5", "flow5");
+ FlowRunner runner = createFlowRunner("job3", flowProps);
+ Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+ createNodeMap(runner.getExecutableFlow(), nodeMap);
+
+ // 1. Start flow. Job 2 should start
+ Thread thread = runFlowRunnerInThread(runner);
+ pause(250);
+
+ // Job 2 is a normal job.
+ // Only the flow overrides and the shared properties matter
+ ExecutableNode node = nodeMap.get("job2");
+ Props job2Props = node.getInputProps();
+ Assert.assertEquals("shared1", job2Props.get("props1"));
+ Assert.assertEquals("job2", job2Props.get("props2"));
+ Assert.assertEquals("moo3", job2Props.get("props3"));
+ Assert.assertEquals("job7", job2Props.get("props7"));
+ Assert.assertEquals("flow5", job2Props.get("props5"));
+ Assert.assertEquals("flow6", job2Props.get("props6"));
+ Assert.assertEquals("shared4", job2Props.get("props4"));
+ Assert.assertEquals("shared8", job2Props.get("props8"));
+
+ // Job 1 is inside another flow, and is nested in a different directory
+ // The priority order should be:
+ // job1->innerflow->job2.output->flow.overrides->job1 shared props
+ Props job2Generated = new Props();
+ job2Generated.put("props6", "gjob6");
+ job2Generated.put("props9", "gjob9");
+ job2Generated.put("props10", "gjob10");
+ InteractiveTestJob.getTestJob("job2").succeedJob(job2Generated);
+ pause(250);
+ node = nodeMap.get("innerflow:job1");
+ Props job1Props = node.getInputProps();
+ Assert.assertEquals("job1", job1Props.get("props1"));
+ Assert.assertEquals("job2", job1Props.get("props2"));
+ Assert.assertEquals("job8", job1Props.get("props8"));
+ Assert.assertEquals("gjob9", job1Props.get("props9"));
+ Assert.assertEquals("gjob10", job1Props.get("props10"));
+ Assert.assertEquals("innerflow6", job1Props.get("props6"));
+ Assert.assertEquals("innerflow5", job1Props.get("props5"));
+ Assert.assertEquals("flow7", job1Props.get("props7"));
+ Assert.assertEquals("moo3", job1Props.get("props3"));
+ Assert.assertEquals("moo4", job1Props.get("props4"));
+
+ // Job 4 is inside another flow and takes output from job 1
+ // The priority order should be:
+ // job4->job1.output->innerflow->job2.output->flow.overrides->job4 shared
+ // props
+ Props job1GeneratedProps = new Props();
+ job1GeneratedProps.put("props9", "g2job9");
+ job1GeneratedProps.put("props7", "g2job7");
+ InteractiveTestJob.getTestJob("innerflow:job1").succeedJob(
+ job1GeneratedProps);
+ pause(250);
+ node = nodeMap.get("innerflow:job4");
+ Props job4Props = node.getInputProps();
+ Assert.assertEquals("job8", job4Props.get("props8"));
+ Assert.assertEquals("job9", job4Props.get("props9"));
+ Assert.assertEquals("g2job7", job4Props.get("props7"));
+ Assert.assertEquals("innerflow5", job4Props.get("props5"));
+ Assert.assertEquals("innerflow6", job4Props.get("props6"));
+ Assert.assertEquals("gjob10", job4Props.get("props10"));
+ Assert.assertEquals("shared4", job4Props.get("props4"));
+ Assert.assertEquals("shared1", job4Props.get("props1"));
+ Assert.assertEquals("shared2", job4Props.get("props2"));
+ Assert.assertEquals("moo3", job4Props.get("props3"));
+
+ // Job 3 is a normal job taking props from an embedded flow
+ // The priority order should be:
+ // job3->innerflow.output->flow.overrides->job3.sharedprops
+ Props job4GeneratedProps = new Props();
+ job4GeneratedProps.put("props9", "g4job9");
+ job4GeneratedProps.put("props6", "g4job6");
+ InteractiveTestJob.getTestJob("innerflow:job4").succeedJob(
+ job4GeneratedProps);
+ pause(250);
+ node = nodeMap.get("job3");
+ Props job3Props = node.getInputProps();
+ Assert.assertEquals("job3", job3Props.get("props3"));
+ Assert.assertEquals("g4job6", job3Props.get("props6"));
+ Assert.assertEquals("g4job9", job3Props.get("props9"));
+ Assert.assertEquals("flow7", job3Props.get("props7"));
+ Assert.assertEquals("flow5", job3Props.get("props5"));
+ Assert.assertEquals("shared1", job3Props.get("props1"));
+ Assert.assertEquals("shared2", job3Props.get("props2"));
+ Assert.assertEquals("moo4", job3Props.get("props4"));
+ }
+
+ private void prepareProject(File directory) throws ProjectManagerException,
+ IOException {
+ DirectoryFlowLoader loader = new DirectoryFlowLoader(logger);
+ loader.loadProjectFlow(directory);
+ if (!loader.getErrors().isEmpty()) {
+ for (String error : loader.getErrors()) {
+ System.out.println(error);
+ }
+
+ throw new RuntimeException("Errors found in setup");
+ }
+
+ flowMap = loader.getFlowMap();
+ project.setFlows(flowMap);
+ FileUtils.copyDirectory(directory, workingDir);
+ }
+
+ private FlowRunner createFlowRunner(String flowName,
+ HashMap<String, String> flowParams) throws Exception {
+ Flow flow = flowMap.get(flowName);
+
+ int exId = id++;
+ ExecutableFlow exFlow = new ExecutableFlow(project, flow);
+ exFlow.setExecutionPath(workingDir.getPath());
+ exFlow.setExecutionId(exId);
+
+ exFlow.getExecutionOptions().addAllFlowParameters(flowParams);
+ fakeExecutorLoader.uploadExecutableFlow(exFlow);
+
+ FlowRunner runner =
+ new FlowRunner(fakeExecutorLoader.fetchExecutableFlow(exId),
+ fakeExecutorLoader, fakeProjectLoader, jobtypeManager);
+ return runner;
+ }
+
+ private void createNodeMap(ExecutableFlowBase flow,
+ Map<String, ExecutableNode> nodeMap) {
+ for (ExecutableNode node : flow.getExecutableNodes()) {
+ nodeMap.put(node.getNestedId(), node);
+
+ if (node instanceof ExecutableFlowBase) {
+ createNodeMap((ExecutableFlowBase) node, nodeMap);
+ }
+ }
+ }
+
+ private Thread runFlowRunnerInThread(FlowRunner runner) {
+ Thread thread = new Thread(runner);
+ thread.start();
+ return thread;
+ }
+
+ private void pause(long millisec) {
+ synchronized (this) {
+ try {
+ wait(millisec);
+ } catch (InterruptedException e) {
+ }
+ }
+ }
}
\ No newline at end of file
unit/java/azkaban/test/execapp/FlowRunnerTest.java 846(+439 -407)
diff --git a/unit/java/azkaban/test/execapp/FlowRunnerTest.java b/unit/java/azkaban/test/execapp/FlowRunnerTest.java
index 911a48b..65afd19 100644
--- a/unit/java/azkaban/test/execapp/FlowRunnerTest.java
+++ b/unit/java/azkaban/test/execapp/FlowRunnerTest.java
@@ -30,411 +30,443 @@ import azkaban.test.executor.JavaJob;
import azkaban.utils.JSONUtils;
public class FlowRunnerTest {
- private File workingDir;
- private JobTypeManager jobtypeManager;
- private ProjectLoader fakeProjectLoader;
- public FlowRunnerTest() {
-
- }
-
- @Before
- public void setUp() throws Exception {
- System.out.println("Create temp dir");
- synchronized ( this) {
- workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
- if (workingDir.exists()) {
- FileUtils.deleteDirectory(workingDir);
- }
- workingDir.mkdirs();
- }
- jobtypeManager = new JobTypeManager(null, null, this.getClass().getClassLoader());
- JobTypePluginSet pluginSet = jobtypeManager.getJobTypePluginSet();
- pluginSet.addPluginClass("java", JavaJob.class);
- pluginSet.addPluginClass("test", InteractiveTestJob.class);
- fakeProjectLoader = new MockProjectLoader(workingDir);
-
- InteractiveTestJob.clearTestJobs();
- }
-
- @After
- public void tearDown() throws IOException {
- System.out.println("Teardown temp dir");
- synchronized ( this) {
- if (workingDir != null) {
- FileUtils.deleteDirectory(workingDir);
- workingDir = null;
- }
- }
- }
-
- @Test
- public void exec1Normal() throws Exception {
- MockExecutorLoader loader = new MockExecutorLoader();
- //just making compile. may not work at all.
-
- EventCollectorListener eventCollector = new EventCollectorListener();
- eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED, Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
- FlowRunner runner = createFlowRunner(loader, eventCollector, "exec1");
-
- Assert.assertTrue(!runner.isKilled());
- runner.run();
- ExecutableFlow exFlow = runner.getExecutableFlow();
- Assert.assertTrue(exFlow.getStatus() == Status.SUCCEEDED);
- compareFinishedRuntime(runner);
-
- testStatus(exFlow, "job1", Status.SUCCEEDED);
- testStatus(exFlow, "job2", Status.SUCCEEDED);
- testStatus(exFlow, "job3", Status.SUCCEEDED);
- testStatus(exFlow, "job4", Status.SUCCEEDED);
- testStatus(exFlow, "job5", Status.SUCCEEDED);
- testStatus(exFlow, "job6", Status.SUCCEEDED);
- testStatus(exFlow, "job7", Status.SUCCEEDED);
- testStatus(exFlow, "job8", Status.SUCCEEDED);
- testStatus(exFlow, "job10", Status.SUCCEEDED);
-
- try {
- eventCollector.checkEventExists(new Type[] {Type.FLOW_STARTED, Type.FLOW_FINISHED});
- }
- catch (Exception e) {
- System.out.println(e.getMessage());
-
- Assert.fail(e.getMessage());
- }
- }
-
- @Test
- public void exec1Disabled() throws Exception {
- MockExecutorLoader loader = new MockExecutorLoader();
- EventCollectorListener eventCollector = new EventCollectorListener();
- eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED, Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
- File testDir = new File("unit/executions/exectest1");
- ExecutableFlow exFlow = prepareExecDir(testDir, "exec1", 1);
-
- // Disable couple in the middle and at the end.
- exFlow.getExecutableNode("job1").setStatus(Status.DISABLED);
- exFlow.getExecutableNode("job6").setStatus(Status.DISABLED);
- exFlow.getExecutableNode("job5").setStatus(Status.DISABLED);
- exFlow.getExecutableNode("job10").setStatus(Status.DISABLED);
-
- FlowRunner runner = createFlowRunner(exFlow, loader, eventCollector);
-
- Assert.assertTrue(!runner.isKilled());
- Assert.assertTrue(exFlow.getStatus() == Status.READY);
- runner.run();
-
- exFlow = runner.getExecutableFlow();
- compareFinishedRuntime(runner);
-
- Assert.assertTrue(exFlow.getStatus() == Status.SUCCEEDED);
-
- testStatus(exFlow, "job1", Status.SKIPPED);
- testStatus(exFlow, "job2", Status.SUCCEEDED);
- testStatus(exFlow, "job3", Status.SUCCEEDED);
- testStatus(exFlow, "job4", Status.SUCCEEDED);
- testStatus(exFlow, "job5", Status.SKIPPED);
- testStatus(exFlow, "job6", Status.SKIPPED);
- testStatus(exFlow, "job7", Status.SUCCEEDED);
- testStatus(exFlow, "job8", Status.SUCCEEDED);
- testStatus(exFlow, "job10", Status.SKIPPED);
-
- try {
- eventCollector.checkEventExists(new Type[] {Type.FLOW_STARTED, Type.FLOW_FINISHED});
- }
- catch (Exception e) {
- System.out.println(e.getMessage());
-
- Assert.fail(e.getMessage());
- }
- }
-
- @Test
- public void exec1Failed() throws Exception {
- MockExecutorLoader loader = new MockExecutorLoader();
- EventCollectorListener eventCollector = new EventCollectorListener();
- eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED, Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
- File testDir = new File("unit/executions/exectest1");
- ExecutableFlow flow = prepareExecDir(testDir, "exec2", 1);
-
- FlowRunner runner = createFlowRunner(flow, loader, eventCollector);
-
- runner.run();
- ExecutableFlow exFlow = runner.getExecutableFlow();
- Assert.assertTrue(!runner.isKilled());
- Assert.assertTrue("Flow status " + exFlow.getStatus(), exFlow.getStatus() == Status.FAILED);
-
- testStatus(exFlow, "job1", Status.SUCCEEDED);
- testStatus(exFlow, "job2d", Status.FAILED);
- testStatus(exFlow, "job3", Status.CANCELLED);
- testStatus(exFlow, "job4", Status.CANCELLED);
- testStatus(exFlow, "job5", Status.CANCELLED);
- testStatus(exFlow, "job6", Status.SUCCEEDED);
- testStatus(exFlow, "job7", Status.CANCELLED);
- testStatus(exFlow, "job8", Status.CANCELLED);
- testStatus(exFlow, "job9", Status.CANCELLED);
- testStatus(exFlow, "job10", Status.CANCELLED);
-
- try {
- eventCollector.checkEventExists(new Type[] {Type.FLOW_STARTED, Type.FLOW_FINISHED});
- }
- catch (Exception e) {
- System.out.println(e.getMessage());
-
- Assert.fail(e.getMessage());
- }
- }
-
- @Test
- public void exec1FailedKillAll() throws Exception {
- MockExecutorLoader loader = new MockExecutorLoader();
- EventCollectorListener eventCollector = new EventCollectorListener();
- eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED, Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
- File testDir = new File("unit/executions/exectest1");
- ExecutableFlow flow = prepareExecDir(testDir, "exec2", 1);
- flow.getExecutionOptions().setFailureAction(FailureAction.CANCEL_ALL);
-
-
- FlowRunner runner = createFlowRunner(flow, loader, eventCollector);
-
- runner.run();
- ExecutableFlow exFlow = runner.getExecutableFlow();
-
- Assert.assertTrue(runner.isKilled());
-
- Assert.assertTrue("Expected flow " + Status.FAILED + " instead " + exFlow.getStatus(), exFlow.getStatus() == Status.FAILED);
-
- synchronized(this) {
- try {
- wait(500);
- } catch(InterruptedException e) {
-
- }
- }
-
- testStatus(exFlow, "job1", Status.SUCCEEDED);
- testStatus(exFlow, "job2d", Status.FAILED);
- testStatus(exFlow, "job3", Status.CANCELLED);
- testStatus(exFlow, "job4", Status.CANCELLED);
- testStatus(exFlow, "job5", Status.CANCELLED);
- testStatus(exFlow, "job6", Status.KILLED);
- testStatus(exFlow, "job7", Status.CANCELLED);
- testStatus(exFlow, "job8", Status.CANCELLED);
- testStatus(exFlow, "job9", Status.CANCELLED);
- testStatus(exFlow, "job10", Status.CANCELLED);
-
- try {
- eventCollector.checkEventExists(new Type[] {Type.FLOW_STARTED, Type.FLOW_FINISHED});
- }
- catch (Exception e) {
- System.out.println(e.getMessage());
- eventCollector.writeAllEvents();
- Assert.fail(e.getMessage());
- }
- }
-
- @Test
- public void exec1FailedFinishRest() throws Exception {
- MockExecutorLoader loader = new MockExecutorLoader();
- EventCollectorListener eventCollector = new EventCollectorListener();
- eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED, Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
- File testDir = new File("unit/executions/exectest1");
- ExecutableFlow flow = prepareExecDir(testDir, "exec3", 1);
- flow.getExecutionOptions().setFailureAction(FailureAction.FINISH_ALL_POSSIBLE);
- FlowRunner runner = createFlowRunner(flow, loader, eventCollector);
-
- runner.run();
- ExecutableFlow exFlow = runner.getExecutableFlow();
- Assert.assertTrue("Expected flow " + Status.FAILED + " instead " + exFlow.getStatus(), exFlow.getStatus() == Status.FAILED);
-
- synchronized(this) {
- try {
- wait(500);
- } catch(InterruptedException e) {
- }
- }
-
- testStatus(exFlow, "job1", Status.SUCCEEDED);
- testStatus(exFlow, "job2d", Status.FAILED);
- testStatus(exFlow, "job3", Status.SUCCEEDED);
- testStatus(exFlow, "job4", Status.CANCELLED);
- testStatus(exFlow, "job5", Status.CANCELLED);
- testStatus(exFlow, "job6", Status.CANCELLED);
- testStatus(exFlow, "job7", Status.SUCCEEDED);
- testStatus(exFlow, "job8", Status.SUCCEEDED);
- testStatus(exFlow, "job9", Status.SUCCEEDED);
- testStatus(exFlow, "job10", Status.CANCELLED);
-
- try {
- eventCollector.checkEventExists(new Type[] {Type.FLOW_STARTED, Type.FLOW_FINISHED});
- }
- catch (Exception e) {
- System.out.println(e.getMessage());
- eventCollector.writeAllEvents();
- Assert.fail(e.getMessage());
- }
- }
-
- @Test
- public void execAndCancel() throws Exception {
- MockExecutorLoader loader = new MockExecutorLoader();
- EventCollectorListener eventCollector = new EventCollectorListener();
- eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED, Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
- FlowRunner runner = createFlowRunner(loader, eventCollector, "exec1");
-
- Assert.assertTrue(!runner.isKilled());
- Thread thread = new Thread(runner);
- thread.start();
-
- synchronized(this) {
- try {
- wait(5000);
- } catch (InterruptedException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
-
- runner.kill("me");
- Assert.assertTrue(runner.isKilled());
- }
-
-
- synchronized(this) {
- // Wait for cleanup.
- try {
- wait(2000);
- } catch (InterruptedException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- }
- ExecutableFlow exFlow = runner.getExecutableFlow();
- testStatus(exFlow, "job1", Status.SUCCEEDED);
- testStatus(exFlow, "job2", Status.SUCCEEDED);
- testStatus(exFlow, "job5", Status.CANCELLED);
- testStatus(exFlow, "job7", Status.CANCELLED);
- testStatus(exFlow, "job8", Status.CANCELLED);
- testStatus(exFlow, "job10", Status.CANCELLED);
- testStatus(exFlow, "job3", Status.KILLED);
- testStatus(exFlow, "job4", Status.KILLED);
- testStatus(exFlow, "job6", Status.KILLED);
-
- Assert.assertTrue("Expected FAILED status instead got " + exFlow.getStatus(),exFlow.getStatus() == Status.KILLED);
-
- try {
- eventCollector.checkEventExists(new Type[] {Type.FLOW_STARTED, Type.FLOW_FINISHED});
- }
- catch (Exception e) {
- System.out.println(e.getMessage());
- eventCollector.writeAllEvents();
- Assert.fail(e.getMessage());
- }
- }
-
- @Test
- public void execRetries() throws Exception {
- MockExecutorLoader loader = new MockExecutorLoader();
- EventCollectorListener eventCollector = new EventCollectorListener();
- eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED, Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
- FlowRunner runner = createFlowRunner(loader, eventCollector, "exec4-retry");
-
- runner.run();
-
- ExecutableFlow exFlow = runner.getExecutableFlow();
- testStatus(exFlow, "job-retry", Status.SUCCEEDED);
- testStatus(exFlow, "job-pass", Status.SUCCEEDED);
- testStatus(exFlow, "job-retry-fail", Status.FAILED);
- testAttempts(exFlow,"job-retry", 3);
- testAttempts(exFlow, "job-pass", 0);
- testAttempts(exFlow, "job-retry-fail", 2);
-
- Assert.assertTrue("Expected FAILED status instead got " + exFlow.getStatus(),exFlow.getStatus() == Status.FAILED);
- }
-
- private void testStatus(ExecutableFlow flow, String name, Status status) {
- ExecutableNode node = flow.getExecutableNode(name);
-
- if (node.getStatus() != status) {
- Assert.fail("Status of job " + node.getId() + " is " + node.getStatus() + " not " + status + " as expected.");
- }
- }
-
- private void testAttempts(ExecutableFlow flow, String name, int attempt) {
- ExecutableNode node = flow.getExecutableNode(name);
-
- if (node.getAttempt() != attempt) {
- Assert.fail("Expected " + attempt + " got " + node.getAttempt() + " attempts " + name );
- }
- }
-
- private ExecutableFlow prepareExecDir(File execDir, String flowName, int execId) throws IOException {
- synchronized ( this) {
- FileUtils.copyDirectory(execDir, workingDir);
- }
-
- File jsonFlowFile = new File(workingDir, flowName + ".flow");
- @SuppressWarnings("unchecked")
- HashMap<String, Object> flowObj = (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
-
- Project project = new Project(1, "myproject");
- project.setVersion(2);
-
- Flow flow = Flow.flowFromObject(flowObj);
- ExecutableFlow execFlow = new ExecutableFlow(project, flow);
- execFlow.setExecutionId(execId);
- execFlow.setExecutionPath(workingDir.getPath());
- return execFlow;
- }
-
- private void compareFinishedRuntime(FlowRunner runner) throws Exception {
- ExecutableFlow flow = runner.getExecutableFlow();
- for (String flowName: flow.getStartNodes()) {
- ExecutableNode node = flow.getExecutableNode(flowName);
- compareStartFinishTimes(flow, node, 0);
- }
- }
-
- private void compareStartFinishTimes(ExecutableFlow flow, ExecutableNode node, long previousEndTime) throws Exception {
- long startTime = node.getStartTime();
- long endTime = node.getEndTime();
-
- // If start time is < 0, so will the endtime.
- if (startTime <= 0) {
- Assert.assertTrue(endTime <=0);
- return;
- }
-
- //System.out.println("Node " + node.getJobId() + " start:" + startTime + " end:" + endTime + " previous:" + previousEndTime);
- Assert.assertTrue("Checking start and end times", startTime > 0 && endTime >= startTime);
- Assert.assertTrue("Start time for " + node.getId() + " is " + startTime +" and less than " + previousEndTime, startTime >= previousEndTime);
-
- for (String outNode : node.getOutNodes()) {
- ExecutableNode childNode = flow.getExecutableNode(outNode);
- compareStartFinishTimes(flow, childNode, endTime);
- }
- }
-
- private FlowRunner createFlowRunner(ExecutableFlow flow, ExecutorLoader loader, EventCollectorListener eventCollector) throws Exception {
- //File testDir = new File("unit/executions/exectest1");
- //MockProjectLoader projectLoader = new MockProjectLoader(new File(flow.getExecutionPath()));
-
- loader.uploadExecutableFlow(flow);
- FlowRunner runner = new FlowRunner(flow, loader, fakeProjectLoader, jobtypeManager);
-
- runner.addListener(eventCollector);
-
- return runner;
- }
-
- private FlowRunner createFlowRunner(ExecutorLoader loader, EventCollectorListener eventCollector, String flowName) throws Exception {
- File testDir = new File("unit/executions/exectest1");
- ExecutableFlow exFlow = prepareExecDir(testDir, flowName, 1);
- //MockProjectLoader projectLoader = new MockProjectLoader(new File(exFlow.getExecutionPath()));
-
- loader.uploadExecutableFlow(exFlow);
-
- FlowRunner runner = new FlowRunner(exFlow, loader, fakeProjectLoader, jobtypeManager);
-
- runner.addListener(eventCollector);
-
- return runner;
- }
+ private File workingDir;
+ private JobTypeManager jobtypeManager;
+ private ProjectLoader fakeProjectLoader;
+
+ public FlowRunnerTest() {
+
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ System.out.println("Create temp dir");
+ synchronized (this) {
+ workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
+ if (workingDir.exists()) {
+ FileUtils.deleteDirectory(workingDir);
+ }
+ workingDir.mkdirs();
+ }
+ jobtypeManager =
+ new JobTypeManager(null, null, this.getClass().getClassLoader());
+ JobTypePluginSet pluginSet = jobtypeManager.getJobTypePluginSet();
+ pluginSet.addPluginClass("java", JavaJob.class);
+ pluginSet.addPluginClass("test", InteractiveTestJob.class);
+ fakeProjectLoader = new MockProjectLoader(workingDir);
+
+ InteractiveTestJob.clearTestJobs();
+ }
+
+ @After
+ public void tearDown() throws IOException {
+ System.out.println("Teardown temp dir");
+ synchronized (this) {
+ if (workingDir != null) {
+ FileUtils.deleteDirectory(workingDir);
+ workingDir = null;
+ }
+ }
+ }
+
+ @Test
+ public void exec1Normal() throws Exception {
+ MockExecutorLoader loader = new MockExecutorLoader();
+ // just making compile. may not work at all.
+
+ EventCollectorListener eventCollector = new EventCollectorListener();
+ eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED,
+ Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
+ FlowRunner runner = createFlowRunner(loader, eventCollector, "exec1");
+
+ Assert.assertTrue(!runner.isKilled());
+ runner.run();
+ ExecutableFlow exFlow = runner.getExecutableFlow();
+ Assert.assertTrue(exFlow.getStatus() == Status.SUCCEEDED);
+ compareFinishedRuntime(runner);
+
+ testStatus(exFlow, "job1", Status.SUCCEEDED);
+ testStatus(exFlow, "job2", Status.SUCCEEDED);
+ testStatus(exFlow, "job3", Status.SUCCEEDED);
+ testStatus(exFlow, "job4", Status.SUCCEEDED);
+ testStatus(exFlow, "job5", Status.SUCCEEDED);
+ testStatus(exFlow, "job6", Status.SUCCEEDED);
+ testStatus(exFlow, "job7", Status.SUCCEEDED);
+ testStatus(exFlow, "job8", Status.SUCCEEDED);
+ testStatus(exFlow, "job10", Status.SUCCEEDED);
+
+ try {
+ eventCollector.checkEventExists(new Type[] { Type.FLOW_STARTED,
+ Type.FLOW_FINISHED });
+ } catch (Exception e) {
+ System.out.println(e.getMessage());
+
+ Assert.fail(e.getMessage());
+ }
+ }
+
+ @Test
+ public void exec1Disabled() throws Exception {
+ MockExecutorLoader loader = new MockExecutorLoader();
+ EventCollectorListener eventCollector = new EventCollectorListener();
+ eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED,
+ Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
+ File testDir = new File("unit/executions/exectest1");
+ ExecutableFlow exFlow = prepareExecDir(testDir, "exec1", 1);
+
+ // Disable couple in the middle and at the end.
+ exFlow.getExecutableNode("job1").setStatus(Status.DISABLED);
+ exFlow.getExecutableNode("job6").setStatus(Status.DISABLED);
+ exFlow.getExecutableNode("job5").setStatus(Status.DISABLED);
+ exFlow.getExecutableNode("job10").setStatus(Status.DISABLED);
+
+ FlowRunner runner = createFlowRunner(exFlow, loader, eventCollector);
+
+ Assert.assertTrue(!runner.isKilled());
+ Assert.assertTrue(exFlow.getStatus() == Status.READY);
+ runner.run();
+
+ exFlow = runner.getExecutableFlow();
+ compareFinishedRuntime(runner);
+
+ Assert.assertTrue(exFlow.getStatus() == Status.SUCCEEDED);
+
+ testStatus(exFlow, "job1", Status.SKIPPED);
+ testStatus(exFlow, "job2", Status.SUCCEEDED);
+ testStatus(exFlow, "job3", Status.SUCCEEDED);
+ testStatus(exFlow, "job4", Status.SUCCEEDED);
+ testStatus(exFlow, "job5", Status.SKIPPED);
+ testStatus(exFlow, "job6", Status.SKIPPED);
+ testStatus(exFlow, "job7", Status.SUCCEEDED);
+ testStatus(exFlow, "job8", Status.SUCCEEDED);
+ testStatus(exFlow, "job10", Status.SKIPPED);
+
+ try {
+ eventCollector.checkEventExists(new Type[] { Type.FLOW_STARTED,
+ Type.FLOW_FINISHED });
+ } catch (Exception e) {
+ System.out.println(e.getMessage());
+
+ Assert.fail(e.getMessage());
+ }
+ }
+
+ @Test
+ public void exec1Failed() throws Exception {
+ MockExecutorLoader loader = new MockExecutorLoader();
+ EventCollectorListener eventCollector = new EventCollectorListener();
+ eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED,
+ Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
+ File testDir = new File("unit/executions/exectest1");
+ ExecutableFlow flow = prepareExecDir(testDir, "exec2", 1);
+
+ FlowRunner runner = createFlowRunner(flow, loader, eventCollector);
+
+ runner.run();
+ ExecutableFlow exFlow = runner.getExecutableFlow();
+ Assert.assertTrue(!runner.isKilled());
+ Assert.assertTrue("Flow status " + exFlow.getStatus(),
+ exFlow.getStatus() == Status.FAILED);
+
+ testStatus(exFlow, "job1", Status.SUCCEEDED);
+ testStatus(exFlow, "job2d", Status.FAILED);
+ testStatus(exFlow, "job3", Status.CANCELLED);
+ testStatus(exFlow, "job4", Status.CANCELLED);
+ testStatus(exFlow, "job5", Status.CANCELLED);
+ testStatus(exFlow, "job6", Status.SUCCEEDED);
+ testStatus(exFlow, "job7", Status.CANCELLED);
+ testStatus(exFlow, "job8", Status.CANCELLED);
+ testStatus(exFlow, "job9", Status.CANCELLED);
+ testStatus(exFlow, "job10", Status.CANCELLED);
+
+ try {
+ eventCollector.checkEventExists(new Type[] { Type.FLOW_STARTED,
+ Type.FLOW_FINISHED });
+ } catch (Exception e) {
+ System.out.println(e.getMessage());
+
+ Assert.fail(e.getMessage());
+ }
+ }
+
+ @Test
+ public void exec1FailedKillAll() throws Exception {
+ MockExecutorLoader loader = new MockExecutorLoader();
+ EventCollectorListener eventCollector = new EventCollectorListener();
+ eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED,
+ Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
+ File testDir = new File("unit/executions/exectest1");
+ ExecutableFlow flow = prepareExecDir(testDir, "exec2", 1);
+ flow.getExecutionOptions().setFailureAction(FailureAction.CANCEL_ALL);
+
+ FlowRunner runner = createFlowRunner(flow, loader, eventCollector);
+
+ runner.run();
+ ExecutableFlow exFlow = runner.getExecutableFlow();
+
+ Assert.assertTrue(runner.isKilled());
+
+ Assert.assertTrue(
+ "Expected flow " + Status.FAILED + " instead " + exFlow.getStatus(),
+ exFlow.getStatus() == Status.FAILED);
+
+ synchronized (this) {
+ try {
+ wait(500);
+ } catch (InterruptedException e) {
+
+ }
+ }
+
+ testStatus(exFlow, "job1", Status.SUCCEEDED);
+ testStatus(exFlow, "job2d", Status.FAILED);
+ testStatus(exFlow, "job3", Status.CANCELLED);
+ testStatus(exFlow, "job4", Status.CANCELLED);
+ testStatus(exFlow, "job5", Status.CANCELLED);
+ testStatus(exFlow, "job6", Status.KILLED);
+ testStatus(exFlow, "job7", Status.CANCELLED);
+ testStatus(exFlow, "job8", Status.CANCELLED);
+ testStatus(exFlow, "job9", Status.CANCELLED);
+ testStatus(exFlow, "job10", Status.CANCELLED);
+
+ try {
+ eventCollector.checkEventExists(new Type[] { Type.FLOW_STARTED,
+ Type.FLOW_FINISHED });
+ } catch (Exception e) {
+ System.out.println(e.getMessage());
+ eventCollector.writeAllEvents();
+ Assert.fail(e.getMessage());
+ }
+ }
+
+ @Test
+ public void exec1FailedFinishRest() throws Exception {
+ MockExecutorLoader loader = new MockExecutorLoader();
+ EventCollectorListener eventCollector = new EventCollectorListener();
+ eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED,
+ Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
+ File testDir = new File("unit/executions/exectest1");
+ ExecutableFlow flow = prepareExecDir(testDir, "exec3", 1);
+ flow.getExecutionOptions().setFailureAction(
+ FailureAction.FINISH_ALL_POSSIBLE);
+ FlowRunner runner = createFlowRunner(flow, loader, eventCollector);
+
+ runner.run();
+ ExecutableFlow exFlow = runner.getExecutableFlow();
+ Assert.assertTrue(
+ "Expected flow " + Status.FAILED + " instead " + exFlow.getStatus(),
+ exFlow.getStatus() == Status.FAILED);
+
+ synchronized (this) {
+ try {
+ wait(500);
+ } catch (InterruptedException e) {
+ }
+ }
+
+ testStatus(exFlow, "job1", Status.SUCCEEDED);
+ testStatus(exFlow, "job2d", Status.FAILED);
+ testStatus(exFlow, "job3", Status.SUCCEEDED);
+ testStatus(exFlow, "job4", Status.CANCELLED);
+ testStatus(exFlow, "job5", Status.CANCELLED);
+ testStatus(exFlow, "job6", Status.CANCELLED);
+ testStatus(exFlow, "job7", Status.SUCCEEDED);
+ testStatus(exFlow, "job8", Status.SUCCEEDED);
+ testStatus(exFlow, "job9", Status.SUCCEEDED);
+ testStatus(exFlow, "job10", Status.CANCELLED);
+
+ try {
+ eventCollector.checkEventExists(new Type[] { Type.FLOW_STARTED,
+ Type.FLOW_FINISHED });
+ } catch (Exception e) {
+ System.out.println(e.getMessage());
+ eventCollector.writeAllEvents();
+ Assert.fail(e.getMessage());
+ }
+ }
+
+ @Test
+ public void execAndCancel() throws Exception {
+ MockExecutorLoader loader = new MockExecutorLoader();
+ EventCollectorListener eventCollector = new EventCollectorListener();
+ eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED,
+ Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
+ FlowRunner runner = createFlowRunner(loader, eventCollector, "exec1");
+
+ Assert.assertTrue(!runner.isKilled());
+ Thread thread = new Thread(runner);
+ thread.start();
+
+ synchronized (this) {
+ try {
+ wait(5000);
+ } catch (InterruptedException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+
+ runner.kill("me");
+ Assert.assertTrue(runner.isKilled());
+ }
+
+ synchronized (this) {
+ // Wait for cleanup.
+ try {
+ wait(2000);
+ } catch (InterruptedException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ }
+ ExecutableFlow exFlow = runner.getExecutableFlow();
+ testStatus(exFlow, "job1", Status.SUCCEEDED);
+ testStatus(exFlow, "job2", Status.SUCCEEDED);
+ testStatus(exFlow, "job5", Status.CANCELLED);
+ testStatus(exFlow, "job7", Status.CANCELLED);
+ testStatus(exFlow, "job8", Status.CANCELLED);
+ testStatus(exFlow, "job10", Status.CANCELLED);
+ testStatus(exFlow, "job3", Status.KILLED);
+ testStatus(exFlow, "job4", Status.KILLED);
+ testStatus(exFlow, "job6", Status.KILLED);
+
+ Assert.assertTrue(
+ "Expected FAILED status instead got " + exFlow.getStatus(),
+ exFlow.getStatus() == Status.KILLED);
+
+ try {
+ eventCollector.checkEventExists(new Type[] { Type.FLOW_STARTED,
+ Type.FLOW_FINISHED });
+ } catch (Exception e) {
+ System.out.println(e.getMessage());
+ eventCollector.writeAllEvents();
+ Assert.fail(e.getMessage());
+ }
+ }
+
+ @Test
+ public void execRetries() throws Exception {
+ MockExecutorLoader loader = new MockExecutorLoader();
+ EventCollectorListener eventCollector = new EventCollectorListener();
+ eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED,
+ Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
+ FlowRunner runner = createFlowRunner(loader, eventCollector, "exec4-retry");
+
+ runner.run();
+
+ ExecutableFlow exFlow = runner.getExecutableFlow();
+ testStatus(exFlow, "job-retry", Status.SUCCEEDED);
+ testStatus(exFlow, "job-pass", Status.SUCCEEDED);
+ testStatus(exFlow, "job-retry-fail", Status.FAILED);
+ testAttempts(exFlow, "job-retry", 3);
+ testAttempts(exFlow, "job-pass", 0);
+ testAttempts(exFlow, "job-retry-fail", 2);
+
+ Assert.assertTrue(
+ "Expected FAILED status instead got " + exFlow.getStatus(),
+ exFlow.getStatus() == Status.FAILED);
+ }
+
+ private void testStatus(ExecutableFlow flow, String name, Status status) {
+ ExecutableNode node = flow.getExecutableNode(name);
+
+ if (node.getStatus() != status) {
+ Assert.fail("Status of job " + node.getId() + " is " + node.getStatus()
+ + " not " + status + " as expected.");
+ }
+ }
+
+ private void testAttempts(ExecutableFlow flow, String name, int attempt) {
+ ExecutableNode node = flow.getExecutableNode(name);
+
+ if (node.getAttempt() != attempt) {
+ Assert.fail("Expected " + attempt + " got " + node.getAttempt()
+ + " attempts " + name);
+ }
+ }
+
+ private ExecutableFlow prepareExecDir(File execDir, String flowName,
+ int execId) throws IOException {
+ synchronized (this) {
+ FileUtils.copyDirectory(execDir, workingDir);
+ }
+
+ File jsonFlowFile = new File(workingDir, flowName + ".flow");
+ @SuppressWarnings("unchecked")
+ HashMap<String, Object> flowObj =
+ (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
+
+ Project project = new Project(1, "myproject");
+ project.setVersion(2);
+
+ Flow flow = Flow.flowFromObject(flowObj);
+ ExecutableFlow execFlow = new ExecutableFlow(project, flow);
+ execFlow.setExecutionId(execId);
+ execFlow.setExecutionPath(workingDir.getPath());
+ return execFlow;
+ }
+
+ private void compareFinishedRuntime(FlowRunner runner) throws Exception {
+ ExecutableFlow flow = runner.getExecutableFlow();
+ for (String flowName : flow.getStartNodes()) {
+ ExecutableNode node = flow.getExecutableNode(flowName);
+ compareStartFinishTimes(flow, node, 0);
+ }
+ }
+
+ private void compareStartFinishTimes(ExecutableFlow flow,
+ ExecutableNode node, long previousEndTime) throws Exception {
+ long startTime = node.getStartTime();
+ long endTime = node.getEndTime();
+
+ // If start time is < 0, so will the endtime.
+ if (startTime <= 0) {
+ Assert.assertTrue(endTime <= 0);
+ return;
+ }
+
+ // System.out.println("Node " + node.getJobId() + " start:" + startTime +
+ // " end:" + endTime + " previous:" + previousEndTime);
+ Assert.assertTrue("Checking start and end times", startTime > 0
+ && endTime >= startTime);
+ Assert.assertTrue("Start time for " + node.getId() + " is " + startTime
+ + " and less than " + previousEndTime, startTime >= previousEndTime);
+
+ for (String outNode : node.getOutNodes()) {
+ ExecutableNode childNode = flow.getExecutableNode(outNode);
+ compareStartFinishTimes(flow, childNode, endTime);
+ }
+ }
+
+ private FlowRunner createFlowRunner(ExecutableFlow flow,
+ ExecutorLoader loader, EventCollectorListener eventCollector)
+ throws Exception {
+ // File testDir = new File("unit/executions/exectest1");
+ // MockProjectLoader projectLoader = new MockProjectLoader(new
+ // File(flow.getExecutionPath()));
+
+ loader.uploadExecutableFlow(flow);
+ FlowRunner runner =
+ new FlowRunner(flow, loader, fakeProjectLoader, jobtypeManager);
+
+ runner.addListener(eventCollector);
+
+ return runner;
+ }
+
+ private FlowRunner createFlowRunner(ExecutorLoader loader,
+ EventCollectorListener eventCollector, String flowName) throws Exception {
+ File testDir = new File("unit/executions/exectest1");
+ ExecutableFlow exFlow = prepareExecDir(testDir, flowName, 1);
+ // MockProjectLoader projectLoader = new MockProjectLoader(new
+ // File(exFlow.getExecutionPath()));
+
+ loader.uploadExecutableFlow(exFlow);
+
+ FlowRunner runner =
+ new FlowRunner(exFlow, loader, fakeProjectLoader, jobtypeManager);
+
+ runner.addListener(eventCollector);
+
+ return runner;
+ }
}
diff --git a/unit/java/azkaban/test/execapp/FlowRunnerTest2.java b/unit/java/azkaban/test/execapp/FlowRunnerTest2.java
index bc31261..cadc24b 100644
--- a/unit/java/azkaban/test/execapp/FlowRunnerTest2.java
+++ b/unit/java/azkaban/test/execapp/FlowRunnerTest2.java
@@ -33,8 +33,9 @@ import azkaban.utils.Props;
/**
* Test the flow run, especially with embedded flows.
*
- * This test uses executions/embedded2. It also mainly uses the flow named jobf. The test is designed to
- * control success/failures explicitly so we don't have to time the flow exactly.
+ * This test uses executions/embedded2. It also mainly uses the flow named
+ * jobf. The test is designed to control success/failures explicitly so we
+ * don't have to time the flow exactly.
*
* Flow jobf looks like the following:
*
@@ -69,7 +70,8 @@ import azkaban.utils.Props;
* |
* innerFlow2
*
- * The following tests checks each stage of the flow run by forcing jobs to succeed or fail.
+ * The following tests check each stage of the flow run by forcing jobs to
+ * succeed or fail.
*/
public class FlowRunnerTest2 {
private File workingDir;
unit/java/azkaban/test/execapp/JobRunnerTest.java 693(+355 -338)
diff --git a/unit/java/azkaban/test/execapp/JobRunnerTest.java b/unit/java/azkaban/test/execapp/JobRunnerTest.java
index abee6f0..e9f6064 100644
--- a/unit/java/azkaban/test/execapp/JobRunnerTest.java
+++ b/unit/java/azkaban/test/execapp/JobRunnerTest.java
@@ -25,343 +25,360 @@ import azkaban.test.executor.SleepJavaJob;
import azkaban.utils.Props;
public class JobRunnerTest {
- private File workingDir;
- private JobTypeManager jobtypeManager;
- private Logger logger = Logger.getLogger("JobRunnerTest");
-
- public JobRunnerTest() {
-
- }
-
- @Before
- public void setUp() throws Exception {
- System.out.println("Create temp dir");
- workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
- if (workingDir.exists()) {
- FileUtils.deleteDirectory(workingDir);
- }
- workingDir.mkdirs();
- jobtypeManager = new JobTypeManager(null, null, this.getClass().getClassLoader());
-
- jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
- }
-
- @After
- public void tearDown() throws IOException {
- System.out.println("Teardown temp dir");
- if (workingDir != null) {
- FileUtils.deleteDirectory(workingDir);
- workingDir = null;
- }
- }
-
- @Test
- public void testBasicRun() {
- MockExecutorLoader loader = new MockExecutorLoader();
- EventCollectorListener eventCollector = new EventCollectorListener();
- JobRunner runner = createJobRunner(1, "testJob", 1, false, loader, eventCollector);
- ExecutableNode node = runner.getNode();
-
- eventCollector.handleEvent(Event.create(null, Event.Type.JOB_STARTED));
- Assert.assertTrue(runner.getStatus() != Status.SUCCEEDED || runner.getStatus() != Status.FAILED);
-
- runner.run();
- eventCollector.handleEvent(Event.create(null, Event.Type.JOB_FINISHED));
-
- Assert.assertTrue(runner.getStatus() == node.getStatus());
- Assert.assertTrue("Node status is " + node.getStatus(), node.getStatus() == Status.SUCCEEDED);
- Assert.assertTrue(node.getStartTime() > 0 && node.getEndTime() > 0);
- Assert.assertTrue( node.getEndTime() - node.getStartTime() > 1000);
-
- File logFile = new File(runner.getLogFilePath());
- Props outputProps = runner.getNode().getOutputProps();
- Assert.assertTrue(outputProps != null);
- Assert.assertTrue(logFile.exists());
-
- Assert.assertTrue(loader.getNodeUpdateCount(node.getId()) == 3);
-
- Assert.assertTrue(eventCollector.checkOrdering());
- try {
- eventCollector.checkEventExists(new Type[] {Type.JOB_STARTED, Type.JOB_STATUS_CHANGED, Type.JOB_FINISHED});
- }
- catch (Exception e) {
- Assert.fail(e.getMessage());
- }
- }
-
- @Test
- public void testFailedRun() {
- MockExecutorLoader loader = new MockExecutorLoader();
- EventCollectorListener eventCollector = new EventCollectorListener();
- JobRunner runner = createJobRunner(1, "testJob", 1, true, loader, eventCollector);
- ExecutableNode node = runner.getNode();
-
- Assert.assertTrue(runner.getStatus() != Status.SUCCEEDED || runner.getStatus() != Status.FAILED);
- runner.run();
-
- Assert.assertTrue(runner.getStatus() == node.getStatus());
- Assert.assertTrue(node.getStatus() == Status.FAILED);
- Assert.assertTrue(node.getStartTime() > 0 && node.getEndTime() > 0);
- Assert.assertTrue(node.getEndTime() - node.getStartTime() > 1000);
-
- File logFile = new File(runner.getLogFilePath());
- Props outputProps = runner.getNode().getOutputProps();
- Assert.assertTrue(outputProps == null);
- Assert.assertTrue(logFile.exists());
- Assert.assertTrue(eventCollector.checkOrdering());
- Assert.assertTrue(!runner.isKilled());
- Assert.assertTrue(loader.getNodeUpdateCount(node.getId()) == 3);
-
- try {
- eventCollector.checkEventExists(new Type[] {Type.JOB_STARTED, Type.JOB_STATUS_CHANGED, Type.JOB_FINISHED});
- }
- catch (Exception e) {
- Assert.fail(e.getMessage());
- }
- }
-
- @Test
- public void testDisabledRun() {
- MockExecutorLoader loader = new MockExecutorLoader();
- EventCollectorListener eventCollector = new EventCollectorListener();
- JobRunner runner = createJobRunner(1, "testJob", 1, false, loader, eventCollector);
- ExecutableNode node = runner.getNode();
-
- node.setStatus(Status.DISABLED);
-
- // Should be disabled.
- Assert.assertTrue(runner.getStatus() == Status.DISABLED);
- runner.run();
-
- Assert.assertTrue(runner.getStatus() == node.getStatus());
- Assert.assertTrue(node.getStatus() == Status.SKIPPED);
- Assert.assertTrue(node.getStartTime() > 0 && node.getEndTime() > 0);
- // Give it 10 ms to fail.
- Assert.assertTrue( node.getEndTime() - node.getStartTime() < 10);
-
- // Log file and output files should not exist.
- Props outputProps = runner.getNode().getOutputProps();
- Assert.assertTrue(outputProps == null);
- Assert.assertTrue(runner.getLogFilePath() == null);
- Assert.assertTrue(eventCollector.checkOrdering());
-
- Assert.assertTrue(loader.getNodeUpdateCount(node.getId()) == null);
-
- try {
- eventCollector.checkEventExists(new Type[] {Type.JOB_STARTED, Type.JOB_FINISHED});
- }
- catch (Exception e) {
- Assert.fail(e.getMessage());
- }
- }
-
- @Test
- public void testPreKilledRun() {
- MockExecutorLoader loader = new MockExecutorLoader();
- EventCollectorListener eventCollector = new EventCollectorListener();
- JobRunner runner = createJobRunner(1, "testJob", 1, false, loader, eventCollector);
- ExecutableNode node = runner.getNode();
-
- node.setStatus(Status.KILLED);
-
- // Should be killed.
- Assert.assertTrue(runner.getStatus() == Status.KILLED);
- runner.run();
-
- // Should just skip the run and not change
- Assert.assertTrue(runner.getStatus() == node.getStatus());
- Assert.assertTrue(node.getStatus() == Status.KILLED);
- Assert.assertTrue(node.getStartTime() > 0 && node.getEndTime() > 0);
- // Give it 10 ms to fail.
- Assert.assertTrue( node.getEndTime() - node.getStartTime() < 10);
-
- Assert.assertTrue(loader.getNodeUpdateCount(node.getId()) == null);
-
- // Log file and output files should not exist.
- Props outputProps = runner.getNode().getOutputProps();
- Assert.assertTrue(outputProps == null);
- Assert.assertTrue(runner.getLogFilePath() == null);
- Assert.assertTrue(!runner.isKilled());
- try {
- eventCollector.checkEventExists(new Type[] {Type.JOB_STARTED, Type.JOB_FINISHED});
- }
- catch (Exception e) {
- Assert.fail(e.getMessage());
- }
- }
-
- @Test
- public void testCancelRun() {
- MockExecutorLoader loader = new MockExecutorLoader();
- EventCollectorListener eventCollector = new EventCollectorListener();
- JobRunner runner = createJobRunner(13, "testJob", 10, false, loader, eventCollector);
- ExecutableNode node = runner.getNode();
-
- Assert.assertTrue(runner.getStatus() != Status.SUCCEEDED || runner.getStatus() != Status.FAILED);
-
- Thread thread = new Thread(runner);
- thread.start();
-
- synchronized(this) {
- try {
- wait(2000);
- } catch (InterruptedException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- runner.kill();
- try {
- wait(500);
- } catch (InterruptedException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- }
-
- Assert.assertTrue(runner.getStatus() == node.getStatus());
- Assert.assertTrue("Status is " + node.getStatus(), node.getStatus() == Status.KILLED);
- Assert.assertTrue(node.getStartTime() > 0 && node.getEndTime() > 0);
- // Give it 10 ms to fail.
- Assert.assertTrue(node.getEndTime() - node.getStartTime() < 3000);
- Assert.assertTrue(loader.getNodeUpdateCount(node.getId()) == 3);
-
- // Log file and output files should not exist.
- File logFile = new File(runner.getLogFilePath());
- Props outputProps = runner.getNode().getOutputProps();
- Assert.assertTrue(outputProps == null);
- Assert.assertTrue(logFile.exists());
- Assert.assertTrue(eventCollector.checkOrdering());
- Assert.assertTrue(runner.isKilled());
- try {
- eventCollector.checkEventExists(new Type[] {Type.JOB_STARTED, Type.JOB_STATUS_CHANGED, Type.JOB_FINISHED});
- }
- catch (Exception e) {
- System.out.println(e.getMessage());
-
- Assert.fail(e.getMessage());
- }
- }
-
- @Test
- public void testDelayedExecutionJob() {
- MockExecutorLoader loader = new MockExecutorLoader();
- EventCollectorListener eventCollector = new EventCollectorListener();
- JobRunner runner = createJobRunner(1, "testJob", 1, false, loader, eventCollector);
- runner.setDelayStart(5000);
- long startTime = System.currentTimeMillis();
- ExecutableNode node = runner.getNode();
-
- eventCollector.handleEvent(Event.create(null, Event.Type.JOB_STARTED));
- Assert.assertTrue(runner.getStatus() != Status.SUCCEEDED || runner.getStatus() != Status.FAILED);
-
- runner.run();
- eventCollector.handleEvent(Event.create(null, Event.Type.JOB_FINISHED));
-
- Assert.assertTrue(runner.getStatus() == node.getStatus());
- Assert.assertTrue("Node status is " + node.getStatus(), node.getStatus() == Status.SUCCEEDED);
- Assert.assertTrue(node.getStartTime() > 0 && node.getEndTime() > 0);
- Assert.assertTrue( node.getEndTime() - node.getStartTime() > 1000);
- Assert.assertTrue(node.getStartTime() - startTime >= 5000);
-
- File logFile = new File(runner.getLogFilePath());
- Props outputProps = runner.getNode().getOutputProps();
- Assert.assertTrue(outputProps != null);
- Assert.assertTrue(logFile.exists());
- Assert.assertFalse(runner.isKilled());
- Assert.assertTrue(loader.getNodeUpdateCount(node.getId()) == 3);
-
- Assert.assertTrue(eventCollector.checkOrdering());
- try {
- eventCollector.checkEventExists(new Type[] {Type.JOB_STARTED, Type.JOB_STATUS_CHANGED, Type.JOB_FINISHED});
- }
- catch (Exception e) {
- Assert.fail(e.getMessage());
- }
- }
-
- @Test
- public void testDelayedExecutionCancelledJob() {
- MockExecutorLoader loader = new MockExecutorLoader();
- EventCollectorListener eventCollector = new EventCollectorListener();
- JobRunner runner = createJobRunner(1, "testJob", 1, false, loader, eventCollector);
- runner.setDelayStart(5000);
- long startTime = System.currentTimeMillis();
- ExecutableNode node = runner.getNode();
-
- eventCollector.handleEvent(Event.create(null, Event.Type.JOB_STARTED));
- Assert.assertTrue(runner.getStatus() != Status.SUCCEEDED || runner.getStatus() != Status.FAILED);
-
- Thread thread = new Thread(runner);
- thread.start();
-
- synchronized(this) {
- try {
- wait(2000);
- } catch (InterruptedException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- runner.kill();
- try {
- wait(500);
- } catch (InterruptedException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- }
-
- eventCollector.handleEvent(Event.create(null, Event.Type.JOB_FINISHED));
-
- Assert.assertTrue(runner.getStatus() == node.getStatus());
- Assert.assertTrue("Node status is " + node.getStatus(), node.getStatus() == Status.KILLED);
- Assert.assertTrue(node.getStartTime() > 0 && node.getEndTime() > 0);
- Assert.assertTrue( node.getEndTime() - node.getStartTime() < 1000);
- Assert.assertTrue(node.getStartTime() - startTime >= 2000);
- Assert.assertTrue(node.getStartTime() - startTime <= 5000);
- Assert.assertTrue(runner.isKilled());
-
- File logFile = new File(runner.getLogFilePath());
- Props outputProps = runner.getNode().getOutputProps();
- Assert.assertTrue(outputProps == null);
- Assert.assertTrue(logFile.exists());
-
- Assert.assertTrue(eventCollector.checkOrdering());
- try {
- eventCollector.checkEventExists(new Type[] {Type.JOB_FINISHED});
- }
- catch (Exception e) {
- Assert.fail(e.getMessage());
- }
- }
-
- private Props createProps( int sleepSec, boolean fail) {
- Props props = new Props();
- props.put("type", "java");
-
- props.put(JavaJob.JOB_CLASS, SleepJavaJob.class.getName());
- props.put("seconds", sleepSec);
- props.put(ProcessJob.WORKING_DIR, workingDir.getPath());
- props.put("fail", String.valueOf(fail));
-
- return props;
- }
-
- private JobRunner createJobRunner(int execId, String name, int time, boolean fail, ExecutorLoader loader, EventCollectorListener listener) {
- ExecutableFlow flow = new ExecutableFlow();
- flow.setExecutionId(execId);
- ExecutableNode node = new ExecutableNode();
- node.setId(name);
- node.setParentFlow(flow);
-
- Props props = createProps(time, fail);
- node.setInputProps(props);
- HashSet<String> proxyUsers = new HashSet<String>();
- proxyUsers.add(flow.getSubmitUser());
- JobRunner runner = new JobRunner(node, workingDir, loader, jobtypeManager);
- runner.setLogSettings(logger, "5MB", 4);
-
- runner.addListener(listener);
- return runner;
- }
+ private File workingDir;
+ private JobTypeManager jobtypeManager;
+ private Logger logger = Logger.getLogger("JobRunnerTest");
+
+ public JobRunnerTest() {
+
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ System.out.println("Create temp dir");
+ workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
+ if (workingDir.exists()) {
+ FileUtils.deleteDirectory(workingDir);
+ }
+ workingDir.mkdirs();
+ jobtypeManager =
+ new JobTypeManager(null, null, this.getClass().getClassLoader());
+
+ jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
+ }
+
+ @After
+ public void tearDown() throws IOException {
+ System.out.println("Teardown temp dir");
+ if (workingDir != null) {
+ FileUtils.deleteDirectory(workingDir);
+ workingDir = null;
+ }
+ }
+
+ @Test
+ public void testBasicRun() {
+ MockExecutorLoader loader = new MockExecutorLoader();
+ EventCollectorListener eventCollector = new EventCollectorListener();
+ JobRunner runner =
+ createJobRunner(1, "testJob", 1, false, loader, eventCollector);
+ ExecutableNode node = runner.getNode();
+
+ eventCollector.handleEvent(Event.create(null, Event.Type.JOB_STARTED));
+ Assert.assertTrue(runner.getStatus() != Status.SUCCEEDED
+ || runner.getStatus() != Status.FAILED);
+
+ runner.run();
+ eventCollector.handleEvent(Event.create(null, Event.Type.JOB_FINISHED));
+
+ Assert.assertTrue(runner.getStatus() == node.getStatus());
+ Assert.assertTrue("Node status is " + node.getStatus(),
+ node.getStatus() == Status.SUCCEEDED);
+ Assert.assertTrue(node.getStartTime() > 0 && node.getEndTime() > 0);
+ Assert.assertTrue(node.getEndTime() - node.getStartTime() > 1000);
+
+ File logFile = new File(runner.getLogFilePath());
+ Props outputProps = runner.getNode().getOutputProps();
+ Assert.assertTrue(outputProps != null);
+ Assert.assertTrue(logFile.exists());
+
+ Assert.assertTrue(loader.getNodeUpdateCount(node.getId()) == 3);
+
+ Assert.assertTrue(eventCollector.checkOrdering());
+ try {
+ eventCollector.checkEventExists(new Type[] { Type.JOB_STARTED,
+ Type.JOB_STATUS_CHANGED, Type.JOB_FINISHED });
+ } catch (Exception e) {
+ Assert.fail(e.getMessage());
+ }
+ }
+
+ @Test
+ public void testFailedRun() {
+ MockExecutorLoader loader = new MockExecutorLoader();
+ EventCollectorListener eventCollector = new EventCollectorListener();
+ JobRunner runner =
+ createJobRunner(1, "testJob", 1, true, loader, eventCollector);
+ ExecutableNode node = runner.getNode();
+
+ Assert.assertTrue(runner.getStatus() != Status.SUCCEEDED
+ || runner.getStatus() != Status.FAILED);
+ runner.run();
+
+ Assert.assertTrue(runner.getStatus() == node.getStatus());
+ Assert.assertTrue(node.getStatus() == Status.FAILED);
+ Assert.assertTrue(node.getStartTime() > 0 && node.getEndTime() > 0);
+ Assert.assertTrue(node.getEndTime() - node.getStartTime() > 1000);
+
+ File logFile = new File(runner.getLogFilePath());
+ Props outputProps = runner.getNode().getOutputProps();
+ Assert.assertTrue(outputProps == null);
+ Assert.assertTrue(logFile.exists());
+ Assert.assertTrue(eventCollector.checkOrdering());
+ Assert.assertTrue(!runner.isKilled());
+ Assert.assertTrue(loader.getNodeUpdateCount(node.getId()) == 3);
+
+ try {
+ eventCollector.checkEventExists(new Type[] { Type.JOB_STARTED,
+ Type.JOB_STATUS_CHANGED, Type.JOB_FINISHED });
+ } catch (Exception e) {
+ Assert.fail(e.getMessage());
+ }
+ }
+
+ @Test
+ public void testDisabledRun() {
+ MockExecutorLoader loader = new MockExecutorLoader();
+ EventCollectorListener eventCollector = new EventCollectorListener();
+ JobRunner runner =
+ createJobRunner(1, "testJob", 1, false, loader, eventCollector);
+ ExecutableNode node = runner.getNode();
+
+ node.setStatus(Status.DISABLED);
+
+ // Should be disabled.
+ Assert.assertTrue(runner.getStatus() == Status.DISABLED);
+ runner.run();
+
+ Assert.assertTrue(runner.getStatus() == node.getStatus());
+ Assert.assertTrue(node.getStatus() == Status.SKIPPED);
+ Assert.assertTrue(node.getStartTime() > 0 && node.getEndTime() > 0);
+ // Give it 10 ms to fail.
+ Assert.assertTrue(node.getEndTime() - node.getStartTime() < 10);
+
+ // Log file and output files should not exist.
+ Props outputProps = runner.getNode().getOutputProps();
+ Assert.assertTrue(outputProps == null);
+ Assert.assertTrue(runner.getLogFilePath() == null);
+ Assert.assertTrue(eventCollector.checkOrdering());
+
+ Assert.assertTrue(loader.getNodeUpdateCount(node.getId()) == null);
+
+ try {
+ eventCollector.checkEventExists(new Type[] { Type.JOB_STARTED,
+ Type.JOB_FINISHED });
+ } catch (Exception e) {
+ Assert.fail(e.getMessage());
+ }
+ }
+
+ @Test
+ public void testPreKilledRun() {
+ MockExecutorLoader loader = new MockExecutorLoader();
+ EventCollectorListener eventCollector = new EventCollectorListener();
+ JobRunner runner =
+ createJobRunner(1, "testJob", 1, false, loader, eventCollector);
+ ExecutableNode node = runner.getNode();
+
+ node.setStatus(Status.KILLED);
+
+ // Should be killed.
+ Assert.assertTrue(runner.getStatus() == Status.KILLED);
+ runner.run();
+
+ // Should just skip the run and not change
+ Assert.assertTrue(runner.getStatus() == node.getStatus());
+ Assert.assertTrue(node.getStatus() == Status.KILLED);
+ Assert.assertTrue(node.getStartTime() > 0 && node.getEndTime() > 0);
+ // Give it 10 ms to fail.
+ Assert.assertTrue(node.getEndTime() - node.getStartTime() < 10);
+
+ Assert.assertTrue(loader.getNodeUpdateCount(node.getId()) == null);
+
+ // Log file and output files should not exist.
+ Props outputProps = runner.getNode().getOutputProps();
+ Assert.assertTrue(outputProps == null);
+ Assert.assertTrue(runner.getLogFilePath() == null);
+ Assert.assertTrue(!runner.isKilled());
+ try {
+ eventCollector.checkEventExists(new Type[] { Type.JOB_STARTED,
+ Type.JOB_FINISHED });
+ } catch (Exception e) {
+ Assert.fail(e.getMessage());
+ }
+ }
+
+ @Test
+ public void testCancelRun() {
+ MockExecutorLoader loader = new MockExecutorLoader();
+ EventCollectorListener eventCollector = new EventCollectorListener();
+ JobRunner runner =
+ createJobRunner(13, "testJob", 10, false, loader, eventCollector);
+ ExecutableNode node = runner.getNode();
+
+ Assert.assertTrue(runner.getStatus() != Status.SUCCEEDED
+ || runner.getStatus() != Status.FAILED);
+
+ Thread thread = new Thread(runner);
+ thread.start();
+
+ synchronized (this) {
+ try {
+ wait(2000);
+ } catch (InterruptedException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ runner.kill();
+ try {
+ wait(500);
+ } catch (InterruptedException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ }
+
+ Assert.assertTrue(runner.getStatus() == node.getStatus());
+ Assert.assertTrue("Status is " + node.getStatus(),
+ node.getStatus() == Status.KILLED);
+ Assert.assertTrue(node.getStartTime() > 0 && node.getEndTime() > 0);
+ // Give it 10 ms to fail.
+ Assert.assertTrue(node.getEndTime() - node.getStartTime() < 3000);
+ Assert.assertTrue(loader.getNodeUpdateCount(node.getId()) == 3);
+
+ // Log file and output files should not exist.
+ File logFile = new File(runner.getLogFilePath());
+ Props outputProps = runner.getNode().getOutputProps();
+ Assert.assertTrue(outputProps == null);
+ Assert.assertTrue(logFile.exists());
+ Assert.assertTrue(eventCollector.checkOrdering());
+ Assert.assertTrue(runner.isKilled());
+ try {
+ eventCollector.checkEventExists(new Type[] { Type.JOB_STARTED,
+ Type.JOB_STATUS_CHANGED, Type.JOB_FINISHED });
+ } catch (Exception e) {
+ System.out.println(e.getMessage());
+
+ Assert.fail(e.getMessage());
+ }
+ }
+
+ @Test
+ public void testDelayedExecutionJob() {
+ MockExecutorLoader loader = new MockExecutorLoader();
+ EventCollectorListener eventCollector = new EventCollectorListener();
+ JobRunner runner =
+ createJobRunner(1, "testJob", 1, false, loader, eventCollector);
+ runner.setDelayStart(5000);
+ long startTime = System.currentTimeMillis();
+ ExecutableNode node = runner.getNode();
+
+ eventCollector.handleEvent(Event.create(null, Event.Type.JOB_STARTED));
+ Assert.assertTrue(runner.getStatus() != Status.SUCCEEDED
+ || runner.getStatus() != Status.FAILED);
+
+ runner.run();
+ eventCollector.handleEvent(Event.create(null, Event.Type.JOB_FINISHED));
+
+ Assert.assertTrue(runner.getStatus() == node.getStatus());
+ Assert.assertTrue("Node status is " + node.getStatus(),
+ node.getStatus() == Status.SUCCEEDED);
+ Assert.assertTrue(node.getStartTime() > 0 && node.getEndTime() > 0);
+ Assert.assertTrue(node.getEndTime() - node.getStartTime() > 1000);
+ Assert.assertTrue(node.getStartTime() - startTime >= 5000);
+
+ File logFile = new File(runner.getLogFilePath());
+ Props outputProps = runner.getNode().getOutputProps();
+ Assert.assertTrue(outputProps != null);
+ Assert.assertTrue(logFile.exists());
+ Assert.assertFalse(runner.isKilled());
+ Assert.assertTrue(loader.getNodeUpdateCount(node.getId()) == 3);
+
+ Assert.assertTrue(eventCollector.checkOrdering());
+ try {
+ eventCollector.checkEventExists(new Type[] { Type.JOB_STARTED,
+ Type.JOB_STATUS_CHANGED, Type.JOB_FINISHED });
+ } catch (Exception e) {
+ Assert.fail(e.getMessage());
+ }
+ }
+
+ @Test
+ public void testDelayedExecutionCancelledJob() {
+ MockExecutorLoader loader = new MockExecutorLoader();
+ EventCollectorListener eventCollector = new EventCollectorListener();
+ JobRunner runner =
+ createJobRunner(1, "testJob", 1, false, loader, eventCollector);
+ runner.setDelayStart(5000);
+ long startTime = System.currentTimeMillis();
+ ExecutableNode node = runner.getNode();
+
+ eventCollector.handleEvent(Event.create(null, Event.Type.JOB_STARTED));
+ Assert.assertTrue(runner.getStatus() != Status.SUCCEEDED
+ || runner.getStatus() != Status.FAILED);
+
+ Thread thread = new Thread(runner);
+ thread.start();
+
+ synchronized (this) {
+ try {
+ wait(2000);
+ } catch (InterruptedException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ runner.kill();
+ try {
+ wait(500);
+ } catch (InterruptedException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ }
+
+ eventCollector.handleEvent(Event.create(null, Event.Type.JOB_FINISHED));
+
+ Assert.assertTrue(runner.getStatus() == node.getStatus());
+ Assert.assertTrue("Node status is " + node.getStatus(),
+ node.getStatus() == Status.KILLED);
+ Assert.assertTrue(node.getStartTime() > 0 && node.getEndTime() > 0);
+ Assert.assertTrue(node.getEndTime() - node.getStartTime() < 1000);
+ Assert.assertTrue(node.getStartTime() - startTime >= 2000);
+ Assert.assertTrue(node.getStartTime() - startTime <= 5000);
+ Assert.assertTrue(runner.isKilled());
+
+ File logFile = new File(runner.getLogFilePath());
+ Props outputProps = runner.getNode().getOutputProps();
+ Assert.assertTrue(outputProps == null);
+ Assert.assertTrue(logFile.exists());
+
+ Assert.assertTrue(eventCollector.checkOrdering());
+ try {
+ eventCollector.checkEventExists(new Type[] { Type.JOB_FINISHED });
+ } catch (Exception e) {
+ Assert.fail(e.getMessage());
+ }
+ }
+
+ private Props createProps(int sleepSec, boolean fail) {
+ Props props = new Props();
+ props.put("type", "java");
+
+ props.put(JavaJob.JOB_CLASS, SleepJavaJob.class.getName());
+ props.put("seconds", sleepSec);
+ props.put(ProcessJob.WORKING_DIR, workingDir.getPath());
+ props.put("fail", String.valueOf(fail));
+
+ return props;
+ }
+
+ private JobRunner createJobRunner(int execId, String name, int time,
+ boolean fail, ExecutorLoader loader, EventCollectorListener listener) {
+ ExecutableFlow flow = new ExecutableFlow();
+ flow.setExecutionId(execId);
+ ExecutableNode node = new ExecutableNode();
+ node.setId(name);
+ node.setParentFlow(flow);
+
+ Props props = createProps(time, fail);
+ node.setInputProps(props);
+ HashSet<String> proxyUsers = new HashSet<String>();
+ proxyUsers.add(flow.getSubmitUser());
+ JobRunner runner = new JobRunner(node, workingDir, loader, jobtypeManager);
+ runner.setLogSettings(logger, "5MB", 4);
+
+ runner.addListener(listener);
+ return runner;
+ }
}
\ No newline at end of file
unit/java/azkaban/test/execapp/MockExecutorLoader.java 420(+221 -199)
diff --git a/unit/java/azkaban/test/execapp/MockExecutorLoader.java b/unit/java/azkaban/test/execapp/MockExecutorLoader.java
index 92c1dee..e966aad 100644
--- a/unit/java/azkaban/test/execapp/MockExecutorLoader.java
+++ b/unit/java/azkaban/test/execapp/MockExecutorLoader.java
@@ -18,204 +18,226 @@ import azkaban.utils.Props;
public class MockExecutorLoader implements ExecutorLoader {
- HashMap<Integer, ExecutableFlow> flows = new HashMap<Integer, ExecutableFlow>();
- HashMap<String, ExecutableNode> nodes = new HashMap<String, ExecutableNode>();
- HashMap<Integer, ExecutionReference> refs = new HashMap<Integer, ExecutionReference>();
- int flowUpdateCount = 0;
- HashMap<String, Integer> jobUpdateCount = new HashMap<String,Integer>();
- Map<Integer, Pair<ExecutionReference, ExecutableFlow>> activeFlows = new HashMap<Integer, Pair<ExecutionReference,ExecutableFlow>>();
-
- @Override
- public void uploadExecutableFlow(ExecutableFlow flow) throws ExecutorManagerException {
- flows.put(flow.getExecutionId(), flow);
- flowUpdateCount++;
- }
-
- @Override
- public ExecutableFlow fetchExecutableFlow(int execId) throws ExecutorManagerException {
- ExecutableFlow flow = flows.get(execId);
- return ExecutableFlow.createExecutableFlowFromObject(flow.toObject());
- }
-
- @Override
- public Map<Integer, Pair<ExecutionReference, ExecutableFlow>> fetchActiveFlows() throws ExecutorManagerException {
- return activeFlows;
- }
-
- @Override
- public List<ExecutableFlow> fetchFlowHistory(int projectId, String flowId, int skip, int num) throws ExecutorManagerException {
- return null;
- }
-
- @Override
- public void addActiveExecutableReference(ExecutionReference ref) throws ExecutorManagerException {
- refs.put(ref.getExecId(), ref);
- }
-
- @Override
- public void removeActiveExecutableReference(int execId) throws ExecutorManagerException {
- refs.remove(execId);
- }
-
- public boolean hasActiveExecutableReference(int execId) {
- return refs.containsKey(execId);
- }
-
- @Override
- public void uploadLogFile(int execId, String name, int attempt, File... files) throws ExecutorManagerException {
-
- }
-
- @Override
- public void updateExecutableFlow(ExecutableFlow flow) throws ExecutorManagerException {
- ExecutableFlow toUpdate = flows.get(flow.getExecutionId());
-
- toUpdate.applyUpdateObject((Map<String,Object>)flow.toUpdateObject(0));
- flowUpdateCount++;
- }
-
- @Override
- public void uploadExecutableNode(ExecutableNode node, Props inputParams) throws ExecutorManagerException {
- ExecutableNode exNode = new ExecutableNode();
- exNode.fillExecutableFromMapObject(node.toObject());
-
- nodes.put(node.getId(), exNode);
- jobUpdateCount.put(node.getId(), 1);
- }
-
- @Override
- public void updateExecutableNode(ExecutableNode node) throws ExecutorManagerException {
- ExecutableNode foundNode = nodes.get(node.getId());
- foundNode.setEndTime(node.getEndTime());
- foundNode.setStartTime(node.getStartTime());
- foundNode.setStatus(node.getStatus());
- foundNode.setUpdateTime(node.getUpdateTime());
-
- Integer value = jobUpdateCount.get(node.getId());
- if (value == null) {
- throw new ExecutorManagerException("The node has not been uploaded");
- }
- else {
- jobUpdateCount.put(node.getId(), ++value);
- }
-
- flowUpdateCount++;
- }
-
- @Override
- public int fetchNumExecutableFlows(int projectId, String flowId) throws ExecutorManagerException {
- return 0;
- }
-
- @Override
- public int fetchNumExecutableFlows() throws ExecutorManagerException {
- // TODO Auto-generated method stub
- return 0;
- }
-
- public int getFlowUpdateCount() {
- return flowUpdateCount;
- }
-
- public Integer getNodeUpdateCount(String jobId) {
- return jobUpdateCount.get(jobId);
- }
-
- @Override
- public ExecutableJobInfo fetchJobInfo(int execId, String jobId, int attempt)
- throws ExecutorManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public boolean updateExecutableReference(int execId, long updateTime) throws ExecutorManagerException {
- // TODO Auto-generated method stub
- return true;
- }
-
- @Override
- public LogData fetchLogs(int execId, String name, int attempt, int startByte, int endByte) throws ExecutorManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public List<ExecutableFlow> fetchFlowHistory(int skip, int num) throws ExecutorManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public List<ExecutableFlow> fetchFlowHistory(String projectContains, String flowContains, String userNameContains, int status,
- long startData, long endData, int skip, int num) throws ExecutorManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public List<ExecutableJobInfo> fetchJobHistory(int projectId, String jobId, int skip, int size)
- throws ExecutorManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public int fetchNumExecutableNodes(int projectId, String jobId) throws ExecutorManagerException {
- // TODO Auto-generated method stub
- return 0;
- }
-
- @Override
- public Props fetchExecutionJobInputProps(int execId, String jobId) throws ExecutorManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public Props fetchExecutionJobOutputProps(int execId, String jobId) throws ExecutorManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public Pair<Props, Props> fetchExecutionJobProps(int execId, String jobId) throws ExecutorManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public List<ExecutableJobInfo> fetchJobInfoAttempts(int execId, String jobId) throws ExecutorManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public int removeExecutionLogsByTime(long millis)
- throws ExecutorManagerException {
- // TODO Auto-generated method stub
- return 0;
- }
-
- @Override
- public List<ExecutableFlow> fetchFlowHistory(int projectId, String flowId, int skip, int num, Status status) throws ExecutorManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public List<Object> fetchAttachments(int execId, String name, int attempt)
- throws ExecutorManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public void uploadAttachmentFile(ExecutableNode node, File file)
- throws ExecutorManagerException {
- // TODO Auto-generated method stub
-
- }
-
+ HashMap<Integer, ExecutableFlow> flows =
+ new HashMap<Integer, ExecutableFlow>();
+ HashMap<String, ExecutableNode> nodes = new HashMap<String, ExecutableNode>();
+ HashMap<Integer, ExecutionReference> refs =
+ new HashMap<Integer, ExecutionReference>();
+ int flowUpdateCount = 0;
+ HashMap<String, Integer> jobUpdateCount = new HashMap<String, Integer>();
+ Map<Integer, Pair<ExecutionReference, ExecutableFlow>> activeFlows =
+ new HashMap<Integer, Pair<ExecutionReference, ExecutableFlow>>();
+
+ @Override
+ public void uploadExecutableFlow(ExecutableFlow flow)
+ throws ExecutorManagerException {
+ flows.put(flow.getExecutionId(), flow);
+ flowUpdateCount++;
+ }
+
+ @Override
+ public ExecutableFlow fetchExecutableFlow(int execId)
+ throws ExecutorManagerException {
+ ExecutableFlow flow = flows.get(execId);
+ return ExecutableFlow.createExecutableFlowFromObject(flow.toObject());
+ }
+
+ @Override
+ public Map<Integer, Pair<ExecutionReference, ExecutableFlow>> fetchActiveFlows()
+ throws ExecutorManagerException {
+ return activeFlows;
+ }
+
+ @Override
+ public List<ExecutableFlow> fetchFlowHistory(int projectId, String flowId,
+ int skip, int num) throws ExecutorManagerException {
+ return null;
+ }
+
+ @Override
+ public void addActiveExecutableReference(ExecutionReference ref)
+ throws ExecutorManagerException {
+ refs.put(ref.getExecId(), ref);
+ }
+
+ @Override
+ public void removeActiveExecutableReference(int execId)
+ throws ExecutorManagerException {
+ refs.remove(execId);
+ }
+
+ public boolean hasActiveExecutableReference(int execId) {
+ return refs.containsKey(execId);
+ }
+
+ @Override
+ public void uploadLogFile(int execId, String name, int attempt, File... files)
+ throws ExecutorManagerException {
+
+ }
+
+ @Override
+ public void updateExecutableFlow(ExecutableFlow flow)
+ throws ExecutorManagerException {
+ ExecutableFlow toUpdate = flows.get(flow.getExecutionId());
+
+ toUpdate.applyUpdateObject((Map<String, Object>) flow.toUpdateObject(0));
+ flowUpdateCount++;
+ }
+
+ @Override
+ public void uploadExecutableNode(ExecutableNode node, Props inputParams)
+ throws ExecutorManagerException {
+ ExecutableNode exNode = new ExecutableNode();
+ exNode.fillExecutableFromMapObject(node.toObject());
+
+ nodes.put(node.getId(), exNode);
+ jobUpdateCount.put(node.getId(), 1);
+ }
+
+ @Override
+ public void updateExecutableNode(ExecutableNode node)
+ throws ExecutorManagerException {
+ ExecutableNode foundNode = nodes.get(node.getId());
+ foundNode.setEndTime(node.getEndTime());
+ foundNode.setStartTime(node.getStartTime());
+ foundNode.setStatus(node.getStatus());
+ foundNode.setUpdateTime(node.getUpdateTime());
+
+ Integer value = jobUpdateCount.get(node.getId());
+ if (value == null) {
+ throw new ExecutorManagerException("The node has not been uploaded");
+ } else {
+ jobUpdateCount.put(node.getId(), ++value);
+ }
+
+ flowUpdateCount++;
+ }
+
+ @Override
+ public int fetchNumExecutableFlows(int projectId, String flowId)
+ throws ExecutorManagerException {
+ return 0;
+ }
+
+ @Override
+ public int fetchNumExecutableFlows() throws ExecutorManagerException {
+ // TODO Auto-generated method stub
+ return 0;
+ }
+
+ public int getFlowUpdateCount() {
+ return flowUpdateCount;
+ }
+
+ public Integer getNodeUpdateCount(String jobId) {
+ return jobUpdateCount.get(jobId);
+ }
+
+ @Override
+ public ExecutableJobInfo fetchJobInfo(int execId, String jobId, int attempt)
+ throws ExecutorManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public boolean updateExecutableReference(int execId, long updateTime)
+ throws ExecutorManagerException {
+ // TODO Auto-generated method stub
+ return true;
+ }
+
+ @Override
+ public LogData fetchLogs(int execId, String name, int attempt, int startByte,
+ int endByte) throws ExecutorManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public List<ExecutableFlow> fetchFlowHistory(int skip, int num)
+ throws ExecutorManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public List<ExecutableFlow> fetchFlowHistory(String projectContains,
+ String flowContains, String userNameContains, int status, long startData,
+ long endData, int skip, int num) throws ExecutorManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public List<ExecutableJobInfo> fetchJobHistory(int projectId, String jobId,
+ int skip, int size) throws ExecutorManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public int fetchNumExecutableNodes(int projectId, String jobId)
+ throws ExecutorManagerException {
+ // TODO Auto-generated method stub
+ return 0;
+ }
+
+ @Override
+ public Props fetchExecutionJobInputProps(int execId, String jobId)
+ throws ExecutorManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Props fetchExecutionJobOutputProps(int execId, String jobId)
+ throws ExecutorManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Pair<Props, Props> fetchExecutionJobProps(int execId, String jobId)
+ throws ExecutorManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public List<ExecutableJobInfo> fetchJobInfoAttempts(int execId, String jobId)
+ throws ExecutorManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public int removeExecutionLogsByTime(long millis)
+ throws ExecutorManagerException {
+ // TODO Auto-generated method stub
+ return 0;
+ }
+
+ @Override
+ public List<ExecutableFlow> fetchFlowHistory(int projectId, String flowId,
+ int skip, int num, Status status) throws ExecutorManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public List<Object> fetchAttachments(int execId, String name, int attempt)
+ throws ExecutorManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public void uploadAttachmentFile(ExecutableNode node, File file)
+ throws ExecutorManagerException {
+ // TODO Auto-generated method stub
+
+ }
}
\ No newline at end of file
unit/java/azkaban/test/execapp/MockProjectLoader.java 422(+212 -210)
diff --git a/unit/java/azkaban/test/execapp/MockProjectLoader.java b/unit/java/azkaban/test/execapp/MockProjectLoader.java
index ef76de7..4e44eb9 100644
--- a/unit/java/azkaban/test/execapp/MockProjectLoader.java
+++ b/unit/java/azkaban/test/execapp/MockProjectLoader.java
@@ -20,214 +20,216 @@ import azkaban.utils.Props;
import azkaban.utils.Triple;
public class MockProjectLoader implements ProjectLoader {
- public File dir;
-
- public MockProjectLoader(File dir) {
- this.dir = dir;
- }
-
- @Override
- public List<Project> fetchAllActiveProjects()
- throws ProjectManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public Project fetchProjectById(int id) throws ProjectManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public Project createNewProject(String name, String description,
- User creator) throws ProjectManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public void removeProject(Project project, String user)
- throws ProjectManagerException {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void updatePermission(Project project, String name, Permission perm,
- boolean isGroup) throws ProjectManagerException {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void updateDescription(Project project, String description,
- String user) throws ProjectManagerException {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public boolean postEvent(Project project, EventType type, String user,
- String message) {
- // TODO Auto-generated method stub
- return false;
- }
-
- @Override
- public List<ProjectLogEvent> getProjectEvents(Project project, int num,
- int skip) throws ProjectManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public void uploadProjectFile(Project project, int version,
- String filetype, String filename, File localFile, String user)
- throws ProjectManagerException {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public ProjectFileHandler getUploadedFile(Project project, int version)
- throws ProjectManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public ProjectFileHandler getUploadedFile(int projectId, int version)
- throws ProjectManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public void changeProjectVersion(Project project, int version, String user)
- throws ProjectManagerException {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void uploadFlows(Project project, int version, Collection<Flow> flows)
- throws ProjectManagerException {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void uploadFlow(Project project, int version, Flow flow)
- throws ProjectManagerException {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public Flow fetchFlow(Project project, String flowId)
- throws ProjectManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public List<Flow> fetchAllProjectFlows(Project project)
- throws ProjectManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public int getLatestProjectVersion(Project project)
- throws ProjectManagerException {
- // TODO Auto-generated method stub
- return 0;
- }
-
- @Override
- public void uploadProjectProperty(Project project, Props props)
- throws ProjectManagerException {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void uploadProjectProperties(Project project, List<Props> properties)
- throws ProjectManagerException {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public Props fetchProjectProperty(Project project, String propsName)
- throws ProjectManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public Map<String, Props> fetchProjectProperties(int projectId, int version)
- throws ProjectManagerException {
- Map<String, Props> propertyMap = new HashMap<String, Props>();
- for (File file: dir.listFiles()) {
- String name = file.getName();
- if (name.endsWith(".job") || name.endsWith(".properties")) {
- try {
- Props props = new Props(null, file);
- propertyMap.put(name, props);
- } catch (IOException e) {
- throw new ProjectManagerException(e.getMessage());
- }
- }
- }
-
- return propertyMap;
- }
-
- @Override
- public void cleanOlderProjectVersion(int projectId, int version) throws ProjectManagerException {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void removePermission(Project project, String name, boolean isGroup) throws ProjectManagerException {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void updateProjectProperty(Project project, Props props)
- throws ProjectManagerException {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public Props fetchProjectProperty(int projectId, int projectVer,
- String propsName) throws ProjectManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public List<Triple<String, Boolean, Permission>> getProjectPermissions(
- int projectId) throws ProjectManagerException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public void updateProjectSettings(Project project)
- throws ProjectManagerException {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void updateFlow(Project project, int version, Flow flow) throws ProjectManagerException {
- // TODO Auto-generated method stub
-
- }
+ public File dir;
+
+ public MockProjectLoader(File dir) {
+ this.dir = dir;
+ }
+
+ @Override
+ public List<Project> fetchAllActiveProjects() throws ProjectManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Project fetchProjectById(int id) throws ProjectManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Project createNewProject(String name, String description, User creator)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public void removeProject(Project project, String user)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void updatePermission(Project project, String name, Permission perm,
+ boolean isGroup) throws ProjectManagerException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void updateDescription(Project project, String description, String user)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public boolean postEvent(Project project, EventType type, String user,
+ String message) {
+ // TODO Auto-generated method stub
+ return false;
+ }
+
+ @Override
+ public List<ProjectLogEvent> getProjectEvents(Project project, int num,
+ int skip) throws ProjectManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public void uploadProjectFile(Project project, int version, String filetype,
+ String filename, File localFile, String user)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public ProjectFileHandler getUploadedFile(Project project, int version)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public ProjectFileHandler getUploadedFile(int projectId, int version)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public void changeProjectVersion(Project project, int version, String user)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void uploadFlows(Project project, int version, Collection<Flow> flows)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void uploadFlow(Project project, int version, Flow flow)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public Flow fetchFlow(Project project, String flowId)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public List<Flow> fetchAllProjectFlows(Project project)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public int getLatestProjectVersion(Project project)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+ return 0;
+ }
+
+ @Override
+ public void uploadProjectProperty(Project project, Props props)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void uploadProjectProperties(Project project, List<Props> properties)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public Props fetchProjectProperty(Project project, String propsName)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Map<String, Props> fetchProjectProperties(int projectId, int version)
+ throws ProjectManagerException {
+ Map<String, Props> propertyMap = new HashMap<String, Props>();
+ for (File file : dir.listFiles()) {
+ String name = file.getName();
+ if (name.endsWith(".job") || name.endsWith(".properties")) {
+ try {
+ Props props = new Props(null, file);
+ propertyMap.put(name, props);
+ } catch (IOException e) {
+ throw new ProjectManagerException(e.getMessage());
+ }
+ }
+ }
+
+ return propertyMap;
+ }
+
+ @Override
+ public void cleanOlderProjectVersion(int projectId, int version)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void removePermission(Project project, String name, boolean isGroup)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void updateProjectProperty(Project project, Props props)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public Props fetchProjectProperty(int projectId, int projectVer,
+ String propsName) throws ProjectManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public List<Triple<String, Boolean, Permission>> getProjectPermissions(
+ int projectId) throws ProjectManagerException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public void updateProjectSettings(Project project)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void updateFlow(Project project, int version, Flow flow)
+ throws ProjectManagerException {
+ // TODO Auto-generated method stub
+
+ }
}
\ No newline at end of file
diff --git a/unit/java/azkaban/test/execapp/ProjectVersionsTest.java b/unit/java/azkaban/test/execapp/ProjectVersionsTest.java
index 9120729..c8044a7 100644
--- a/unit/java/azkaban/test/execapp/ProjectVersionsTest.java
+++ b/unit/java/azkaban/test/execapp/ProjectVersionsTest.java
@@ -9,20 +9,20 @@ import org.junit.Test;
import azkaban.execapp.ProjectVersion;
public class ProjectVersionsTest {
-
- @Test
- public void testVersionOrdering() {
- ArrayList<ProjectVersion> pversion = new ArrayList<ProjectVersion>();
- pversion.add(new ProjectVersion(1, 2));
- pversion.add(new ProjectVersion(1, 3));
- pversion.add(new ProjectVersion(1, 1));
-
- Collections.sort(pversion);
-
- int i = 0;
- for (ProjectVersion version: pversion) {
- Assert.assertTrue(i < version.getVersion());
- i = version.getVersion();
- }
- }
+
+ @Test
+ public void testVersionOrdering() {
+ ArrayList<ProjectVersion> pversion = new ArrayList<ProjectVersion>();
+ pversion.add(new ProjectVersion(1, 2));
+ pversion.add(new ProjectVersion(1, 3));
+ pversion.add(new ProjectVersion(1, 1));
+
+ Collections.sort(pversion);
+
+ int i = 0;
+ for (ProjectVersion version : pversion) {
+ Assert.assertTrue(i < version.getVersion());
+ i = version.getVersion();
+ }
+ }
}
\ No newline at end of file
unit/java/azkaban/test/executor/ExecutableFlowTest.java 726(+374 -352)
diff --git a/unit/java/azkaban/test/executor/ExecutableFlowTest.java b/unit/java/azkaban/test/executor/ExecutableFlowTest.java
index 6e4b0b1..08dfbda 100644
--- a/unit/java/azkaban/test/executor/ExecutableFlowTest.java
+++ b/unit/java/azkaban/test/executor/ExecutableFlowTest.java
@@ -26,358 +26,380 @@ import azkaban.utils.DirectoryFlowLoader;
import azkaban.utils.JSONUtils;
public class ExecutableFlowTest {
- private Project project;
-
- @Before
- public void setUp() throws Exception {
- Logger logger = Logger.getLogger(this.getClass());
- DirectoryFlowLoader loader = new DirectoryFlowLoader(logger);
- loader.loadProjectFlow(new File("unit/executions/embedded"));
- Assert.assertEquals(0, loader.getErrors().size());
-
- project = new Project(11, "myTestProject");
- project.setFlows(loader.getFlowMap());
- project.setVersion(123);
+ private Project project;
+
+ @Before
+ public void setUp() throws Exception {
+ Logger logger = Logger.getLogger(this.getClass());
+ DirectoryFlowLoader loader = new DirectoryFlowLoader(logger);
+ loader.loadProjectFlow(new File("unit/executions/embedded"));
+ Assert.assertEquals(0, loader.getErrors().size());
+
+ project = new Project(11, "myTestProject");
+ project.setFlows(loader.getFlowMap());
+ project.setVersion(123);
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ }
+
+ @Test
+ public void testExecutorFlowCreation() throws Exception {
+ Flow flow = project.getFlow("jobe");
+ Assert.assertNotNull(flow);
+
+ ExecutableFlow exFlow = new ExecutableFlow(project, flow);
+ Assert.assertNotNull(exFlow.getExecutableNode("joba"));
+ Assert.assertNotNull(exFlow.getExecutableNode("jobb"));
+ Assert.assertNotNull(exFlow.getExecutableNode("jobc"));
+ Assert.assertNotNull(exFlow.getExecutableNode("jobd"));
+ Assert.assertNotNull(exFlow.getExecutableNode("jobe"));
+
+ Assert.assertFalse(exFlow.getExecutableNode("joba") instanceof ExecutableFlowBase);
+ Assert.assertTrue(exFlow.getExecutableNode("jobb") instanceof ExecutableFlowBase);
+ Assert.assertTrue(exFlow.getExecutableNode("jobc") instanceof ExecutableFlowBase);
+ Assert.assertTrue(exFlow.getExecutableNode("jobd") instanceof ExecutableFlowBase);
+ Assert.assertFalse(exFlow.getExecutableNode("jobe") instanceof ExecutableFlowBase);
+
+ ExecutableFlowBase jobbFlow =
+ (ExecutableFlowBase) exFlow.getExecutableNode("jobb");
+ ExecutableFlowBase jobcFlow =
+ (ExecutableFlowBase) exFlow.getExecutableNode("jobc");
+ ExecutableFlowBase jobdFlow =
+ (ExecutableFlowBase) exFlow.getExecutableNode("jobd");
+
+ Assert.assertEquals("innerFlow", jobbFlow.getFlowId());
+ Assert.assertEquals("jobb", jobbFlow.getId());
+ Assert.assertEquals(4, jobbFlow.getExecutableNodes().size());
+
+ Assert.assertEquals("innerFlow", jobcFlow.getFlowId());
+ Assert.assertEquals("jobc", jobcFlow.getId());
+ Assert.assertEquals(4, jobcFlow.getExecutableNodes().size());
+
+ Assert.assertEquals("innerFlow", jobdFlow.getFlowId());
+ Assert.assertEquals("jobd", jobdFlow.getId());
+ Assert.assertEquals(4, jobdFlow.getExecutableNodes().size());
+ }
+
+ @Test
+ public void testExecutorFlowJson() throws Exception {
+ Flow flow = project.getFlow("jobe");
+ Assert.assertNotNull(flow);
+
+ ExecutableFlow exFlow = new ExecutableFlow(project, flow);
+
+ Object obj = exFlow.toObject();
+ String exFlowJSON = JSONUtils.toJSON(obj);
+ @SuppressWarnings("unchecked")
+ Map<String, Object> flowObjMap =
+ (Map<String, Object>) JSONUtils.parseJSONFromString(exFlowJSON);
+
+ ExecutableFlow parsedExFlow =
+ ExecutableFlow.createExecutableFlowFromObject(flowObjMap);
+ testEquals(exFlow, parsedExFlow);
+ }
+
+ @Test
+ public void testExecutorFlowJson2() throws Exception {
+ Flow flow = project.getFlow("jobe");
+ Assert.assertNotNull(flow);
+
+ ExecutableFlow exFlow = new ExecutableFlow(project, flow);
+ exFlow.setExecutionId(101);
+ exFlow.setAttempt(2);
+ exFlow.setDelayedExecution(1000);
+
+ ExecutionOptions options = new ExecutionOptions();
+ options.setConcurrentOption("blah");
+ options.setDisabledJobs(Arrays.asList(new Object[] { "bee", null, "boo" }));
+ options.setFailureAction(FailureAction.CANCEL_ALL);
+ options
+ .setFailureEmails(Arrays.asList(new String[] { "doo", null, "daa" }));
+ options
+ .setSuccessEmails(Arrays.asList(new String[] { "dee", null, "dae" }));
+ options.setPipelineLevel(2);
+ options.setPipelineExecutionId(3);
+ options.setNotifyOnFirstFailure(true);
+ options.setNotifyOnLastFailure(true);
+
+ HashMap<String, String> flowProps = new HashMap<String, String>();
+ flowProps.put("la", "fa");
+ options.addAllFlowParameters(flowProps);
+ exFlow.setExecutionOptions(options);
+
+ Object obj = exFlow.toObject();
+ String exFlowJSON = JSONUtils.toJSON(obj);
+ @SuppressWarnings("unchecked")
+ Map<String, Object> flowObjMap =
+ (Map<String, Object>) JSONUtils.parseJSONFromString(exFlowJSON);
+
+ ExecutableFlow parsedExFlow =
+ ExecutableFlow.createExecutableFlowFromObject(flowObjMap);
+ testEquals(exFlow, parsedExFlow);
+ }
+
+ @SuppressWarnings("rawtypes")
+ @Test
+ public void testExecutorFlowUpdates() throws Exception {
+ Flow flow = project.getFlow("jobe");
+ ExecutableFlow exFlow = new ExecutableFlow(project, flow);
+ exFlow.setExecutionId(101);
+
+ // Create copy of flow
+ Object obj = exFlow.toObject();
+ String exFlowJSON = JSONUtils.toJSON(obj);
+ @SuppressWarnings("unchecked")
+ Map<String, Object> flowObjMap =
+ (Map<String, Object>) JSONUtils.parseJSONFromString(exFlowJSON);
+ ExecutableFlow copyFlow =
+ ExecutableFlow.createExecutableFlowFromObject(flowObjMap);
+
+ testEquals(exFlow, copyFlow);
+
+ ExecutableNode joba = exFlow.getExecutableNode("joba");
+ ExecutableFlowBase jobb =
+ (ExecutableFlowBase) (exFlow.getExecutableNode("jobb"));
+ ExecutableFlowBase jobc =
+ (ExecutableFlowBase) (exFlow.getExecutableNode("jobc"));
+ ExecutableFlowBase jobd =
+ (ExecutableFlowBase) (exFlow.getExecutableNode("jobd"));
+ ExecutableNode jobe = exFlow.getExecutableNode("jobe");
+ assertNotNull(joba, jobb, jobc, jobd, jobe);
+
+ ExecutableNode jobbInnerFlowA = jobb.getExecutableNode("innerJobA");
+ ExecutableNode jobbInnerFlowB = jobb.getExecutableNode("innerJobB");
+ ExecutableNode jobbInnerFlowC = jobb.getExecutableNode("innerJobC");
+ ExecutableNode jobbInnerFlow = jobb.getExecutableNode("innerFlow");
+ assertNotNull(jobbInnerFlowA, jobbInnerFlowB, jobbInnerFlowC, jobbInnerFlow);
+
+ ExecutableNode jobcInnerFlowA = jobc.getExecutableNode("innerJobA");
+ ExecutableNode jobcInnerFlowB = jobc.getExecutableNode("innerJobB");
+ ExecutableNode jobcInnerFlowC = jobc.getExecutableNode("innerJobC");
+ ExecutableNode jobcInnerFlow = jobc.getExecutableNode("innerFlow");
+ assertNotNull(jobcInnerFlowA, jobcInnerFlowB, jobcInnerFlowC, jobcInnerFlow);
+
+ ExecutableNode jobdInnerFlowA = jobd.getExecutableNode("innerJobA");
+ ExecutableNode jobdInnerFlowB = jobd.getExecutableNode("innerJobB");
+ ExecutableNode jobdInnerFlowC = jobd.getExecutableNode("innerJobC");
+ ExecutableNode jobdInnerFlow = jobd.getExecutableNode("innerFlow");
+ assertNotNull(jobdInnerFlowA, jobdInnerFlowB, jobdInnerFlowC, jobdInnerFlow);
+
+ exFlow.setEndTime(1000);
+ exFlow.setStartTime(500);
+ exFlow.setStatus(Status.RUNNING);
+ exFlow.setUpdateTime(133);
+
+ // Change one job and see if it updates
+ long time = System.currentTimeMillis();
+ jobe.setEndTime(time);
+ jobe.setUpdateTime(time);
+ jobe.setStatus(Status.DISABLED);
+ jobe.setStartTime(time - 1);
+ // Should be one node that was changed
+ Map<String, Object> updateObject = exFlow.toUpdateObject(0);
+ Assert.assertEquals(1, ((List) (updateObject.get("nodes"))).size());
+ // Reapplying should give equal results.
+ copyFlow.applyUpdateObject(updateObject);
+ testEquals(exFlow, copyFlow);
+
+ // This update shouldn't provide any results
+ updateObject = exFlow.toUpdateObject(System.currentTimeMillis());
+ Assert.assertNull(updateObject.get("nodes"));
+
+ // Change inner flow
+ long currentTime = time + 1;
+ jobbInnerFlowA.setEndTime(currentTime);
+ jobbInnerFlowA.setUpdateTime(currentTime);
+ jobbInnerFlowA.setStatus(Status.DISABLED);
+ jobbInnerFlowA.setStartTime(currentTime - 100);
+ // We should get 2 updates if we do a toUpdateObject using 0 as the start
+ // time
+ updateObject = exFlow.toUpdateObject(0);
+ Assert.assertEquals(2, ((List) (updateObject.get("nodes"))).size());
+
+ // This should provide 1 update. That we can apply
+ updateObject = exFlow.toUpdateObject(jobe.getUpdateTime());
+ Assert.assertNotNull(updateObject.get("nodes"));
+ Assert.assertEquals(1, ((List) (updateObject.get("nodes"))).size());
+ copyFlow.applyUpdateObject(updateObject);
+ testEquals(exFlow, copyFlow);
+
+ // This shouldn't give any results anymore
+ updateObject = exFlow.toUpdateObject(jobbInnerFlowA.getUpdateTime());
+ Assert.assertNull(updateObject.get("nodes"));
+ }
+
+ private void assertNotNull(ExecutableNode... nodes) {
+ for (ExecutableNode node : nodes) {
+ Assert.assertNotNull(node);
+ }
+ }
+
+ public static void testEquals(ExecutableNode a, ExecutableNode b) {
+ if (a instanceof ExecutableFlow) {
+ if (b instanceof ExecutableFlow) {
+ ExecutableFlow exA = (ExecutableFlow) a;
+ ExecutableFlow exB = (ExecutableFlow) b;
+
+ Assert.assertEquals(exA.getScheduleId(), exB.getScheduleId());
+ Assert.assertEquals(exA.getProjectId(), exB.getProjectId());
+ Assert.assertEquals(exA.getVersion(), exB.getVersion());
+ Assert.assertEquals(exA.getSubmitTime(), exB.getSubmitTime());
+ Assert.assertEquals(exA.getSubmitUser(), exB.getSubmitUser());
+ Assert.assertEquals(exA.getExecutionPath(), exB.getExecutionPath());
+
+ testEquals(exA.getExecutionOptions(), exB.getExecutionOptions());
+ } else {
+ Assert.fail("A is ExecutableFlow, but B is not");
+ }
}
-
- @After
- public void tearDown() throws Exception {
+
+ if (a instanceof ExecutableFlowBase) {
+ if (b instanceof ExecutableFlowBase) {
+ ExecutableFlowBase exA = (ExecutableFlowBase) a;
+ ExecutableFlowBase exB = (ExecutableFlowBase) b;
+
+ Assert.assertEquals(exA.getFlowId(), exB.getFlowId());
+ Assert.assertEquals(exA.getExecutableNodes().size(), exB
+ .getExecutableNodes().size());
+
+ for (ExecutableNode nodeA : exA.getExecutableNodes()) {
+ ExecutableNode nodeB = exB.getExecutableNode(nodeA.getId());
+ Assert.assertNotNull(nodeB);
+ Assert.assertEquals(a, nodeA.getParentFlow());
+ Assert.assertEquals(b, nodeB.getParentFlow());
+
+ testEquals(nodeA, nodeB);
+ }
+ } else {
+ Assert.fail("A is ExecutableFlowBase, but B is not");
+ }
+ }
+
+ Assert.assertEquals(a.getId(), b.getId());
+ Assert.assertEquals(a.getStatus(), b.getStatus());
+ Assert.assertEquals(a.getStartTime(), b.getStartTime());
+ Assert.assertEquals(a.getEndTime(), b.getEndTime());
+ Assert.assertEquals(a.getUpdateTime(), b.getUpdateTime());
+ Assert.assertEquals(a.getAttempt(), b.getAttempt());
+
+ Assert.assertEquals(a.getJobSource(), b.getJobSource());
+ Assert.assertEquals(a.getPropsSource(), b.getPropsSource());
+ Assert.assertEquals(a.getInNodes(), a.getInNodes());
+ Assert.assertEquals(a.getOutNodes(), a.getOutNodes());
+ }
+
+ public static void testEquals(ExecutionOptions optionsA,
+ ExecutionOptions optionsB) {
+ Assert.assertEquals(optionsA.getConcurrentOption(),
+ optionsB.getConcurrentOption());
+ Assert.assertEquals(optionsA.getNotifyOnFirstFailure(),
+ optionsB.getNotifyOnFirstFailure());
+ Assert.assertEquals(optionsA.getNotifyOnLastFailure(),
+ optionsB.getNotifyOnLastFailure());
+ Assert.assertEquals(optionsA.getFailureAction(),
+ optionsB.getFailureAction());
+ Assert.assertEquals(optionsA.getPipelineExecutionId(),
+ optionsB.getPipelineExecutionId());
+ Assert.assertEquals(optionsA.getPipelineLevel(),
+ optionsB.getPipelineLevel());
+ Assert.assertEquals(optionsA.isFailureEmailsOverridden(),
+ optionsB.isFailureEmailsOverridden());
+ Assert.assertEquals(optionsA.isSuccessEmailsOverridden(),
+ optionsB.isSuccessEmailsOverridden());
+
+ testDisabledEquals(optionsA.getDisabledJobs(), optionsB.getDisabledJobs());
+ testEquals(optionsA.getSuccessEmails(), optionsB.getSuccessEmails());
+ testEquals(optionsA.getFailureEmails(), optionsB.getFailureEmails());
+ testEquals(optionsA.getFlowParameters(), optionsB.getFlowParameters());
+ }
+
+ public static void testEquals(Set<String> a, Set<String> b) {
+ if (a == b) {
+ return;
+ }
+
+ if (a == null || b == null) {
+ Assert.fail();
+ }
+
+ Assert.assertEquals(a.size(), b.size());
+
+ Iterator<String> iterA = a.iterator();
+
+ while (iterA.hasNext()) {
+ String aStr = iterA.next();
+ Assert.assertTrue(b.contains(aStr));
+ }
+ }
+
+ public static void testEquals(List<String> a, List<String> b) {
+ if (a == b) {
+ return;
+ }
+
+ if (a == null || b == null) {
+ Assert.fail();
+ }
+
+ Assert.assertEquals(a.size(), b.size());
+
+ Iterator<String> iterA = a.iterator();
+ Iterator<String> iterB = b.iterator();
+
+ while (iterA.hasNext()) {
+ String aStr = iterA.next();
+ String bStr = iterB.next();
+ Assert.assertEquals(aStr, bStr);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ public static void testDisabledEquals(List<Object> a, List<Object> b) {
+ if (a == b) {
+ return;
+ }
+
+ if (a == null || b == null) {
+ Assert.fail();
+ }
+
+ Assert.assertEquals(a.size(), b.size());
+
+ Iterator<Object> iterA = a.iterator();
+ Iterator<Object> iterB = b.iterator();
+
+ while (iterA.hasNext()) {
+ Object aStr = iterA.next();
+ Object bStr = iterB.next();
+
+ if (aStr instanceof Map && bStr instanceof Map) {
+ Map<String, Object> aMap = (Map<String, Object>) aStr;
+ Map<String, Object> bMap = (Map<String, Object>) bStr;
+
+ Assert.assertEquals((String) aMap.get("id"), (String) bMap.get("id"));
+ testDisabledEquals((List<Object>) aMap.get("children"),
+ (List<Object>) bMap.get("children"));
+ } else {
+ Assert.assertEquals(aStr, bStr);
+ }
+ }
+ }
+
+ public static void testEquals(Map<String, String> a, Map<String, String> b) {
+ if (a == b) {
+ return;
+ }
+
+ if (a == null || b == null) {
+ Assert.fail();
+ }
+
+ Assert.assertEquals(a.size(), b.size());
+
+ for (String key : a.keySet()) {
+ Assert.assertEquals(a.get(key), b.get(key));
}
-
- @Test
- public void testExecutorFlowCreation() throws Exception {
- Flow flow = project.getFlow("jobe");
- Assert.assertNotNull(flow);
-
- ExecutableFlow exFlow = new ExecutableFlow(project, flow);
- Assert.assertNotNull(exFlow.getExecutableNode("joba"));
- Assert.assertNotNull(exFlow.getExecutableNode("jobb"));
- Assert.assertNotNull(exFlow.getExecutableNode("jobc"));
- Assert.assertNotNull(exFlow.getExecutableNode("jobd"));
- Assert.assertNotNull(exFlow.getExecutableNode("jobe"));
-
- Assert.assertFalse(exFlow.getExecutableNode("joba") instanceof ExecutableFlowBase);
- Assert.assertTrue(exFlow.getExecutableNode("jobb") instanceof ExecutableFlowBase);
- Assert.assertTrue(exFlow.getExecutableNode("jobc") instanceof ExecutableFlowBase);
- Assert.assertTrue(exFlow.getExecutableNode("jobd") instanceof ExecutableFlowBase);
- Assert.assertFalse(exFlow.getExecutableNode("jobe") instanceof ExecutableFlowBase);
-
- ExecutableFlowBase jobbFlow = (ExecutableFlowBase)exFlow.getExecutableNode("jobb");
- ExecutableFlowBase jobcFlow = (ExecutableFlowBase)exFlow.getExecutableNode("jobc");
- ExecutableFlowBase jobdFlow = (ExecutableFlowBase)exFlow.getExecutableNode("jobd");
-
- Assert.assertEquals("innerFlow", jobbFlow.getFlowId());
- Assert.assertEquals("jobb", jobbFlow.getId());
- Assert.assertEquals(4, jobbFlow.getExecutableNodes().size());
-
- Assert.assertEquals("innerFlow", jobcFlow.getFlowId());
- Assert.assertEquals("jobc", jobcFlow.getId());
- Assert.assertEquals(4, jobcFlow.getExecutableNodes().size());
-
- Assert.assertEquals("innerFlow", jobdFlow.getFlowId());
- Assert.assertEquals("jobd", jobdFlow.getId());
- Assert.assertEquals(4, jobdFlow.getExecutableNodes().size());
- }
-
- @Test
- public void testExecutorFlowJson() throws Exception {
- Flow flow = project.getFlow("jobe");
- Assert.assertNotNull(flow);
-
- ExecutableFlow exFlow = new ExecutableFlow(project, flow);
-
- Object obj = exFlow.toObject();
- String exFlowJSON = JSONUtils.toJSON(obj);
- @SuppressWarnings("unchecked")
- Map<String,Object> flowObjMap = (Map<String,Object>)JSONUtils.parseJSONFromString(exFlowJSON);
-
- ExecutableFlow parsedExFlow = ExecutableFlow.createExecutableFlowFromObject(flowObjMap);
- testEquals(exFlow, parsedExFlow);
- }
-
- @Test
- public void testExecutorFlowJson2() throws Exception {
- Flow flow = project.getFlow("jobe");
- Assert.assertNotNull(flow);
-
- ExecutableFlow exFlow = new ExecutableFlow(project, flow);
- exFlow.setExecutionId(101);
- exFlow.setAttempt(2);
- exFlow.setDelayedExecution(1000);
-
- ExecutionOptions options = new ExecutionOptions();
- options.setConcurrentOption("blah");
- options.setDisabledJobs(Arrays.asList(new Object[] {"bee", null, "boo"}));
- options.setFailureAction(FailureAction.CANCEL_ALL);
- options.setFailureEmails(Arrays.asList(new String[] {"doo", null, "daa"}));
- options.setSuccessEmails(Arrays.asList(new String[] {"dee", null, "dae"}));
- options.setPipelineLevel(2);
- options.setPipelineExecutionId(3);
- options.setNotifyOnFirstFailure(true);
- options.setNotifyOnLastFailure(true);
-
- HashMap<String, String> flowProps = new HashMap<String,String>();
- flowProps.put("la", "fa");
- options.addAllFlowParameters(flowProps);
- exFlow.setExecutionOptions(options);
-
- Object obj = exFlow.toObject();
- String exFlowJSON = JSONUtils.toJSON(obj);
- @SuppressWarnings("unchecked")
- Map<String,Object> flowObjMap = (Map<String,Object>)JSONUtils.parseJSONFromString(exFlowJSON);
-
- ExecutableFlow parsedExFlow = ExecutableFlow.createExecutableFlowFromObject(flowObjMap);
- testEquals(exFlow, parsedExFlow);
- }
-
- @SuppressWarnings("rawtypes")
- @Test
- public void testExecutorFlowUpdates() throws Exception {
- Flow flow = project.getFlow("jobe");
- ExecutableFlow exFlow = new ExecutableFlow(project, flow);
- exFlow.setExecutionId(101);
-
- // Create copy of flow
- Object obj = exFlow.toObject();
- String exFlowJSON = JSONUtils.toJSON(obj);
- @SuppressWarnings("unchecked")
- Map<String,Object> flowObjMap = (Map<String,Object>)JSONUtils.parseJSONFromString(exFlowJSON);
- ExecutableFlow copyFlow = ExecutableFlow.createExecutableFlowFromObject(flowObjMap);
-
- testEquals(exFlow, copyFlow);
-
- ExecutableNode joba = exFlow.getExecutableNode("joba");
- ExecutableFlowBase jobb = (ExecutableFlowBase)(exFlow.getExecutableNode("jobb"));
- ExecutableFlowBase jobc = (ExecutableFlowBase)(exFlow.getExecutableNode("jobc"));
- ExecutableFlowBase jobd = (ExecutableFlowBase)(exFlow.getExecutableNode("jobd"));
- ExecutableNode jobe = exFlow.getExecutableNode("jobe");
- assertNotNull(joba, jobb, jobc, jobd, jobe);
-
- ExecutableNode jobbInnerFlowA = jobb.getExecutableNode("innerJobA");
- ExecutableNode jobbInnerFlowB = jobb.getExecutableNode("innerJobB");
- ExecutableNode jobbInnerFlowC = jobb.getExecutableNode("innerJobC");
- ExecutableNode jobbInnerFlow = jobb.getExecutableNode("innerFlow");
- assertNotNull(jobbInnerFlowA, jobbInnerFlowB, jobbInnerFlowC, jobbInnerFlow);
-
- ExecutableNode jobcInnerFlowA = jobc.getExecutableNode("innerJobA");
- ExecutableNode jobcInnerFlowB = jobc.getExecutableNode("innerJobB");
- ExecutableNode jobcInnerFlowC = jobc.getExecutableNode("innerJobC");
- ExecutableNode jobcInnerFlow = jobc.getExecutableNode("innerFlow");
- assertNotNull(jobcInnerFlowA, jobcInnerFlowB, jobcInnerFlowC, jobcInnerFlow);
-
- ExecutableNode jobdInnerFlowA = jobd.getExecutableNode("innerJobA");
- ExecutableNode jobdInnerFlowB = jobd.getExecutableNode("innerJobB");
- ExecutableNode jobdInnerFlowC = jobd.getExecutableNode("innerJobC");
- ExecutableNode jobdInnerFlow = jobd.getExecutableNode("innerFlow");
- assertNotNull(jobdInnerFlowA, jobdInnerFlowB, jobdInnerFlowC, jobdInnerFlow);
-
- exFlow.setEndTime(1000);
- exFlow.setStartTime(500);
- exFlow.setStatus(Status.RUNNING);
- exFlow.setUpdateTime(133);
-
- // Change one job and see if it updates
- long time = System.currentTimeMillis();
- jobe.setEndTime(time);
- jobe.setUpdateTime(time);
- jobe.setStatus(Status.DISABLED);
- jobe.setStartTime(time - 1);
- // Should be one node that was changed
- Map<String,Object> updateObject = exFlow.toUpdateObject(0);
- Assert.assertEquals(1, ((List)(updateObject.get("nodes"))).size());
- // Reapplying should give equal results.
- copyFlow.applyUpdateObject(updateObject);
- testEquals(exFlow, copyFlow);
-
- // This update shouldn't provide any results
- updateObject = exFlow.toUpdateObject(System.currentTimeMillis());
- Assert.assertNull(updateObject.get("nodes"));
-
- // Change inner flow
- long currentTime = time + 1 ;
- jobbInnerFlowA.setEndTime(currentTime);
- jobbInnerFlowA.setUpdateTime(currentTime);
- jobbInnerFlowA.setStatus(Status.DISABLED);
- jobbInnerFlowA.setStartTime(currentTime - 100);
- // We should get 2 updates if we do a toUpdateObject using 0 as the start time
- updateObject = exFlow.toUpdateObject(0);
- Assert.assertEquals(2, ((List)(updateObject.get("nodes"))).size());
-
- // This should provide 1 update. That we can apply
- updateObject = exFlow.toUpdateObject(jobe.getUpdateTime());
- Assert.assertNotNull(updateObject.get("nodes"));
- Assert.assertEquals(1, ((List)(updateObject.get("nodes"))).size());
- copyFlow.applyUpdateObject(updateObject);
- testEquals(exFlow, copyFlow);
-
- // This shouldn't give any results anymore
- updateObject = exFlow.toUpdateObject(jobbInnerFlowA.getUpdateTime());
- Assert.assertNull(updateObject.get("nodes"));
- }
-
- private void assertNotNull(ExecutableNode ... nodes) {
- for (ExecutableNode node: nodes) {
- Assert.assertNotNull(node);
- }
- }
-
- public static void testEquals(ExecutableNode a, ExecutableNode b) {
- if (a instanceof ExecutableFlow) {
- if (b instanceof ExecutableFlow) {
- ExecutableFlow exA = (ExecutableFlow)a;
- ExecutableFlow exB = (ExecutableFlow)b;
-
- Assert.assertEquals(exA.getScheduleId(), exB.getScheduleId());
- Assert.assertEquals(exA.getProjectId(), exB.getProjectId());
- Assert.assertEquals(exA.getVersion(), exB.getVersion());
- Assert.assertEquals(exA.getSubmitTime(), exB.getSubmitTime());
- Assert.assertEquals(exA.getSubmitUser(), exB.getSubmitUser());
- Assert.assertEquals(exA.getExecutionPath(), exB.getExecutionPath());
-
- testEquals(exA.getExecutionOptions(), exB.getExecutionOptions());
- }
- else {
- Assert.fail("A is ExecutableFlow, but B is not");
- }
- }
-
- if (a instanceof ExecutableFlowBase) {
- if (b instanceof ExecutableFlowBase) {
- ExecutableFlowBase exA = (ExecutableFlowBase)a;
- ExecutableFlowBase exB = (ExecutableFlowBase)b;
-
- Assert.assertEquals(exA.getFlowId(), exB.getFlowId());
- Assert.assertEquals(exA.getExecutableNodes().size(), exB.getExecutableNodes().size());
-
- for(ExecutableNode nodeA : exA.getExecutableNodes()) {
- ExecutableNode nodeB = exB.getExecutableNode(nodeA.getId());
- Assert.assertNotNull(nodeB);
- Assert.assertEquals(a, nodeA.getParentFlow());
- Assert.assertEquals(b, nodeB.getParentFlow());
-
- testEquals(nodeA, nodeB);
- }
- }
- else {
- Assert.fail("A is ExecutableFlowBase, but B is not");
- }
- }
-
- Assert.assertEquals(a.getId(), b.getId());
- Assert.assertEquals(a.getStatus(), b.getStatus());
- Assert.assertEquals(a.getStartTime(), b.getStartTime());
- Assert.assertEquals(a.getEndTime(), b.getEndTime());
- Assert.assertEquals(a.getUpdateTime(), b.getUpdateTime());
- Assert.assertEquals(a.getAttempt(), b.getAttempt());
-
- Assert.assertEquals(a.getJobSource(), b.getJobSource());
- Assert.assertEquals(a.getPropsSource(), b.getPropsSource());
- Assert.assertEquals(a.getInNodes(), a.getInNodes());
- Assert.assertEquals(a.getOutNodes(), a.getOutNodes());
- }
-
- public static void testEquals(ExecutionOptions optionsA, ExecutionOptions optionsB) {
- Assert.assertEquals(optionsA.getConcurrentOption(), optionsB.getConcurrentOption());
- Assert.assertEquals(optionsA.getNotifyOnFirstFailure(), optionsB.getNotifyOnFirstFailure());
- Assert.assertEquals(optionsA.getNotifyOnLastFailure(), optionsB.getNotifyOnLastFailure());
- Assert.assertEquals(optionsA.getFailureAction(), optionsB.getFailureAction());
- Assert.assertEquals(optionsA.getPipelineExecutionId(), optionsB.getPipelineExecutionId());
- Assert.assertEquals(optionsA.getPipelineLevel(), optionsB.getPipelineLevel());
- Assert.assertEquals(optionsA.isFailureEmailsOverridden(), optionsB.isFailureEmailsOverridden());
- Assert.assertEquals(optionsA.isSuccessEmailsOverridden(), optionsB.isSuccessEmailsOverridden());
-
- testDisabledEquals(optionsA.getDisabledJobs(), optionsB.getDisabledJobs());
- testEquals(optionsA.getSuccessEmails(), optionsB.getSuccessEmails());
- testEquals(optionsA.getFailureEmails(), optionsB.getFailureEmails());
- testEquals(optionsA.getFlowParameters(), optionsB.getFlowParameters());
- }
-
- public static void testEquals(Set<String> a, Set<String> b) {
- if (a == b) {
- return;
- }
-
- if (a == null || b == null) {
- Assert.fail();
- }
-
- Assert.assertEquals(a.size(), b.size());
-
- Iterator<String> iterA = a.iterator();
-
- while(iterA.hasNext()) {
- String aStr = iterA.next();
- Assert.assertTrue(b.contains(aStr));
- }
- }
-
- public static void testEquals(List<String> a, List<String> b) {
- if (a == b) {
- return;
- }
-
- if (a == null || b == null) {
- Assert.fail();
- }
-
- Assert.assertEquals(a.size(), b.size());
-
- Iterator<String> iterA = a.iterator();
- Iterator<String> iterB = b.iterator();
-
- while(iterA.hasNext()) {
- String aStr = iterA.next();
- String bStr = iterB.next();
- Assert.assertEquals(aStr, bStr);
- }
- }
-
- @SuppressWarnings("unchecked")
- public static void testDisabledEquals(List<Object> a, List<Object> b) {
- if (a == b) {
- return;
- }
-
- if (a == null || b == null) {
- Assert.fail();
- }
-
- Assert.assertEquals(a.size(), b.size());
-
- Iterator<Object> iterA = a.iterator();
- Iterator<Object> iterB = b.iterator();
-
- while(iterA.hasNext()) {
- Object aStr = iterA.next();
- Object bStr = iterB.next();
-
- if (aStr instanceof Map && bStr instanceof Map) {
- Map<String, Object> aMap = (Map<String, Object>)aStr;
- Map<String, Object> bMap = (Map<String, Object>)bStr;
-
- Assert.assertEquals((String)aMap.get("id"), (String)bMap.get("id"));
- testDisabledEquals((List<Object>)aMap.get("children"), (List<Object>)bMap.get("children"));
- }
- else {
- Assert.assertEquals(aStr, bStr);
- }
- }
- }
-
-
- public static void testEquals(Map<String, String> a, Map<String, String> b) {
- if (a == b) {
- return;
- }
-
- if (a == null || b == null) {
- Assert.fail();
- }
-
- Assert.assertEquals(a.size(), b.size());
-
- for (String key: a.keySet()) {
- Assert.assertEquals(a.get(key), b.get(key));
- }
- }
+ }
}
diff --git a/unit/java/azkaban/test/executor/InteractiveTestJob.java b/unit/java/azkaban/test/executor/InteractiveTestJob.java
index 3c385de..0bea8cb 100644
--- a/unit/java/azkaban/test/executor/InteractiveTestJob.java
+++ b/unit/java/azkaban/test/executor/InteractiveTestJob.java
@@ -9,85 +9,87 @@ import azkaban.jobExecutor.AbstractProcessJob;
import azkaban.utils.Props;
public class InteractiveTestJob extends AbstractProcessJob {
- private static ConcurrentHashMap<String, InteractiveTestJob> testJobs = new ConcurrentHashMap<String, InteractiveTestJob>();
- private Props generatedProperties = new Props();
- private boolean isWaiting = true;
- private boolean succeed = true;
+ private static ConcurrentHashMap<String, InteractiveTestJob> testJobs =
+ new ConcurrentHashMap<String, InteractiveTestJob>();
+ private Props generatedProperties = new Props();
+ private boolean isWaiting = true;
+ private boolean succeed = true;
- public static InteractiveTestJob getTestJob(String name) {
- return testJobs.get(name);
- }
-
- public static void clearTestJobs() {
- testJobs.clear();
- }
-
- public InteractiveTestJob(String jobId, Props sysProps, Props jobProps, Logger log) {
- super(jobId, sysProps, jobProps, log);
- }
+ public static InteractiveTestJob getTestJob(String name) {
+ return testJobs.get(name);
+ }
- @Override
- public void run() throws Exception {
- String nestedFlowPath = this.getJobProps().get(CommonJobProperties.NESTED_FLOW_PATH);
- String groupName = this.getJobProps().getString("group", null);
- String id = nestedFlowPath == null ? this.getId() : nestedFlowPath;
- if (groupName != null) {
- id = groupName + ":" + id;
- }
- testJobs.put(id, this);
-
- while(isWaiting) {
- synchronized(this) {
- try {
- wait(30000);
- } catch (InterruptedException e) {
- }
-
- if (!isWaiting) {
- if (!succeed) {
- throw new RuntimeException("Forced failure of " + getId());
- }
- else {
- info("Job " + getId() + " succeeded.");
- }
- }
- }
- }
- }
-
- public void failJob() {
- synchronized(this) {
- succeed = false;
- isWaiting = false;
- this.notify();
- }
- }
-
- public void succeedJob() {
- synchronized(this) {
- succeed = true;
- isWaiting = false;
- this.notify();
- }
- }
-
- public void succeedJob(Props generatedProperties) {
- synchronized(this) {
- this.generatedProperties = generatedProperties;
- succeed = true;
- isWaiting = false;
- this.notify();
- }
- }
-
- @Override
- public Props getJobGeneratedProperties() {
- return generatedProperties;
- }
+ public static void clearTestJobs() {
+ testJobs.clear();
+ }
- @Override
- public void cancel() throws InterruptedException {
- info("Killing job");
- failJob();
- }
+ public InteractiveTestJob(String jobId, Props sysProps, Props jobProps,
+ Logger log) {
+ super(jobId, sysProps, jobProps, log);
+ }
+
+ @Override
+ public void run() throws Exception {
+ String nestedFlowPath =
+ this.getJobProps().get(CommonJobProperties.NESTED_FLOW_PATH);
+ String groupName = this.getJobProps().getString("group", null);
+ String id = nestedFlowPath == null ? this.getId() : nestedFlowPath;
+ if (groupName != null) {
+ id = groupName + ":" + id;
+ }
+ testJobs.put(id, this);
+
+ while (isWaiting) {
+ synchronized (this) {
+ try {
+ wait(30000);
+ } catch (InterruptedException e) {
+ }
+
+ if (!isWaiting) {
+ if (!succeed) {
+ throw new RuntimeException("Forced failure of " + getId());
+ } else {
+ info("Job " + getId() + " succeeded.");
+ }
+ }
+ }
+ }
+ }
+
+ public void failJob() {
+ synchronized (this) {
+ succeed = false;
+ isWaiting = false;
+ this.notify();
+ }
+ }
+
+ public void succeedJob() {
+ synchronized (this) {
+ succeed = true;
+ isWaiting = false;
+ this.notify();
+ }
+ }
+
+ public void succeedJob(Props generatedProperties) {
+ synchronized (this) {
+ this.generatedProperties = generatedProperties;
+ succeed = true;
+ isWaiting = false;
+ this.notify();
+ }
+ }
+
+ @Override
+ public Props getJobGeneratedProperties() {
+ return generatedProperties;
+ }
+
+ @Override
+ public void cancel() throws InterruptedException {
+ info("Killing job");
+ failJob();
+ }
}
unit/java/azkaban/test/executor/JavaJob.java 148(+74 -74)
diff --git a/unit/java/azkaban/test/executor/JavaJob.java b/unit/java/azkaban/test/executor/JavaJob.java
index 22166cd..6615433 100644
--- a/unit/java/azkaban/test/executor/JavaJob.java
+++ b/unit/java/azkaban/test/executor/JavaJob.java
@@ -27,78 +27,78 @@ import azkaban.utils.Props;
public class JavaJob extends JavaProcessJob {
- public static final String RUN_METHOD_PARAM = "method.run";
- public static final String CANCEL_METHOD_PARAM = "method.cancel";
- public static final String PROGRESS_METHOD_PARAM = "method.progress";
-
- public static final String JOB_CLASS = "job.class";
- public static final String DEFAULT_CANCEL_METHOD = "cancel";
- public static final String DEFAULT_RUN_METHOD = "run";
- public static final String DEFAULT_PROGRESS_METHOD = "getProgress";
-
- private String _runMethod;
- private String _cancelMethod;
- private String _progressMethod;
-
- private Object _javaObject = null;
- private String props;
-
- public JavaJob(String jobid, Props sysProps, Props jobProps, Logger log) {
- super(jobid, sysProps, new Props(sysProps, jobProps), log);
- }
-
- @Override
- protected List<String> getClassPaths() {
- List<String> classPath = super.getClassPaths();
-
- classPath.add(getSourcePathFromClass(JavaJobRunnerMain.class));
- classPath.add(getSourcePathFromClass(Props.class));
-
- String loggerPath = getSourcePathFromClass(org.apache.log4j.Logger.class);
- if (!classPath.contains(loggerPath)) {
- classPath.add(loggerPath);
- }
-
- // Add hadoop home to classpath
- String hadoopHome = System.getenv("HADOOP_HOME");
- if (hadoopHome == null) {
- info("HADOOP_HOME not set, using default hadoop config.");
- } else {
- info("Using hadoop config found in " + hadoopHome);
- classPath.add(new File(hadoopHome, "conf").getPath());
- }
- return classPath;
- }
-
- private static String getSourcePathFromClass(Class<?> containedClass) {
- File file = new File(containedClass.getProtectionDomain().getCodeSource().getLocation().getPath());
-
- if (!file.isDirectory() && file.getName().endsWith(".class")) {
- String name = containedClass.getName();
- StringTokenizer tokenizer = new StringTokenizer(name, ".");
- while(tokenizer.hasMoreTokens()) {
- tokenizer.nextElement();
- file = file.getParentFile();
- }
- return file.getPath();
- }
- else {
- return containedClass.getProtectionDomain().getCodeSource().getLocation().getPath();
- }
- }
-
- @Override
- protected String getJavaClass() {
- return JavaJobRunnerMain.class.getName();
- }
-
- @Override
- public String toString() {
- return "JavaJob{" + "_runMethod='" + _runMethod + '\''
- + ", _cancelMethod='" + _cancelMethod + '\''
- + ", _progressMethod='" + _progressMethod + '\''
- + ", _javaObject=" + _javaObject + ", props="
- + props + '}';
- }
+ public static final String RUN_METHOD_PARAM = "method.run";
+ public static final String CANCEL_METHOD_PARAM = "method.cancel";
+ public static final String PROGRESS_METHOD_PARAM = "method.progress";
+
+ public static final String JOB_CLASS = "job.class";
+ public static final String DEFAULT_CANCEL_METHOD = "cancel";
+ public static final String DEFAULT_RUN_METHOD = "run";
+ public static final String DEFAULT_PROGRESS_METHOD = "getProgress";
+
+ private String _runMethod;
+ private String _cancelMethod;
+ private String _progressMethod;
+
+ private Object _javaObject = null;
+ private String props;
+
+ public JavaJob(String jobid, Props sysProps, Props jobProps, Logger log) {
+ super(jobid, sysProps, new Props(sysProps, jobProps), log);
+ }
+
+ @Override
+ protected List<String> getClassPaths() {
+ List<String> classPath = super.getClassPaths();
+
+ classPath.add(getSourcePathFromClass(JavaJobRunnerMain.class));
+ classPath.add(getSourcePathFromClass(Props.class));
+
+ String loggerPath = getSourcePathFromClass(org.apache.log4j.Logger.class);
+ if (!classPath.contains(loggerPath)) {
+ classPath.add(loggerPath);
+ }
+
+ // Add hadoop home to classpath
+ String hadoopHome = System.getenv("HADOOP_HOME");
+ if (hadoopHome == null) {
+ info("HADOOP_HOME not set, using default hadoop config.");
+ } else {
+ info("Using hadoop config found in " + hadoopHome);
+ classPath.add(new File(hadoopHome, "conf").getPath());
+ }
+ return classPath;
+ }
+
+ private static String getSourcePathFromClass(Class<?> containedClass) {
+ File file =
+ new File(containedClass.getProtectionDomain().getCodeSource()
+ .getLocation().getPath());
+
+ if (!file.isDirectory() && file.getName().endsWith(".class")) {
+ String name = containedClass.getName();
+ StringTokenizer tokenizer = new StringTokenizer(name, ".");
+ while (tokenizer.hasMoreTokens()) {
+ tokenizer.nextElement();
+ file = file.getParentFile();
+ }
+ return file.getPath();
+ } else {
+ return containedClass.getProtectionDomain().getCodeSource().getLocation()
+ .getPath();
+ }
+ }
+
+ @Override
+ protected String getJavaClass() {
+ return JavaJobRunnerMain.class.getName();
+ }
+
+ @Override
+ public String toString() {
+ return "JavaJob{" + "_runMethod='" + _runMethod + '\''
+ + ", _cancelMethod='" + _cancelMethod + '\'' + ", _progressMethod='"
+ + _progressMethod + '\'' + ", _javaObject=" + _javaObject + ", props="
+ + props + '}';
+ }
}
-
unit/java/azkaban/test/executor/JavaJobRunnerMain.java 535(+280 -255)
diff --git a/unit/java/azkaban/test/executor/JavaJobRunnerMain.java b/unit/java/azkaban/test/executor/JavaJobRunnerMain.java
index d1766e6..dc68337 100644
--- a/unit/java/azkaban/test/executor/JavaJobRunnerMain.java
+++ b/unit/java/azkaban/test/executor/JavaJobRunnerMain.java
@@ -1,5 +1,3 @@
-package azkaban.test.executor;
-
/*
* Copyright 2012 LinkedIn, Inc
*
@@ -16,6 +14,8 @@ package azkaban.test.executor;
* the License.
*/
+package azkaban.test.executor;
+
import azkaban.jobExecutor.ProcessJob;
import azkaban.utils.Props;
@@ -40,258 +40,283 @@ import java.util.Properties;
public class JavaJobRunnerMain {
- public static final String JOB_CLASS = "job.class";
- public static final String DEFAULT_RUN_METHOD = "run";
- public static final String DEFAULT_CANCEL_METHOD = "cancel";
-
- // This is the Job interface method to get the properties generated by the
- // job.
- public static final String GET_GENERATED_PROPERTIES_METHOD = "getJobGeneratedProperties";
-
- public static final String CANCEL_METHOD_PARAM = "method.cancel";
- public static final String RUN_METHOD_PARAM = "method.run";
- public static final String[] PROPS_CLASSES = new String[] { "azkaban.utils.Props", "azkaban.common.utils.Props" };
-
- private static final Layout DEFAULT_LAYOUT = new PatternLayout("%p %m\n");
-
- public final Logger _logger;
-
- public String _cancelMethod;
- public String _jobName;
- public Object _javaObject;
- private boolean _isFinished = false;
-
- public static void main(String[] args) throws Exception {
- @SuppressWarnings("unused")
- JavaJobRunnerMain wrapper = new JavaJobRunnerMain();
- }
-
- public JavaJobRunnerMain() throws Exception {
- Runtime.getRuntime().addShutdownHook(new Thread() {
- public void run() {
- cancelJob();
- }
- });
-
- try {
- _jobName = System.getenv(ProcessJob.JOB_NAME_ENV);
- String propsFile = System.getenv(ProcessJob.JOB_PROP_ENV);
-
- _logger = Logger.getRootLogger();
- _logger.removeAllAppenders();
- ConsoleAppender appender = new ConsoleAppender(DEFAULT_LAYOUT);
- appender.activateOptions();
- _logger.addAppender(appender);
-
- Properties prop = new Properties();
- prop.load(new BufferedReader(new FileReader(propsFile)));
-
- _logger.info("Running job " + _jobName);
- String className = prop.getProperty(JOB_CLASS);
- if (className == null) {
- throw new Exception("Class name is not set.");
- }
- _logger.info("Class name " + className);
-
- // Create the object using proxy
-
- _javaObject = getObject(_jobName, className, prop, _logger);
-
- if (_javaObject == null) {
- _logger.info("Could not create java object to run job: " + className);
- throw new Exception("Could not create running object");
- }
-
- _cancelMethod = prop.getProperty(CANCEL_METHOD_PARAM, DEFAULT_CANCEL_METHOD);
-
- final String runMethod = prop.getProperty(RUN_METHOD_PARAM, DEFAULT_RUN_METHOD);
- _logger.info("Invoking method " + runMethod);
-
- _logger.info("Proxy check failed, not proxying run.");
- runMethod(_javaObject, runMethod);
-
- _isFinished = true;
-
- // Get the generated properties and store them to disk, to be read
- // by ProcessJob.
- try {
- final Method generatedPropertiesMethod = _javaObject.getClass().getMethod(
- GET_GENERATED_PROPERTIES_METHOD, new Class<?>[] {});
- Object outputGendProps = generatedPropertiesMethod.invoke(_javaObject, new Object[] {});
- if(outputGendProps != null) {
- final Method toPropertiesMethod = outputGendProps.getClass().getMethod("toProperties", new Class<?>[] {});
- Properties properties = (Properties)toPropertiesMethod.invoke(outputGendProps, new Object[] {});
-
- Props outputProps = new Props(null, properties);
- outputGeneratedProperties(outputProps);
- }
- else {
- outputGeneratedProperties(new Props());
- }
-
- } catch (NoSuchMethodException e) {
- _logger.info(String.format(
- "Apparently there isn't a method[%s] on object[%s], using empty Props object instead.",
- GET_GENERATED_PROPERTIES_METHOD, _javaObject));
- outputGeneratedProperties(new Props());
- }
- } catch (Exception e) {
- _isFinished = true;
- throw e;
- }
- }
-
- private void runMethod(Object obj, String runMethod) throws IllegalAccessException, InvocationTargetException,
- NoSuchMethodException {
- obj.getClass().getMethod(runMethod, new Class<?>[] {}).invoke(obj);
- }
-
- private void outputGeneratedProperties(Props outputProperties) {
-
- if (outputProperties == null) {
- _logger.info(" no gend props");
- return;
- }
- for (String key : outputProperties.getKeySet()) {
- _logger.info(" gend prop " + key + " value:" + outputProperties.get(key));
- }
-
- String outputFileStr = System.getenv(ProcessJob.JOB_OUTPUT_PROP_FILE);
- if (outputFileStr == null) {
- return;
- }
-
- _logger.info("Outputting generated properties to " + outputFileStr);
-
- Map<String, String> properties = new LinkedHashMap<String, String>();
- for (String key : outputProperties.getKeySet()) {
- properties.put(key, outputProperties.get(key));
- }
-
- OutputStream writer = null;
- try {
- writer = new BufferedOutputStream(new FileOutputStream(outputFileStr));
-
- // Manually serialize into JSON instead of adding org.json to
- // external classpath. Reduces one dependency for something that's essentially easy.
- writer.write("{\n".getBytes());
- for (Map.Entry<String, String> entry : properties.entrySet()) {
- writer.write(String.format(" \"%s\":\"%s\",\n",
- entry.getKey().replace("\"", "\\\\\""),
- entry.getValue().replace("\"", "\\\\\"")).getBytes());
- }
- writer.write("}".getBytes());
- } catch (Exception e) {
- new RuntimeException("Unable to store output properties to: " + outputFileStr);
- } finally {
- try {
- if (writer != null) {
- writer.close();
- }
- } catch (IOException e) {
- }
- }
- }
-
- public void cancelJob() {
- if (_isFinished) {
- return;
- }
- _logger.info("Attempting to call cancel on this job");
- if (_javaObject != null) {
- Method method = null;
-
- try {
- method = _javaObject.getClass().getMethod(_cancelMethod);
- } catch (SecurityException e) {
- } catch (NoSuchMethodException e) {
- }
-
- if (method != null)
- try {
- method.invoke(_javaObject);
- } catch (Exception e) {
- if (_logger != null) {
- _logger.error("Cancel method failed! ", e);
- }
- }
- else {
- throw new RuntimeException("Job " + _jobName + " does not have cancel method " + _cancelMethod);
- }
- }
- }
-
- private static Object getObject(String jobName, String className, Properties properties, Logger logger)
- throws Exception {
-
- Class<?> runningClass = JavaJobRunnerMain.class.getClassLoader().loadClass(className);
-
- if (runningClass == null) {
- throw new Exception("Class " + className + " was not found. Cannot run job.");
- }
-
- Class<?> propsClass = null;
- for (String propClassName : PROPS_CLASSES) {
- try {
- propsClass = JavaJobRunnerMain.class.getClassLoader().loadClass(propClassName);
- }
- catch (ClassNotFoundException e) {
- }
-
- if (propsClass != null && getConstructor(runningClass, String.class, propsClass) != null) {
- //is this the props class
- break;
- }
- propsClass = null;
- }
-
- Object obj = null;
- if (propsClass != null && getConstructor(runningClass, String.class, propsClass) != null) {
- // Create props class
- Constructor<?> propsCon = getConstructor(propsClass, propsClass, Properties[].class);
- Object props = propsCon.newInstance(null, new Properties[] { properties });
-
- Constructor<?> con = getConstructor(runningClass, String.class, propsClass);
- logger.info("Constructor found " + con.toGenericString());
- obj = con.newInstance(jobName, props);
- } else if (getConstructor(runningClass, String.class, Properties.class) != null) {
-
- Constructor<?> con = getConstructor(runningClass, String.class, Properties.class);
- logger.info("Constructor found " + con.toGenericString());
- obj = con.newInstance(jobName, properties);
- } else if (getConstructor(runningClass, String.class, Map.class) != null) {
- Constructor<?> con = getConstructor(runningClass, String.class, Map.class);
- logger.info("Constructor found " + con.toGenericString());
-
- HashMap<Object, Object> map = new HashMap<Object, Object>();
- for (Map.Entry<Object, Object> entry : properties.entrySet()) {
- map.put(entry.getKey(), entry.getValue());
- }
- obj = con.newInstance(jobName, map);
- } else if (getConstructor(runningClass, String.class) != null) {
- Constructor<?> con = getConstructor(runningClass, String.class);
- logger.info("Constructor found " + con.toGenericString());
- obj = con.newInstance(jobName);
- } else if (getConstructor(runningClass) != null) {
- Constructor<?> con = getConstructor(runningClass);
- logger.info("Constructor found " + con.toGenericString());
- obj = con.newInstance();
- } else {
- logger.error("Constructor not found. Listing available Constructors.");
- for (Constructor<?> c : runningClass.getConstructors()) {
- logger.info(c.toGenericString());
- }
- }
- return obj;
- }
-
- private static Constructor<?> getConstructor(Class<?> c, Class<?>... args) {
- try {
- Constructor<?> cons = c.getConstructor(args);
- return cons;
- } catch (NoSuchMethodException e) {
- return null;
- }
- }
+ public static final String JOB_CLASS = "job.class";
+ public static final String DEFAULT_RUN_METHOD = "run";
+ public static final String DEFAULT_CANCEL_METHOD = "cancel";
+
+ // This is the Job interface method to get the properties generated by the
+ // job.
+ public static final String GET_GENERATED_PROPERTIES_METHOD =
+ "getJobGeneratedProperties";
+
+ public static final String CANCEL_METHOD_PARAM = "method.cancel";
+ public static final String RUN_METHOD_PARAM = "method.run";
+ public static final String[] PROPS_CLASSES = new String[] {
+ "azkaban.utils.Props", "azkaban.common.utils.Props" };
+
+ private static final Layout DEFAULT_LAYOUT = new PatternLayout("%p %m\n");
+
+ public final Logger _logger;
+
+ public String _cancelMethod;
+ public String _jobName;
+ public Object _javaObject;
+ private boolean _isFinished = false;
+
+ public static void main(String[] args) throws Exception {
+ @SuppressWarnings("unused")
+ JavaJobRunnerMain wrapper = new JavaJobRunnerMain();
+ }
+
+ public JavaJobRunnerMain() throws Exception {
+ Runtime.getRuntime().addShutdownHook(new Thread() {
+ public void run() {
+ cancelJob();
+ }
+ });
+
+ try {
+ _jobName = System.getenv(ProcessJob.JOB_NAME_ENV);
+ String propsFile = System.getenv(ProcessJob.JOB_PROP_ENV);
+
+ _logger = Logger.getRootLogger();
+ _logger.removeAllAppenders();
+ ConsoleAppender appender = new ConsoleAppender(DEFAULT_LAYOUT);
+ appender.activateOptions();
+ _logger.addAppender(appender);
+
+ Properties prop = new Properties();
+ prop.load(new BufferedReader(new FileReader(propsFile)));
+
+ _logger.info("Running job " + _jobName);
+ String className = prop.getProperty(JOB_CLASS);
+ if (className == null) {
+ throw new Exception("Class name is not set.");
+ }
+ _logger.info("Class name " + className);
+
+ // Create the object using proxy
+
+ _javaObject = getObject(_jobName, className, prop, _logger);
+
+ if (_javaObject == null) {
+ _logger.info("Could not create java object to run job: " + className);
+ throw new Exception("Could not create running object");
+ }
+
+ _cancelMethod =
+ prop.getProperty(CANCEL_METHOD_PARAM, DEFAULT_CANCEL_METHOD);
+
+ final String runMethod =
+ prop.getProperty(RUN_METHOD_PARAM, DEFAULT_RUN_METHOD);
+ _logger.info("Invoking method " + runMethod);
+
+ _logger.info("Proxy check failed, not proxying run.");
+ runMethod(_javaObject, runMethod);
+
+ _isFinished = true;
+
+ // Get the generated properties and store them to disk, to be read
+ // by ProcessJob.
+ try {
+ final Method generatedPropertiesMethod =
+ _javaObject.getClass().getMethod(GET_GENERATED_PROPERTIES_METHOD,
+ new Class<?>[] {});
+ Object outputGendProps =
+ generatedPropertiesMethod.invoke(_javaObject, new Object[] {});
+ if (outputGendProps != null) {
+ final Method toPropertiesMethod =
+ outputGendProps.getClass().getMethod("toProperties",
+ new Class<?>[] {});
+ Properties properties =
+ (Properties) toPropertiesMethod.invoke(outputGendProps,
+ new Object[] {});
+
+ Props outputProps = new Props(null, properties);
+ outputGeneratedProperties(outputProps);
+ } else {
+ outputGeneratedProperties(new Props());
+ }
+
+ } catch (NoSuchMethodException e) {
+ _logger
+ .info(String
+ .format(
+ "Apparently there isn't a method[%s] on object[%s], using empty Props object instead.",
+ GET_GENERATED_PROPERTIES_METHOD, _javaObject));
+ outputGeneratedProperties(new Props());
+ }
+ } catch (Exception e) {
+ _isFinished = true;
+ throw e;
+ }
+ }
+
+ private void runMethod(Object obj, String runMethod)
+ throws IllegalAccessException, InvocationTargetException,
+ NoSuchMethodException {
+ obj.getClass().getMethod(runMethod, new Class<?>[] {}).invoke(obj);
+ }
+
+ private void outputGeneratedProperties(Props outputProperties) {
+
+ if (outputProperties == null) {
+ _logger.info(" no gend props");
+ return;
+ }
+ for (String key : outputProperties.getKeySet()) {
+ _logger
+ .info(" gend prop " + key + " value:" + outputProperties.get(key));
+ }
+
+ String outputFileStr = System.getenv(ProcessJob.JOB_OUTPUT_PROP_FILE);
+ if (outputFileStr == null) {
+ return;
+ }
+
+ _logger.info("Outputting generated properties to " + outputFileStr);
+
+ Map<String, String> properties = new LinkedHashMap<String, String>();
+ for (String key : outputProperties.getKeySet()) {
+ properties.put(key, outputProperties.get(key));
+ }
+
+ OutputStream writer = null;
+ try {
+ writer = new BufferedOutputStream(new FileOutputStream(outputFileStr));
+
+ // Manually serialize into JSON instead of adding org.json to
+ // external classpath. Reduces one dependency for something that's
+ // essentially easy.
+ writer.write("{\n".getBytes());
+ for (Map.Entry<String, String> entry : properties.entrySet()) {
+ writer.write(String.format(" \"%s\":\"%s\",\n",
+ entry.getKey().replace("\"", "\\\\\""),
+ entry.getValue().replace("\"", "\\\\\"")).getBytes());
+ }
+ writer.write("}".getBytes());
+ } catch (Exception e) {
+ new RuntimeException("Unable to store output properties to: "
+ + outputFileStr);
+ } finally {
+ try {
+ if (writer != null) {
+ writer.close();
+ }
+ } catch (IOException e) {
+ }
+ }
+ }
+
+ public void cancelJob() {
+ if (_isFinished) {
+ return;
+ }
+ _logger.info("Attempting to call cancel on this job");
+ if (_javaObject != null) {
+ Method method = null;
+
+ try {
+ method = _javaObject.getClass().getMethod(_cancelMethod);
+ } catch (SecurityException e) {
+ } catch (NoSuchMethodException e) {
+ }
+
+ if (method != null)
+ try {
+ method.invoke(_javaObject);
+ } catch (Exception e) {
+ if (_logger != null) {
+ _logger.error("Cancel method failed! ", e);
+ }
+ }
+ else {
+ throw new RuntimeException("Job " + _jobName
+ + " does not have cancel method " + _cancelMethod);
+ }
+ }
+ }
+
+ private static Object getObject(String jobName, String className,
+ Properties properties, Logger logger) throws Exception {
+
+ Class<?> runningClass =
+ JavaJobRunnerMain.class.getClassLoader().loadClass(className);
+
+ if (runningClass == null) {
+ throw new Exception("Class " + className
+ + " was not found. Cannot run job.");
+ }
+
+ Class<?> propsClass = null;
+ for (String propClassName : PROPS_CLASSES) {
+ try {
+ propsClass =
+ JavaJobRunnerMain.class.getClassLoader().loadClass(propClassName);
+ } catch (ClassNotFoundException e) {
+ }
+
+ if (propsClass != null
+ && getConstructor(runningClass, String.class, propsClass) != null) {
+ // is this the props class
+ break;
+ }
+ propsClass = null;
+ }
+
+ Object obj = null;
+ if (propsClass != null
+ && getConstructor(runningClass, String.class, propsClass) != null) {
+ // Create props class
+ Constructor<?> propsCon =
+ getConstructor(propsClass, propsClass, Properties[].class);
+ Object props =
+ propsCon.newInstance(null, new Properties[] { properties });
+
+ Constructor<?> con =
+ getConstructor(runningClass, String.class, propsClass);
+ logger.info("Constructor found " + con.toGenericString());
+ obj = con.newInstance(jobName, props);
+ } else if (getConstructor(runningClass, String.class, Properties.class) != null) {
+
+ Constructor<?> con =
+ getConstructor(runningClass, String.class, Properties.class);
+ logger.info("Constructor found " + con.toGenericString());
+ obj = con.newInstance(jobName, properties);
+ } else if (getConstructor(runningClass, String.class, Map.class) != null) {
+ Constructor<?> con =
+ getConstructor(runningClass, String.class, Map.class);
+ logger.info("Constructor found " + con.toGenericString());
+
+ HashMap<Object, Object> map = new HashMap<Object, Object>();
+ for (Map.Entry<Object, Object> entry : properties.entrySet()) {
+ map.put(entry.getKey(), entry.getValue());
+ }
+ obj = con.newInstance(jobName, map);
+ } else if (getConstructor(runningClass, String.class) != null) {
+ Constructor<?> con = getConstructor(runningClass, String.class);
+ logger.info("Constructor found " + con.toGenericString());
+ obj = con.newInstance(jobName);
+ } else if (getConstructor(runningClass) != null) {
+ Constructor<?> con = getConstructor(runningClass);
+ logger.info("Constructor found " + con.toGenericString());
+ obj = con.newInstance();
+ } else {
+ logger.error("Constructor not found. Listing available Constructors.");
+ for (Constructor<?> c : runningClass.getConstructors()) {
+ logger.info(c.toGenericString());
+ }
+ }
+ return obj;
+ }
+
+ private static Constructor<?> getConstructor(Class<?> c, Class<?>... args) {
+ try {
+ Constructor<?> cons = c.getConstructor(args);
+ return cons;
+ } catch (NoSuchMethodException e) {
+ return null;
+ }
+ }
}
unit/java/azkaban/test/executor/JdbcExecutorLoaderTest.java 909(+475 -434)
diff --git a/unit/java/azkaban/test/executor/JdbcExecutorLoaderTest.java b/unit/java/azkaban/test/executor/JdbcExecutorLoaderTest.java
index 6a2e399..4923c1e 100644
--- a/unit/java/azkaban/test/executor/JdbcExecutorLoaderTest.java
+++ b/unit/java/azkaban/test/executor/JdbcExecutorLoaderTest.java
@@ -6,7 +6,7 @@ import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
-import java.util.HashSet;
+import java.util.HashSet;
import java.util.Map;
import javax.sql.DataSource;
@@ -19,7 +19,6 @@ import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
-
import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutableJobInfo;
import azkaban.executor.ExecutableNode;
@@ -38,436 +37,478 @@ import azkaban.utils.Pair;
import azkaban.utils.Props;
public class JdbcExecutorLoaderTest {
- private static boolean testDBExists;
- //@TODO remove this and turn into local host.
- private static final String host = "cyu-ld.linkedin.biz";
- private static final int port = 3306;
- private static final String database = "azkaban2";
- private static final String user = "azkaban";
- private static final String password = "azkaban";
- private static final int numConnections = 10;
-
- private File flowDir = new File("unit/executions/exectest1");
-
- @BeforeClass
- public static void setupDB() {
- DataSource dataSource = DataSourceUtils.getMySQLDataSource(host, port, database, user, password, numConnections);
- testDBExists = true;
-
- Connection connection = null;
- try {
- connection = dataSource.getConnection();
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- CountHandler countHandler = new CountHandler();
- QueryRunner runner = new QueryRunner();
- try {
- runner.query(connection, "SELECT COUNT(1) FROM active_executing_flows", countHandler);
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- try {
- runner.query(connection, "SELECT COUNT(1) FROM execution_flows", countHandler);
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- try {
- runner.query(connection, "SELECT COUNT(1) FROM execution_jobs", countHandler);
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- try {
- runner.query(connection, "SELECT COUNT(1) FROM execution_logs", countHandler);
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- DbUtils.closeQuietly(connection);
-
- clearDB();
- }
-
- private static void clearDB() {
- if (!testDBExists) {
- return;
- }
-
- DataSource dataSource = DataSourceUtils.getMySQLDataSource(host, port, database, user, password, numConnections);
- Connection connection = null;
- try {
- connection = dataSource.getConnection();
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- QueryRunner runner = new QueryRunner();
- try {
- runner.update(connection, "DELETE FROM active_executing_flows");
-
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- try {
- runner.update(connection, "DELETE FROM execution_flows");
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- try {
- runner.update(connection, "DELETE FROM execution_jobs");
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- try {
- runner.update(connection, "DELETE FROM execution_logs");
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- DbUtils.closeQuietly(connection);
- }
-
- @Test
- public void testUploadExecutionFlows() throws Exception {
- if (!isTestSetup()) {
- return;
- }
-
- ExecutorLoader loader = createLoader();
- ExecutableFlow flow = createExecutableFlow("exec1");
-
- loader.uploadExecutableFlow(flow);
-
- ExecutableFlow fetchFlow = loader.fetchExecutableFlow(flow.getExecutionId());
-
- // Shouldn't be the same object.
- Assert.assertTrue(flow != fetchFlow);
- Assert.assertEquals(flow.getExecutionId(), fetchFlow.getExecutionId());
- Assert.assertEquals(flow.getEndTime(), fetchFlow.getEndTime());
- Assert.assertEquals(flow.getStartTime(), fetchFlow.getStartTime());
- Assert.assertEquals(flow.getSubmitTime(), fetchFlow.getSubmitTime());
- Assert.assertEquals(flow.getFlowId(), fetchFlow.getFlowId());
- Assert.assertEquals(flow.getProjectId(), fetchFlow.getProjectId());
- Assert.assertEquals(flow.getVersion(), fetchFlow.getVersion());
- Assert.assertEquals(flow.getExecutionOptions().getFailureAction(), fetchFlow.getExecutionOptions().getFailureAction());
- Assert.assertEquals(new HashSet<String>(flow.getEndNodes()), new HashSet<String>(fetchFlow.getEndNodes()));
- }
-
- @Test
- public void testUpdateExecutionFlows() throws Exception {
- if (!isTestSetup()) {
- return;
- }
-
- ExecutorLoader loader = createLoader();
- ExecutableFlow flow = createExecutableFlow("exec1");
-
- loader.uploadExecutableFlow(flow);
-
- ExecutableFlow fetchFlow2 = loader.fetchExecutableFlow(flow.getExecutionId());
-
- fetchFlow2.setEndTime(System.currentTimeMillis());
- fetchFlow2.setStatus(Status.SUCCEEDED);
- loader.updateExecutableFlow(fetchFlow2);
- ExecutableFlow fetchFlow = loader.fetchExecutableFlow(flow.getExecutionId());
-
- // Shouldn't be the same object.
- Assert.assertTrue(flow != fetchFlow);
- Assert.assertEquals(flow.getExecutionId(), fetchFlow.getExecutionId());
- Assert.assertEquals(fetchFlow2.getEndTime(), fetchFlow.getEndTime());
- Assert.assertEquals(fetchFlow2.getStatus(), fetchFlow.getStatus());
- Assert.assertEquals(flow.getStartTime(), fetchFlow.getStartTime());
- Assert.assertEquals(flow.getSubmitTime(), fetchFlow.getSubmitTime());
- Assert.assertEquals(flow.getFlowId(), fetchFlow.getFlowId());
- Assert.assertEquals(flow.getProjectId(), fetchFlow.getProjectId());
- Assert.assertEquals(flow.getVersion(), fetchFlow.getVersion());
- Assert.assertEquals(flow.getExecutionOptions().getFailureAction(), fetchFlow.getExecutionOptions().getFailureAction());
- Assert.assertEquals(new HashSet<String>(flow.getEndNodes()), new HashSet<String>(fetchFlow.getEndNodes()));
- }
-
-
- @Test
- public void testUploadExecutableNode() throws Exception {
- if (!isTestSetup()) {
- return;
- }
-
- ExecutorLoader loader = createLoader();
- ExecutableFlow flow = createExecutableFlow(10, "exec1");
- flow.setExecutionId(10);
-
- File jobFile = new File(flowDir, "job10.job");
- Props props = new Props(null, jobFile);
- props.put("test","test2");
- ExecutableNode oldNode = flow.getExecutableNode("job10");
- oldNode.setStartTime(System.currentTimeMillis());
- loader.uploadExecutableNode(oldNode, props);
-
- ExecutableJobInfo info = loader.fetchJobInfo(10, "job10", 0);
- Assert.assertEquals(flow.getExecutionId(), info.getExecId());
- Assert.assertEquals(flow.getProjectId(), info.getProjectId());
- Assert.assertEquals(flow.getVersion(), info.getVersion());
- Assert.assertEquals(flow.getFlowId(), info.getFlowId());
- Assert.assertEquals(oldNode.getId(), info.getJobId());
- Assert.assertEquals(oldNode.getStatus(), info.getStatus());
- Assert.assertEquals(oldNode.getStartTime(), info.getStartTime());
- Assert.assertEquals("endTime = " + oldNode.getEndTime() + " info endTime = " + info.getEndTime(), oldNode.getEndTime(), info.getEndTime());
-
- // Fetch props
- Props outputProps = new Props();
- outputProps.put("hello", "output");
- oldNode.setOutputProps(outputProps);
- oldNode.setEndTime(System.currentTimeMillis());
- loader.updateExecutableNode(oldNode);
-
- Props fInputProps = loader.fetchExecutionJobInputProps(10, "job10");
- Props fOutputProps = loader.fetchExecutionJobOutputProps(10, "job10");
- Pair<Props,Props> inOutProps = loader.fetchExecutionJobProps(10, "job10");
-
- Assert.assertEquals(fInputProps.get("test"), "test2");
- Assert.assertEquals(fOutputProps.get("hello"), "output");
- Assert.assertEquals(inOutProps.getFirst().get("test"), "test2");
- Assert.assertEquals(inOutProps.getSecond().get("hello"), "output");
-
- }
-
- @Test
- public void testActiveReference() throws Exception {
- if (!isTestSetup()) {
- return;
- }
-
- ExecutorLoader loader = createLoader();
- ExecutableFlow flow1 = createExecutableFlow("exec1");
- loader.uploadExecutableFlow(flow1);
- ExecutionReference ref1 = new ExecutionReference(flow1.getExecutionId(), "test", 1);
- loader.addActiveExecutableReference(ref1);
-
- ExecutableFlow flow2 = createExecutableFlow("exec1");
- loader.uploadExecutableFlow(flow2);
- ExecutionReference ref2 = new ExecutionReference(flow2.getExecutionId(), "test", 1);
- loader.addActiveExecutableReference(ref2);
-
- ExecutableFlow flow3 = createExecutableFlow("exec1");
- loader.uploadExecutableFlow(flow3);
-
- Map<Integer, Pair<ExecutionReference, ExecutableFlow>> activeFlows1 = loader.fetchActiveFlows();
- ExecutableFlow flow1Result = activeFlows1.get(flow1.getExecutionId()).getSecond();
- Assert.assertNotNull(flow1Result);
- Assert.assertTrue(flow1 != flow1Result);
- Assert.assertEquals(flow1.getExecutionId(), flow1Result.getExecutionId());
- Assert.assertEquals(flow1.getEndTime(), flow1Result.getEndTime());
- Assert.assertEquals(flow1.getStartTime(), flow1Result.getStartTime());
- Assert.assertEquals(flow1.getSubmitTime(), flow1Result.getSubmitTime());
- Assert.assertEquals(flow1.getFlowId(), flow1Result.getFlowId());
- Assert.assertEquals(flow1.getProjectId(), flow1Result.getProjectId());
- Assert.assertEquals(flow1.getVersion(), flow1Result.getVersion());
- Assert.assertEquals(flow1.getExecutionOptions().getFailureAction(), flow1Result.getExecutionOptions().getFailureAction());
-
- ExecutableFlow flow1Result2 = activeFlows1.get(flow2.getExecutionId()).getSecond();
- Assert.assertNotNull(flow1Result2);
- Assert.assertTrue(flow2 != flow1Result2);
- Assert.assertEquals(flow2.getExecutionId(), flow1Result2.getExecutionId());
- Assert.assertEquals(flow2.getEndTime(), flow1Result2.getEndTime());
- Assert.assertEquals(flow2.getStartTime(), flow1Result2.getStartTime());
- Assert.assertEquals(flow2.getSubmitTime(), flow1Result2.getSubmitTime());
- Assert.assertEquals(flow2.getFlowId(), flow1Result2.getFlowId());
- Assert.assertEquals(flow2.getProjectId(), flow1Result2.getProjectId());
- Assert.assertEquals(flow2.getVersion(), flow1Result2.getVersion());
- Assert.assertEquals(flow2.getExecutionOptions().getFailureAction(), flow1Result2.getExecutionOptions().getFailureAction());
-
- loader.removeActiveExecutableReference(flow2.getExecutionId());
- Map<Integer, Pair<ExecutionReference,ExecutableFlow>> activeFlows2 = loader.fetchActiveFlows();
-
- Assert.assertTrue(activeFlows2.containsKey(flow1.getExecutionId()));
- Assert.assertFalse(activeFlows2.containsKey(flow3.getExecutionId()));
- Assert.assertFalse(activeFlows2.containsKey(flow2.getExecutionId()));
- }
-
- @Test
- public void testSmallUploadLog() throws ExecutorManagerException {
- File logDir = new File("unit/executions/logtest");
- File[] smalllog = {new File(logDir, "log1.log"), new File(logDir, "log2.log"), new File(logDir, "log3.log")};
-
- ExecutorLoader loader = createLoader();
- loader.uploadLogFile(1, "smallFiles", 0, smalllog);
-
- LogData data = loader.fetchLogs(1, "smallFiles", 0, 0, 50000);
- Assert.assertNotNull(data);
- Assert.assertEquals("Logs length is " + data.getLength(), data.getLength(), 53);
-
- System.out.println(data.toString());
-
- LogData data2 = loader.fetchLogs(1, "smallFiles", 0, 10, 20);
- System.out.println(data2.toString());
- Assert.assertNotNull(data2);
- Assert.assertEquals("Logs length is " + data2.getLength(), data2.getLength(), 20);
-
- }
-
- @Test
- public void testLargeUploadLog() throws ExecutorManagerException {
- File logDir = new File("unit/executions/logtest");
-
- // Multiple of 255 for Henry the Eigth
- File[] largelog = {new File(logDir, "largeLog1.log"), new File(logDir, "largeLog2.log"), new File(logDir, "largeLog3.log")};
-
- ExecutorLoader loader = createLoader();
- loader.uploadLogFile(1, "largeFiles",0, largelog);
-
- LogData logsResult = loader.fetchLogs(1, "largeFiles",0, 0, 64000);
- Assert.assertNotNull(logsResult);
- Assert.assertEquals("Logs length is " + logsResult.getLength(), logsResult.getLength(), 64000);
-
- LogData logsResult2 = loader.fetchLogs(1, "largeFiles",0, 1000, 64000);
- Assert.assertNotNull(logsResult2);
- Assert.assertEquals("Logs length is " + logsResult2.getLength(), logsResult2.getLength(), 64000);
-
- LogData logsResult3 = loader.fetchLogs(1, "largeFiles",0, 330000, 400000);
- Assert.assertNotNull(logsResult3);
- Assert.assertEquals("Logs length is " + logsResult3.getLength(), logsResult3.getLength(), 5493);
-
- LogData logsResult4 = loader.fetchLogs(1, "largeFiles",0, 340000, 400000);
- Assert.assertNull(logsResult4);
-
- LogData logsResult5 = loader.fetchLogs(1, "largeFiles",0, 153600, 204800);
- Assert.assertNotNull(logsResult5);
- Assert.assertEquals("Logs length is " + logsResult5.getLength(), logsResult5.getLength(), 181893);
-
- LogData logsResult6 = loader.fetchLogs(1, "largeFiles",0, 150000, 250000);
- Assert.assertNotNull(logsResult6);
- Assert.assertEquals("Logs length is " + logsResult6.getLength(), logsResult6.getLength(), 185493);
- }
-
- @SuppressWarnings("static-access")
- @Test
- public void testRemoveExecutionLogsByTime() throws ExecutorManagerException, IOException, InterruptedException {
-
- ExecutorLoader loader = createLoader();
-
- File logDir = new File("unit/executions/logtest");
-
- // Multiple of 255 for Henry the Eigth
- File[] largelog = {new File(logDir, "largeLog1.log"), new File(logDir, "largeLog2.log"), new File(logDir, "largeLog3.log")};
-
- DateTime time1 = DateTime.now();
- loader.uploadLogFile(1, "oldlog", 0, largelog);
- // sleep for 5 seconds
- Thread.currentThread().sleep(5000);
- loader.uploadLogFile(2, "newlog", 0, largelog);
-
- DateTime time2 = time1.plusMillis(2500);
-
- int count = loader.removeExecutionLogsByTime(time2.getMillis());
- System.out.print("Removed " + count + " records");
- LogData logs = loader.fetchLogs(1, "oldlog", 0, 0, 22222);
- Assert.assertTrue(logs == null);
- logs = loader.fetchLogs(2, "newlog", 0, 0, 22222);
- Assert.assertFalse(logs == null);
- }
-
- private ExecutableFlow createExecutableFlow(int executionId, String flowName) throws IOException {
- File jsonFlowFile = new File(flowDir, flowName + ".flow");
- @SuppressWarnings("unchecked")
- HashMap<String, Object> flowObj = (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
-
- Flow flow = Flow.flowFromObject(flowObj);
- Project project = new Project(1, "flow");
- HashMap<String, Flow> flowMap = new HashMap<String, Flow>();
- flowMap.put(flow.getId(), flow);
- project.setFlows(flowMap);
- ExecutableFlow execFlow = new ExecutableFlow(project, flow);
- execFlow.setExecutionId(executionId);
-
- return execFlow;
- }
-
- private ExecutableFlow createExecutableFlow(String flowName) throws IOException {
- File jsonFlowFile = new File(flowDir, flowName + ".flow");
- @SuppressWarnings("unchecked")
- HashMap<String, Object> flowObj = (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
-
- Flow flow = Flow.flowFromObject(flowObj);
- Project project = new Project(1, "flow");
- HashMap<String, Flow> flowMap = new HashMap<String, Flow>();
- flowMap.put(flow.getId(), flow);
- project.setFlows(flowMap);
- ExecutableFlow execFlow = new ExecutableFlow(project, flow);
-
- return execFlow;
- }
-
- private ExecutorLoader createLoader() {
- Props props = new Props();
- props.put("database.type", "mysql");
-
- props.put("mysql.host", host);
- props.put("mysql.port", port);
- props.put("mysql.user", user);
- props.put("mysql.database", database);
- props.put("mysql.password", password);
- props.put("mysql.numconnections", numConnections);
-
- return new JdbcExecutorLoader(props);
- }
-
- private boolean isTestSetup() {
- if (!testDBExists) {
- System.err.println("Skipping DB test because Db not setup.");
- return false;
- }
-
- System.out.println("Running DB test because Db setup.");
- return true;
- }
-
- public static class CountHandler implements ResultSetHandler<Integer> {
- @Override
- public Integer handle(ResultSet rs) throws SQLException {
- int val = 0;
- while (rs.next()) {
- val++;
- }
-
- return val;
- }
- }
-
+ private static boolean testDBExists;
+  // TODO: remove this hard-coded hostname and use localhost instead.
+ private static final String host = "cyu-ld.linkedin.biz";
+ private static final int port = 3306;
+ private static final String database = "azkaban2";
+ private static final String user = "azkaban";
+ private static final String password = "azkaban";
+ private static final int numConnections = 10;
+
+ private File flowDir = new File("unit/executions/exectest1");
+
+ @BeforeClass
+ public static void setupDB() {
+ DataSource dataSource =
+ DataSourceUtils.getMySQLDataSource(host, port, database, user,
+ password, numConnections);
+ testDBExists = true;
+
+ Connection connection = null;
+ try {
+ connection = dataSource.getConnection();
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ CountHandler countHandler = new CountHandler();
+ QueryRunner runner = new QueryRunner();
+ try {
+ runner.query(connection, "SELECT COUNT(1) FROM active_executing_flows",
+ countHandler);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ try {
+ runner.query(connection, "SELECT COUNT(1) FROM execution_flows",
+ countHandler);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ try {
+ runner.query(connection, "SELECT COUNT(1) FROM execution_jobs",
+ countHandler);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ try {
+ runner.query(connection, "SELECT COUNT(1) FROM execution_logs",
+ countHandler);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ DbUtils.closeQuietly(connection);
+
+ clearDB();
+ }
+
+ private static void clearDB() {
+ if (!testDBExists) {
+ return;
+ }
+
+ DataSource dataSource =
+ DataSourceUtils.getMySQLDataSource(host, port, database, user,
+ password, numConnections);
+ Connection connection = null;
+ try {
+ connection = dataSource.getConnection();
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ QueryRunner runner = new QueryRunner();
+ try {
+ runner.update(connection, "DELETE FROM active_executing_flows");
+
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ try {
+ runner.update(connection, "DELETE FROM execution_flows");
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ try {
+ runner.update(connection, "DELETE FROM execution_jobs");
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ try {
+ runner.update(connection, "DELETE FROM execution_logs");
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ DbUtils.closeQuietly(connection);
+ }
+
+ @Test
+ public void testUploadExecutionFlows() throws Exception {
+ if (!isTestSetup()) {
+ return;
+ }
+
+ ExecutorLoader loader = createLoader();
+ ExecutableFlow flow = createExecutableFlow("exec1");
+
+ loader.uploadExecutableFlow(flow);
+
+ ExecutableFlow fetchFlow =
+ loader.fetchExecutableFlow(flow.getExecutionId());
+
+ // Shouldn't be the same object.
+ Assert.assertTrue(flow != fetchFlow);
+ Assert.assertEquals(flow.getExecutionId(), fetchFlow.getExecutionId());
+ Assert.assertEquals(flow.getEndTime(), fetchFlow.getEndTime());
+ Assert.assertEquals(flow.getStartTime(), fetchFlow.getStartTime());
+ Assert.assertEquals(flow.getSubmitTime(), fetchFlow.getSubmitTime());
+ Assert.assertEquals(flow.getFlowId(), fetchFlow.getFlowId());
+ Assert.assertEquals(flow.getProjectId(), fetchFlow.getProjectId());
+ Assert.assertEquals(flow.getVersion(), fetchFlow.getVersion());
+ Assert.assertEquals(flow.getExecutionOptions().getFailureAction(),
+ fetchFlow.getExecutionOptions().getFailureAction());
+ Assert.assertEquals(new HashSet<String>(flow.getEndNodes()),
+ new HashSet<String>(fetchFlow.getEndNodes()));
+ }
+
+ @Test
+ public void testUpdateExecutionFlows() throws Exception {
+ if (!isTestSetup()) {
+ return;
+ }
+
+ ExecutorLoader loader = createLoader();
+ ExecutableFlow flow = createExecutableFlow("exec1");
+
+ loader.uploadExecutableFlow(flow);
+
+ ExecutableFlow fetchFlow2 =
+ loader.fetchExecutableFlow(flow.getExecutionId());
+
+ fetchFlow2.setEndTime(System.currentTimeMillis());
+ fetchFlow2.setStatus(Status.SUCCEEDED);
+ loader.updateExecutableFlow(fetchFlow2);
+ ExecutableFlow fetchFlow =
+ loader.fetchExecutableFlow(flow.getExecutionId());
+
+ // Shouldn't be the same object.
+ Assert.assertTrue(flow != fetchFlow);
+ Assert.assertEquals(flow.getExecutionId(), fetchFlow.getExecutionId());
+ Assert.assertEquals(fetchFlow2.getEndTime(), fetchFlow.getEndTime());
+ Assert.assertEquals(fetchFlow2.getStatus(), fetchFlow.getStatus());
+ Assert.assertEquals(flow.getStartTime(), fetchFlow.getStartTime());
+ Assert.assertEquals(flow.getSubmitTime(), fetchFlow.getSubmitTime());
+ Assert.assertEquals(flow.getFlowId(), fetchFlow.getFlowId());
+ Assert.assertEquals(flow.getProjectId(), fetchFlow.getProjectId());
+ Assert.assertEquals(flow.getVersion(), fetchFlow.getVersion());
+ Assert.assertEquals(flow.getExecutionOptions().getFailureAction(),
+ fetchFlow.getExecutionOptions().getFailureAction());
+ Assert.assertEquals(new HashSet<String>(flow.getEndNodes()),
+ new HashSet<String>(fetchFlow.getEndNodes()));
+ }
+
+ @Test
+ public void testUploadExecutableNode() throws Exception {
+ if (!isTestSetup()) {
+ return;
+ }
+
+ ExecutorLoader loader = createLoader();
+ ExecutableFlow flow = createExecutableFlow(10, "exec1");
+ flow.setExecutionId(10);
+
+ File jobFile = new File(flowDir, "job10.job");
+ Props props = new Props(null, jobFile);
+ props.put("test", "test2");
+ ExecutableNode oldNode = flow.getExecutableNode("job10");
+ oldNode.setStartTime(System.currentTimeMillis());
+ loader.uploadExecutableNode(oldNode, props);
+
+ ExecutableJobInfo info = loader.fetchJobInfo(10, "job10", 0);
+ Assert.assertEquals(flow.getExecutionId(), info.getExecId());
+ Assert.assertEquals(flow.getProjectId(), info.getProjectId());
+ Assert.assertEquals(flow.getVersion(), info.getVersion());
+ Assert.assertEquals(flow.getFlowId(), info.getFlowId());
+ Assert.assertEquals(oldNode.getId(), info.getJobId());
+ Assert.assertEquals(oldNode.getStatus(), info.getStatus());
+ Assert.assertEquals(oldNode.getStartTime(), info.getStartTime());
+ Assert.assertEquals("endTime = " + oldNode.getEndTime()
+ + " info endTime = " + info.getEndTime(), oldNode.getEndTime(),
+ info.getEndTime());
+
+ // Fetch props
+ Props outputProps = new Props();
+ outputProps.put("hello", "output");
+ oldNode.setOutputProps(outputProps);
+ oldNode.setEndTime(System.currentTimeMillis());
+ loader.updateExecutableNode(oldNode);
+
+ Props fInputProps = loader.fetchExecutionJobInputProps(10, "job10");
+ Props fOutputProps = loader.fetchExecutionJobOutputProps(10, "job10");
+ Pair<Props, Props> inOutProps = loader.fetchExecutionJobProps(10, "job10");
+
+ Assert.assertEquals(fInputProps.get("test"), "test2");
+ Assert.assertEquals(fOutputProps.get("hello"), "output");
+ Assert.assertEquals(inOutProps.getFirst().get("test"), "test2");
+ Assert.assertEquals(inOutProps.getSecond().get("hello"), "output");
+
+ }
+
+ @Test
+ public void testActiveReference() throws Exception {
+ if (!isTestSetup()) {
+ return;
+ }
+
+ ExecutorLoader loader = createLoader();
+ ExecutableFlow flow1 = createExecutableFlow("exec1");
+ loader.uploadExecutableFlow(flow1);
+ ExecutionReference ref1 =
+ new ExecutionReference(flow1.getExecutionId(), "test", 1);
+ loader.addActiveExecutableReference(ref1);
+
+ ExecutableFlow flow2 = createExecutableFlow("exec1");
+ loader.uploadExecutableFlow(flow2);
+ ExecutionReference ref2 =
+ new ExecutionReference(flow2.getExecutionId(), "test", 1);
+ loader.addActiveExecutableReference(ref2);
+
+ ExecutableFlow flow3 = createExecutableFlow("exec1");
+ loader.uploadExecutableFlow(flow3);
+
+ Map<Integer, Pair<ExecutionReference, ExecutableFlow>> activeFlows1 =
+ loader.fetchActiveFlows();
+ ExecutableFlow flow1Result =
+ activeFlows1.get(flow1.getExecutionId()).getSecond();
+ Assert.assertNotNull(flow1Result);
+ Assert.assertTrue(flow1 != flow1Result);
+ Assert.assertEquals(flow1.getExecutionId(), flow1Result.getExecutionId());
+ Assert.assertEquals(flow1.getEndTime(), flow1Result.getEndTime());
+ Assert.assertEquals(flow1.getStartTime(), flow1Result.getStartTime());
+ Assert.assertEquals(flow1.getSubmitTime(), flow1Result.getSubmitTime());
+ Assert.assertEquals(flow1.getFlowId(), flow1Result.getFlowId());
+ Assert.assertEquals(flow1.getProjectId(), flow1Result.getProjectId());
+ Assert.assertEquals(flow1.getVersion(), flow1Result.getVersion());
+ Assert.assertEquals(flow1.getExecutionOptions().getFailureAction(),
+ flow1Result.getExecutionOptions().getFailureAction());
+
+ ExecutableFlow flow1Result2 =
+ activeFlows1.get(flow2.getExecutionId()).getSecond();
+ Assert.assertNotNull(flow1Result2);
+ Assert.assertTrue(flow2 != flow1Result2);
+ Assert.assertEquals(flow2.getExecutionId(), flow1Result2.getExecutionId());
+ Assert.assertEquals(flow2.getEndTime(), flow1Result2.getEndTime());
+ Assert.assertEquals(flow2.getStartTime(), flow1Result2.getStartTime());
+ Assert.assertEquals(flow2.getSubmitTime(), flow1Result2.getSubmitTime());
+ Assert.assertEquals(flow2.getFlowId(), flow1Result2.getFlowId());
+ Assert.assertEquals(flow2.getProjectId(), flow1Result2.getProjectId());
+ Assert.assertEquals(flow2.getVersion(), flow1Result2.getVersion());
+ Assert.assertEquals(flow2.getExecutionOptions().getFailureAction(),
+ flow1Result2.getExecutionOptions().getFailureAction());
+
+ loader.removeActiveExecutableReference(flow2.getExecutionId());
+ Map<Integer, Pair<ExecutionReference, ExecutableFlow>> activeFlows2 =
+ loader.fetchActiveFlows();
+
+ Assert.assertTrue(activeFlows2.containsKey(flow1.getExecutionId()));
+ Assert.assertFalse(activeFlows2.containsKey(flow3.getExecutionId()));
+ Assert.assertFalse(activeFlows2.containsKey(flow2.getExecutionId()));
+ }
+
+ @Test
+ public void testSmallUploadLog() throws ExecutorManagerException {
+ File logDir = new File("unit/executions/logtest");
+ File[] smalllog =
+ { new File(logDir, "log1.log"), new File(logDir, "log2.log"),
+ new File(logDir, "log3.log") };
+
+ ExecutorLoader loader = createLoader();
+ loader.uploadLogFile(1, "smallFiles", 0, smalllog);
+
+ LogData data = loader.fetchLogs(1, "smallFiles", 0, 0, 50000);
+ Assert.assertNotNull(data);
+ Assert.assertEquals("Logs length is " + data.getLength(), data.getLength(),
+ 53);
+
+ System.out.println(data.toString());
+
+ LogData data2 = loader.fetchLogs(1, "smallFiles", 0, 10, 20);
+ System.out.println(data2.toString());
+ Assert.assertNotNull(data2);
+ Assert.assertEquals("Logs length is " + data2.getLength(),
+ data2.getLength(), 20);
+
+ }
+
+ @Test
+ public void testLargeUploadLog() throws ExecutorManagerException {
+ File logDir = new File("unit/executions/logtest");
+
+    // Multiple of 255 for Henry the Eighth
+ File[] largelog =
+ { new File(logDir, "largeLog1.log"), new File(logDir, "largeLog2.log"),
+ new File(logDir, "largeLog3.log") };
+
+ ExecutorLoader loader = createLoader();
+ loader.uploadLogFile(1, "largeFiles", 0, largelog);
+
+ LogData logsResult = loader.fetchLogs(1, "largeFiles", 0, 0, 64000);
+ Assert.assertNotNull(logsResult);
+ Assert.assertEquals("Logs length is " + logsResult.getLength(),
+ logsResult.getLength(), 64000);
+
+ LogData logsResult2 = loader.fetchLogs(1, "largeFiles", 0, 1000, 64000);
+ Assert.assertNotNull(logsResult2);
+ Assert.assertEquals("Logs length is " + logsResult2.getLength(),
+ logsResult2.getLength(), 64000);
+
+ LogData logsResult3 = loader.fetchLogs(1, "largeFiles", 0, 330000, 400000);
+ Assert.assertNotNull(logsResult3);
+ Assert.assertEquals("Logs length is " + logsResult3.getLength(),
+ logsResult3.getLength(), 5493);
+
+ LogData logsResult4 = loader.fetchLogs(1, "largeFiles", 0, 340000, 400000);
+ Assert.assertNull(logsResult4);
+
+ LogData logsResult5 = loader.fetchLogs(1, "largeFiles", 0, 153600, 204800);
+ Assert.assertNotNull(logsResult5);
+ Assert.assertEquals("Logs length is " + logsResult5.getLength(),
+ logsResult5.getLength(), 181893);
+
+ LogData logsResult6 = loader.fetchLogs(1, "largeFiles", 0, 150000, 250000);
+ Assert.assertNotNull(logsResult6);
+ Assert.assertEquals("Logs length is " + logsResult6.getLength(),
+ logsResult6.getLength(), 185493);
+ }
+
+ @SuppressWarnings("static-access")
+ @Test
+ public void testRemoveExecutionLogsByTime() throws ExecutorManagerException,
+ IOException, InterruptedException {
+
+ ExecutorLoader loader = createLoader();
+
+ File logDir = new File("unit/executions/logtest");
+
+    // Multiple of 255 for Henry the Eighth
+ File[] largelog =
+ { new File(logDir, "largeLog1.log"), new File(logDir, "largeLog2.log"),
+ new File(logDir, "largeLog3.log") };
+
+ DateTime time1 = DateTime.now();
+ loader.uploadLogFile(1, "oldlog", 0, largelog);
+ // sleep for 5 seconds
+ Thread.currentThread().sleep(5000);
+ loader.uploadLogFile(2, "newlog", 0, largelog);
+
+ DateTime time2 = time1.plusMillis(2500);
+
+ int count = loader.removeExecutionLogsByTime(time2.getMillis());
+ System.out.print("Removed " + count + " records");
+ LogData logs = loader.fetchLogs(1, "oldlog", 0, 0, 22222);
+ Assert.assertTrue(logs == null);
+ logs = loader.fetchLogs(2, "newlog", 0, 0, 22222);
+ Assert.assertFalse(logs == null);
+ }
+
+ private ExecutableFlow createExecutableFlow(int executionId, String flowName)
+ throws IOException {
+ File jsonFlowFile = new File(flowDir, flowName + ".flow");
+ @SuppressWarnings("unchecked")
+ HashMap<String, Object> flowObj =
+ (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
+
+ Flow flow = Flow.flowFromObject(flowObj);
+ Project project = new Project(1, "flow");
+ HashMap<String, Flow> flowMap = new HashMap<String, Flow>();
+ flowMap.put(flow.getId(), flow);
+ project.setFlows(flowMap);
+ ExecutableFlow execFlow = new ExecutableFlow(project, flow);
+ execFlow.setExecutionId(executionId);
+
+ return execFlow;
+ }
+
+ private ExecutableFlow createExecutableFlow(String flowName)
+ throws IOException {
+ File jsonFlowFile = new File(flowDir, flowName + ".flow");
+ @SuppressWarnings("unchecked")
+ HashMap<String, Object> flowObj =
+ (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
+
+ Flow flow = Flow.flowFromObject(flowObj);
+ Project project = new Project(1, "flow");
+ HashMap<String, Flow> flowMap = new HashMap<String, Flow>();
+ flowMap.put(flow.getId(), flow);
+ project.setFlows(flowMap);
+ ExecutableFlow execFlow = new ExecutableFlow(project, flow);
+
+ return execFlow;
+ }
+
+ private ExecutorLoader createLoader() {
+ Props props = new Props();
+ props.put("database.type", "mysql");
+
+ props.put("mysql.host", host);
+ props.put("mysql.port", port);
+ props.put("mysql.user", user);
+ props.put("mysql.database", database);
+ props.put("mysql.password", password);
+ props.put("mysql.numconnections", numConnections);
+
+ return new JdbcExecutorLoader(props);
+ }
+
+ private boolean isTestSetup() {
+ if (!testDBExists) {
+ System.err.println("Skipping DB test because Db not setup.");
+ return false;
+ }
+
+ System.out.println("Running DB test because Db setup.");
+ return true;
+ }
+
+ public static class CountHandler implements ResultSetHandler<Integer> {
+ @Override
+ public Integer handle(ResultSet rs) throws SQLException {
+ int val = 0;
+ while (rs.next()) {
+ val++;
+ }
+
+ return val;
+ }
+ }
+
}
\ No newline at end of file
unit/java/azkaban/test/executor/SleepJavaJob.java 169(+84 -85)
diff --git a/unit/java/azkaban/test/executor/SleepJavaJob.java b/unit/java/azkaban/test/executor/SleepJavaJob.java
index 535151a..486f943 100644
--- a/unit/java/azkaban/test/executor/SleepJavaJob.java
+++ b/unit/java/azkaban/test/executor/SleepJavaJob.java
@@ -6,90 +6,89 @@ import java.util.Map;
import java.util.Properties;
public class SleepJavaJob {
- private boolean fail;
- private String seconds;
- private int attempts;
- private int currentAttempt;
-
- public SleepJavaJob(String id, Properties props) {
- setup(props);
- }
-
- public SleepJavaJob(String id, Map<String, String> parameters) {
- Properties properties = new Properties();
- properties.putAll(parameters);
-
- setup(properties);
- }
-
- private void setup(Properties props) {
- String failStr = (String)props.get("fail");
-
- if (failStr == null || failStr.equals("false")) {
- fail = false;
- }
- else {
- fail = true;
- }
-
- currentAttempt = props.containsKey("azkaban.job.attempt") ? Integer.parseInt((String)props.get("azkaban.job.attempt")) : 0;
- String attemptString = (String)props.get("passRetry");
- if (attemptString == null) {
- attempts = -1;
- }
- else {
- attempts = Integer.valueOf(attemptString);
- }
- seconds = (String)props.get("seconds");
-
- if (fail) {
- System.out.println("Planning to fail after " + seconds + " seconds. Attempts left " + currentAttempt + " of " + attempts);
- }
- else {
- System.out.println("Planning to succeed after " + seconds + " seconds.");
- }
- }
-
- public static void main(String[] args) throws Exception {
- String propsFile = System.getenv("JOB_PROP_FILE");
- Properties prop = new Properties();
- prop.load(new BufferedReader(new FileReader(propsFile)));
-
- String jobName = System.getenv("JOB_NAME");
- SleepJavaJob job = new SleepJavaJob(jobName, prop);
-
- job.run();
- }
-
- public void run() throws Exception {
- if (seconds == null) {
- throw new RuntimeException("Seconds not set");
- }
-
- int sec = Integer.parseInt(seconds);
- System.out.println("Sec " + sec);
- synchronized(this) {
- try {
- this.wait(sec*1000);
- } catch (InterruptedException e) {
- System.out.println("Interrupted " + fail);
- }
- }
-
- if (fail) {
- if (attempts <= 0 || currentAttempt <= attempts) {
- throw new Exception("I failed because I had to.");
- }
- }
- }
-
- public void cancel() throws Exception {
- System.out.println("Cancelled called on Sleep job");
- fail = true;
- synchronized(this) {
- this.notifyAll();
- }
- }
-
+ private boolean fail;
+ private String seconds;
+ private int attempts;
+ private int currentAttempt;
+
+ public SleepJavaJob(String id, Properties props) {
+ setup(props);
+ }
+
+ public SleepJavaJob(String id, Map<String, String> parameters) {
+ Properties properties = new Properties();
+ properties.putAll(parameters);
+
+ setup(properties);
+ }
+
+ private void setup(Properties props) {
+ String failStr = (String) props.get("fail");
+
+ if (failStr == null || failStr.equals("false")) {
+ fail = false;
+ } else {
+ fail = true;
+ }
+
+ currentAttempt =
+ props.containsKey("azkaban.job.attempt") ? Integer
+ .parseInt((String) props.get("azkaban.job.attempt")) : 0;
+ String attemptString = (String) props.get("passRetry");
+ if (attemptString == null) {
+ attempts = -1;
+ } else {
+ attempts = Integer.valueOf(attemptString);
+ }
+ seconds = (String) props.get("seconds");
+
+ if (fail) {
+ System.out.println("Planning to fail after " + seconds
+ + " seconds. Attempts left " + currentAttempt + " of " + attempts);
+ } else {
+ System.out.println("Planning to succeed after " + seconds + " seconds.");
+ }
+ }
+
+ public static void main(String[] args) throws Exception {
+ String propsFile = System.getenv("JOB_PROP_FILE");
+ Properties prop = new Properties();
+ prop.load(new BufferedReader(new FileReader(propsFile)));
+
+ String jobName = System.getenv("JOB_NAME");
+ SleepJavaJob job = new SleepJavaJob(jobName, prop);
+
+ job.run();
+ }
+
+ public void run() throws Exception {
+ if (seconds == null) {
+ throw new RuntimeException("Seconds not set");
+ }
+
+ int sec = Integer.parseInt(seconds);
+ System.out.println("Sec " + sec);
+ synchronized (this) {
+ try {
+ this.wait(sec * 1000);
+ } catch (InterruptedException e) {
+ System.out.println("Interrupted " + fail);
+ }
+ }
+
+ if (fail) {
+ if (attempts <= 0 || currentAttempt <= attempts) {
+ throw new Exception("I failed because I had to.");
+ }
+ }
+ }
+
+ public void cancel() throws Exception {
+ System.out.println("Cancelled called on Sleep job");
+ fail = true;
+ synchronized (this) {
+ this.notifyAll();
+ }
+ }
}
diff --git a/unit/java/azkaban/test/jobExecutor/AllJobExecutorTests.java b/unit/java/azkaban/test/jobExecutor/AllJobExecutorTests.java
index 83e55b2..04b3365 100644
--- a/unit/java/azkaban/test/jobExecutor/AllJobExecutorTests.java
+++ b/unit/java/azkaban/test/jobExecutor/AllJobExecutorTests.java
@@ -5,7 +5,8 @@ import org.junit.runners.Suite;
import org.junit.runners.Suite.SuiteClasses;
@RunWith(Suite.class)
-@SuiteClasses({ JavaProcessJobTest.class, ProcessJobTest.class, PythonJobTest.class })
+@SuiteClasses({ JavaProcessJobTest.class, ProcessJobTest.class,
+ PythonJobTest.class })
public class AllJobExecutorTests {
}
diff --git a/unit/java/azkaban/test/jobExecutor/JavaProcessJobTest.java b/unit/java/azkaban/test/jobExecutor/JavaProcessJobTest.java
index 048801b..bc27249 100644
--- a/unit/java/azkaban/test/jobExecutor/JavaProcessJobTest.java
+++ b/unit/java/azkaban/test/jobExecutor/JavaProcessJobTest.java
@@ -16,101 +16,101 @@ import azkaban.jobExecutor.AbstractProcessJob;
import azkaban.jobExecutor.JavaProcessJob;
import azkaban.jobExecutor.ProcessJob;
-public class JavaProcessJobTest
-{
+public class JavaProcessJobTest {
private JavaProcessJob job = null;
-// private JobDescriptor descriptor = null;
+ // private JobDescriptor descriptor = null;
private Props props = null;
private Logger log = Logger.getLogger(JavaProcessJob.class);
- private static String classPaths ;
-
- private static final String inputContent =
- "Quick Change in Strategy for a Bookseller \n" +
- " By JULIE BOSMAN \n" +
- "Published: August 11, 2010 \n" +
- " \n" +
- "Twelve years later, it may be Joe Fox's turn to worry. Readers have gone from skipping small \n" +
- "bookstores to wondering if they need bookstores at all. More people are ordering books online \n" +
- "or plucking them from the best-seller bin at Wal-Mart";
-
- private static final String errorInputContent =
- inputContent + "\n stop_here " +
- "But the threat that has the industry and some readers the most rattled is the growth of e-books. \n" +
- " In the first five months of 2009, e-books made up 2.9 percent of trade book sales. In the same period \n" +
- "in 2010, sales of e-books, which generally cost less than hardcover books, grew to 8.5 percent, according \n" +
- "to the Association of American Publishers, spurred by sales of the Amazon Kindle and the new Apple iPad. \n" +
- "For Barnes & Noble, long the largest and most powerful bookstore chain in the country, the new competition \n" +
- "has led to declining profits and store traffic.";
-
-
- private static String inputFile ;
- private static String errorInputFile ;
- private static String outputFile ;
-
+ private static String classPaths;
+
+ private static final String inputContent =
+ "Quick Change in Strategy for a Bookseller \n"
+ + " By JULIE BOSMAN \n"
+ + "Published: August 11, 2010 \n"
+ + " \n"
+ + "Twelve years later, it may be Joe Fox's turn to worry. Readers have gone from skipping small \n"
+ + "bookstores to wondering if they need bookstores at all. More people are ordering books online \n"
+ + "or plucking them from the best-seller bin at Wal-Mart";
+
+ private static final String errorInputContent =
+ inputContent
+ + "\n stop_here "
+ + "But the threat that has the industry and some readers the most rattled is the growth of e-books. \n"
+ + " In the first five months of 2009, e-books made up 2.9 percent of trade book sales. In the same period \n"
+ + "in 2010, sales of e-books, which generally cost less than hardcover books, grew to 8.5 percent, according \n"
+ + "to the Association of American Publishers, spurred by sales of the Amazon Kindle and the new Apple iPad. \n"
+ + "For Barnes & Noble, long the largest and most powerful bookstore chain in the country, the new competition \n"
+ + "has led to declining profits and store traffic.";
+
+ private static String inputFile;
+ private static String errorInputFile;
+ private static String outputFile;
+
@BeforeClass
public static void init() {
// get the classpath
Properties prop = System.getProperties();
- classPaths = String.format("'%s'", prop.getProperty("java.class.path", null));
-
+ classPaths =
+ String.format("'%s'", prop.getProperty("java.class.path", null));
+
long time = (new Date()).getTime();
- inputFile = "/tmp/azkaban_input_" + time;
- errorInputFile = "/tmp/azkaban_input_error_" + time;
- outputFile = "/tmp/azkaban_output_" + time;
+ inputFile = "/tmp/azkaban_input_" + time;
+ errorInputFile = "/tmp/azkaban_input_error_" + time;
+ outputFile = "/tmp/azkaban_output_" + time;
// dump input files
- try {
- Utils.dumpFile(inputFile, inputContent);
- Utils.dumpFile(errorInputFile, errorInputContent);
- }
- catch (IOException e) {
- e.printStackTrace(System.err);
- Assert.fail("error in creating input file:" + e.getLocalizedMessage());
- }
-
+ try {
+ Utils.dumpFile(inputFile, inputContent);
+ Utils.dumpFile(errorInputFile, errorInputContent);
+ } catch (IOException e) {
+ e.printStackTrace(System.err);
+ Assert.fail("error in creating input file:" + e.getLocalizedMessage());
+ }
+
}
-
+
@AfterClass
public static void cleanup() {
// remove the input file and error input file
Utils.removeFile(inputFile);
Utils.removeFile(errorInputFile);
- //Utils.removeFile(outputFile);
+ // Utils.removeFile(outputFile);
}
-
+
@Before
public void setUp() {
-
- /* initialize job */
-// descriptor = EasyMock.createMock(JobDescriptor.class);
-
+
+ /* initialize job */
+ // descriptor = EasyMock.createMock(JobDescriptor.class);
+
props = new Props();
props.put(AbstractProcessJob.WORKING_DIR, ".");
props.put("type", "java");
props.put("fullPath", ".");
-
-// EasyMock.expect(descriptor.getId()).andReturn("java").times(1);
-// EasyMock.expect(descriptor.getProps()).andReturn(props).times(1);
-// EasyMock.expect(descriptor.getFullPath()).andReturn(".").times(1);
-//
-// EasyMock.replay(descriptor);
-
+
+ // EasyMock.expect(descriptor.getId()).andReturn("java").times(1);
+ // EasyMock.expect(descriptor.getProps()).andReturn(props).times(1);
+ // EasyMock.expect(descriptor.getFullPath()).andReturn(".").times(1);
+ //
+ // EasyMock.replay(descriptor);
+
job = new JavaProcessJob("testJavaProcess", props, props, log);
-
-// EasyMock.verify(descriptor);
+
+ // EasyMock.verify(descriptor);
}
-
+
@Test
public void testJavaJob() throws Exception {
/* initialize the Props */
- props.put(JavaProcessJob.JAVA_CLASS, "azkaban.test.jobExecutor.WordCountLocal");
+ props.put(JavaProcessJob.JAVA_CLASS,
+ "azkaban.test.jobExecutor.WordCountLocal");
props.put(ProcessJob.WORKING_DIR, ".");
props.put("input", inputFile);
props.put("output", outputFile);
- props.put("classpath", classPaths);
+ props.put("classpath", classPaths);
job.run();
}
-
+
@Test
public void testJavaJobHashmap() throws Exception {
/* initialize the Props */
@@ -119,25 +119,24 @@ public class JavaProcessJobTest
props.put(ProcessJob.WORKING_DIR, ".");
props.put("input", inputFile);
props.put("output", outputFile);
- props.put("classpath", classPaths);
+ props.put("classpath", classPaths);
job.run();
}
-
+
@Test
public void testFailedJavaJob() throws Exception {
- props.put(JavaProcessJob.JAVA_CLASS, "azkaban.test.jobExecutor.WordCountLocal");
+ props.put(JavaProcessJob.JAVA_CLASS,
+ "azkaban.test.jobExecutor.WordCountLocal");
props.put(ProcessJob.WORKING_DIR, ".");
props.put("input", errorInputFile);
props.put("output", outputFile);
props.put("classpath", classPaths);
-
+
try {
- job.run();
- }
- catch (RuntimeException e) {
+ job.run();
+ } catch (RuntimeException e) {
Assert.assertTrue(true);
}
}
-
-}
+}
diff --git a/unit/java/azkaban/test/jobExecutor/ProcessJobTest.java b/unit/java/azkaban/test/jobExecutor/ProcessJobTest.java
index fde3e53..33610a3 100644
--- a/unit/java/azkaban/test/jobExecutor/ProcessJobTest.java
+++ b/unit/java/azkaban/test/jobExecutor/ProcessJobTest.java
@@ -9,36 +9,33 @@ import azkaban.utils.Props;
import azkaban.jobExecutor.AbstractProcessJob;
import azkaban.jobExecutor.ProcessJob;
-
-public class ProcessJobTest
-{
+public class ProcessJobTest {
private ProcessJob job = null;
-// private JobDescriptor descriptor = null;
+ // private JobDescriptor descriptor = null;
private Props props = null;
private Logger log = Logger.getLogger(ProcessJob.class);
+
@Before
public void setUp() {
-
- /* initialize job */
-// props = EasyMock.createMock(Props.class);
-
+
+ /* initialize job */
+ // props = EasyMock.createMock(Props.class);
+
props = new Props();
props.put(AbstractProcessJob.WORKING_DIR, ".");
props.put("type", "command");
props.put("fullPath", ".");
-
-// EasyMock.expect(props.getString("type")).andReturn("command").times(1);
-// EasyMock.expect(props.getProps()).andReturn(props).times(1);
-// EasyMock.expect(props.getString("fullPath")).andReturn(".").times(1);
-//
-// EasyMock.replay(props);
-
+ // EasyMock.expect(props.getString("type")).andReturn("command").times(1);
+ // EasyMock.expect(props.getProps()).andReturn(props).times(1);
+ // EasyMock.expect(props.getString("fullPath")).andReturn(".").times(1);
+ //
+ // EasyMock.replay(props);
+
job = new ProcessJob("TestProcess", props, props, log);
-
}
-
+
@Test
public void testOneUnixCommand() throws Exception {
/* initialize the Props */
@@ -46,46 +43,47 @@ public class ProcessJobTest
props.put(ProcessJob.WORKING_DIR, ".");
job.run();
-
+
}
@Test
- public void testFailedUnixCommand() throws Exception {
+ public void testFailedUnixCommand() throws Exception {
/* initialize the Props */
props.put(ProcessJob.COMMAND, "xls -al");
props.put(ProcessJob.WORKING_DIR, ".");
try {
job.run();
- }catch (RuntimeException e) {
+ } catch (RuntimeException e) {
Assert.assertTrue(true);
e.printStackTrace();
}
}
-
- @Test
- public void testMultipleUnixCommands( ) throws Exception {
- /* initialize the Props */
- props.put(ProcessJob.WORKING_DIR, ".");
- props.put(ProcessJob.COMMAND, "pwd");
- props.put("command.1", "date");
- props.put("command.2", "whoami");
-
- job.run();
- }
-
- @Test
- public void testPartitionCommand() throws Exception {
- String test1 = "a b c";
-
- Assert.assertArrayEquals(new String[] {"a", "b", "c"}, ProcessJob.partitionCommandLine(test1));
-
- String test2 = "a 'b c'";
- Assert.assertArrayEquals(new String[] {"a", "b c"}, ProcessJob.partitionCommandLine(test2));
-
- String test3 = "a e='b c'";
- Assert.assertArrayEquals(new String[] {"a", "e=b c"}, ProcessJob.partitionCommandLine(test3));
- }
-}
+ @Test
+ public void testMultipleUnixCommands() throws Exception {
+ /* initialize the Props */
+ props.put(ProcessJob.WORKING_DIR, ".");
+ props.put(ProcessJob.COMMAND, "pwd");
+ props.put("command.1", "date");
+ props.put("command.2", "whoami");
+
+ job.run();
+ }
+ @Test
+ public void testPartitionCommand() throws Exception {
+ String test1 = "a b c";
+
+ Assert.assertArrayEquals(new String[] { "a", "b", "c" },
+ ProcessJob.partitionCommandLine(test1));
+
+ String test2 = "a 'b c'";
+ Assert.assertArrayEquals(new String[] { "a", "b c" },
+ ProcessJob.partitionCommandLine(test2));
+
+ String test3 = "a e='b c'";
+ Assert.assertArrayEquals(new String[] { "a", "e=b c" },
+ ProcessJob.partitionCommandLine(test3));
+ }
+}
diff --git a/unit/java/azkaban/test/jobExecutor/PythonJobTest.java b/unit/java/azkaban/test/jobExecutor/PythonJobTest.java
index d42cd22..0733342 100644
--- a/unit/java/azkaban/test/jobExecutor/PythonJobTest.java
+++ b/unit/java/azkaban/test/jobExecutor/PythonJobTest.java
@@ -13,94 +13,86 @@ import azkaban.utils.Props;
import azkaban.jobExecutor.AbstractProcessJob;
import azkaban.jobExecutor.PythonJob;
-public class PythonJobTest
-{
+public class PythonJobTest {
private PythonJob job = null;
-// private JobDescriptor descriptor = null;
+ // private JobDescriptor descriptor = null;
private Props props = null;
private Logger log = Logger.getLogger(PythonJob.class);
-
+
private static final String scriptContent =
- "#!/usr/bin/python \n" +
- "import re, string, sys \n" +
- "# if no arguments were given, print a helpful message \n" +
- "l=len(sys.argv) \n" +
- "if l < 1: \n"+
- "\tprint 'Usage: celsium --t temp' \n" +
- "\tsys.exit(1) \n" +
- "\n" +
- "# Loop over the arguments \n" +
- "i=1 \n" +
- "while i < l-1 : \n" +
- "\tname = sys.argv[i] \n" +
- "\tvalue = sys.argv[i+1] \n" +
- "\tif name == \"--t\": \n" +
- "\t\ttry: \n" +
- "\t\t\tfahrenheit = float(string.atoi(value)) \n" +
- "\t\texcept string.atoi_error: \n" +
- "\t\t\tprint repr(value), \" not a numeric value\" \n" +
- "\t\telse: \n" +
- "\t\t\tcelsius=(fahrenheit-32)*5.0/9.0 \n" +
- "\t\t\tprint '%i F = %iC' % (int(fahrenheit), int(celsius+.5)) \n" +
- "\t\t\tsys.exit(0) \n" +
- "\t\ti=i+2\n" ;
-
-
- private static String scriptFile ;
-
-
+ "#!/usr/bin/python \n" +
+ "import re, string, sys \n" +
+ "# if no arguments were given, print a helpful message \n" +
+ "l=len(sys.argv) \n" +
+ "if l < 1: \n"+
+ "\tprint 'Usage: celsium --t temp' \n" +
+ "\tsys.exit(1) \n" +
+ "\n" +
+ "# Loop over the arguments \n" +
+ "i=1 \n" +
+ "while i < l-1 : \n" +
+ "\tname = sys.argv[i] \n" +
+ "\tvalue = sys.argv[i+1] \n" +
+ "\tif name == \"--t\": \n" +
+ "\t\ttry: \n" +
+ "\t\t\tfahrenheit = float(string.atoi(value)) \n" +
+ "\t\texcept string.atoi_error: \n" +
+ "\t\t\tprint repr(value), \" not a numeric value\" \n" +
+ "\t\telse: \n" +
+ "\t\t\tcelsius=(fahrenheit-32)*5.0/9.0 \n" +
+ "\t\t\tprint '%i F = %iC' % (int(fahrenheit), int(celsius+.5)) \n" +
+ "\t\t\tsys.exit(0) \n" +
+ "\t\ti=i+2\n";
+
+ private static String scriptFile;
@BeforeClass
public static void init() {
-
+
long time = (new Date()).getTime();
scriptFile = "/tmp/azkaban_python" + time + ".py";
// dump script file
- try {
- Utils.dumpFile(scriptFile, scriptContent);
+ try {
+ Utils.dumpFile(scriptFile, scriptContent);
+ } catch (IOException e) {
+ e.printStackTrace(System.err);
+ Assert.fail("error in creating script file:" + e.getLocalizedMessage());
}
- catch (IOException e) {
- e.printStackTrace(System.err);
- Assert.fail("error in creating script file:" + e.getLocalizedMessage());
- }
-
+
}
-
+
@AfterClass
public static void cleanup() {
// remove the input file and error input file
Utils.removeFile(scriptFile);
}
-
+
@Test
public void testPythonJob() {
-
- /* initialize job */
-// descriptor = EasyMock.createMock(JobDescriptor.class);
-
+
+ /* initialize job */
+ // descriptor = EasyMock.createMock(JobDescriptor.class);
+
props = new Props();
props.put(AbstractProcessJob.WORKING_DIR, ".");
props.put("type", "python");
- props.put("script", scriptFile);
- props.put("t", "90");
+ props.put("script", scriptFile);
+ props.put("t", "90");
props.put("type", "script");
props.put("fullPath", ".");
-// EasyMock.expect(descriptor.getId()).andReturn("script").times(1);
-// EasyMock.expect(descriptor.getProps()).andReturn(props).times(3);
-// EasyMock.expect(descriptor.getFullPath()).andReturn(".").times(1);
-// EasyMock.replay(descriptor);
+ // EasyMock.expect(descriptor.getId()).andReturn("script").times(1);
+ // EasyMock.expect(descriptor.getProps()).andReturn(props).times(3);
+ // EasyMock.expect(descriptor.getFullPath()).andReturn(".").times(1);
+ // EasyMock.replay(descriptor);
job = new PythonJob("TestProcess", props, props, log);
-// EasyMock.verify(descriptor);
- try
- {
+ // EasyMock.verify(descriptor);
+ try {
job.run();
- }
- catch (Exception e)
- {
+ } catch (Exception e) {
e.printStackTrace(System.err);
Assert.fail("Python job failed:" + e.getLocalizedMessage());
}
}
-
+
}
unit/java/azkaban/test/jobExecutor/Utils.java 27(+13 -14)
diff --git a/unit/java/azkaban/test/jobExecutor/Utils.java b/unit/java/azkaban/test/jobExecutor/Utils.java
index 0c012cf..4de06f5 100644
--- a/unit/java/azkaban/test/jobExecutor/Utils.java
+++ b/unit/java/azkaban/test/jobExecutor/Utils.java
@@ -5,19 +5,18 @@ import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
-public class Utils
-{
+public class Utils {
+
+ public static void dumpFile(String filename, String filecontent)
+ throws IOException {
+ PrintWriter writer = new PrintWriter(new FileWriter(filename));
+ writer.print(filecontent);
+ writer.close();
+ }
+
+ public static void removeFile(String filename) {
+ File file = new File(filename);
+ file.delete();
+ }
- public static void dumpFile (String filename, String filecontent)
- throws IOException {
- PrintWriter writer = new PrintWriter(new FileWriter(filename));
- writer.print(filecontent);
- writer.close();
- }
-
- public static void removeFile (String filename) {
- File file = new File (filename);
- file.delete();
- }
-
}
diff --git a/unit/java/azkaban/test/jobExecutor/WordCountLocal.java b/unit/java/azkaban/test/jobExecutor/WordCountLocal.java
index d63fc39..9a4e15b 100644
--- a/unit/java/azkaban/test/jobExecutor/WordCountLocal.java
+++ b/unit/java/azkaban/test/jobExecutor/WordCountLocal.java
@@ -1,5 +1,5 @@
package azkaban.test.jobExecutor;
-
+
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileOutputStream;
@@ -18,73 +18,71 @@ import azkaban.jobExecutor.ProcessJob;
import azkaban.utils.Props;
public class WordCountLocal extends AbstractJob {
-
- private String _input = null;
- private String _output = null;
- private Map<String, Integer> _dic = new HashMap<String,Integer>();
-
- public static void main(String[] args) throws Exception {
- String propsFile = System.getenv(ProcessJob.JOB_PROP_ENV);
- System.out.println("propsFile: " + propsFile);
- Props prop = new Props(null, propsFile);
- WordCountLocal instance = new WordCountLocal("", prop);
- instance.run();
- }
- public WordCountLocal(String id, Props prop)
- {
- super(id, Logger.getLogger(WordCountLocal.class));
- _input = prop.getString("input");
- _output = prop.getString("output");
- }
-
-
- public void run () throws Exception {
-
- if (_input == null) throw new Exception ("input file is null");
- if (_output == null) throw new Exception ("output file is null");
- BufferedReader in = new BufferedReader (new InputStreamReader( new FileInputStream(_input)));
-
- String line = null;
- while ( (line = in.readLine()) != null ) {
- StringTokenizer tokenizer = new StringTokenizer(line);
- while (tokenizer.hasMoreTokens()) {
- String word =tokenizer.nextToken();
-
- if (word.toString().equals("end_here")) { //expect an out-of-bound exception
- String [] errArray = new String[1];
- System.out.println("string in possition 2 is " + errArray[1]);
- }
-
- if (_dic.containsKey(word)) {
- Integer num = _dic.get(word);
- _dic.put(word, num +1);
- }
- else {
- _dic.put(word, 1);
- }
- }
- }
- in.close();
-
- PrintWriter out = new PrintWriter(new FileOutputStream(_output));
- for (Map.Entry<String, Integer> entry: _dic.entrySet()) {
- out.println (entry.getKey() + "\t" + entry.getValue());
- }
- out.close();
- }
-
- @Override
- public Props getJobGeneratedProperties()
- {
- return new Props();
- }
- @Override
- public boolean isCanceled()
- {
- return false;
+ private String _input = null;
+ private String _output = null;
+ private Map<String, Integer> _dic = new HashMap<String, Integer>();
+
+ public static void main(String[] args) throws Exception {
+ String propsFile = System.getenv(ProcessJob.JOB_PROP_ENV);
+ System.out.println("propsFile: " + propsFile);
+ Props prop = new Props(null, propsFile);
+ WordCountLocal instance = new WordCountLocal("", prop);
+ instance.run();
+ }
+
+ public WordCountLocal(String id, Props prop) {
+ super(id, Logger.getLogger(WordCountLocal.class));
+ _input = prop.getString("input");
+ _output = prop.getString("output");
+ }
+
+ public void run() throws Exception {
+
+ if (_input == null)
+ throw new Exception("input file is null");
+ if (_output == null)
+ throw new Exception("output file is null");
+ BufferedReader in =
+ new BufferedReader(new InputStreamReader(new FileInputStream(_input)));
+
+ String line = null;
+ while ((line = in.readLine()) != null) {
+ StringTokenizer tokenizer = new StringTokenizer(line);
+ while (tokenizer.hasMoreTokens()) {
+ String word = tokenizer.nextToken();
+
+ if (word.toString().equals("end_here")) { // expect an out-of-bound
+ // exception
+ String[] errArray = new String[1];
+          System.out.println("string in position 2 is " + errArray[1]);
+ }
+
+ if (_dic.containsKey(word)) {
+ Integer num = _dic.get(word);
+ _dic.put(word, num + 1);
+ } else {
+ _dic.put(word, 1);
+ }
+ }
}
-
-
+ in.close();
+
+ PrintWriter out = new PrintWriter(new FileOutputStream(_output));
+ for (Map.Entry<String, Integer> entry : _dic.entrySet()) {
+ out.println(entry.getKey() + "\t" + entry.getValue());
}
-
\ No newline at end of file
+ out.close();
+ }
+
+ @Override
+ public Props getJobGeneratedProperties() {
+ return new Props();
+ }
+
+ @Override
+ public boolean isCanceled() {
+ return false;
+ }
+
+}
diff --git a/unit/java/azkaban/test/jobtype/FakeJavaJob.java b/unit/java/azkaban/test/jobtype/FakeJavaJob.java
index 65ca1ba..35bb7d2 100644
--- a/unit/java/azkaban/test/jobtype/FakeJavaJob.java
+++ b/unit/java/azkaban/test/jobtype/FakeJavaJob.java
@@ -21,8 +21,7 @@ import azkaban.jobExecutor.JavaProcessJob;
import azkaban.utils.Props;
public class FakeJavaJob extends JavaProcessJob {
- public FakeJavaJob(String jobid, Props sysProps, Props jobProps, Logger log) {
- super(jobid, sysProps, jobProps, log);
- }
+ public FakeJavaJob(String jobid, Props sysProps, Props jobProps, Logger log) {
+ super(jobid, sysProps, jobProps, log);
+ }
}
-
diff --git a/unit/java/azkaban/test/jobtype/FakeJavaJob2.java b/unit/java/azkaban/test/jobtype/FakeJavaJob2.java
index 581aeff..ef306d5 100644
--- a/unit/java/azkaban/test/jobtype/FakeJavaJob2.java
+++ b/unit/java/azkaban/test/jobtype/FakeJavaJob2.java
@@ -21,8 +21,7 @@ import azkaban.jobExecutor.JavaProcessJob;
import azkaban.utils.Props;
public class FakeJavaJob2 extends JavaProcessJob {
- public FakeJavaJob2(String jobid, Props sysProps, Props jobProps, Logger log) {
- super(jobid, sysProps, jobProps, log);
- }
+ public FakeJavaJob2(String jobid, Props sysProps, Props jobProps, Logger log) {
+ super(jobid, sysProps, jobProps, log);
+ }
}
-
unit/java/azkaban/test/jobtype/JobTypeManagerTest.java 555(+286 -269)
diff --git a/unit/java/azkaban/test/jobtype/JobTypeManagerTest.java b/unit/java/azkaban/test/jobtype/JobTypeManagerTest.java
index 9669b51..eb0207f 100644
--- a/unit/java/azkaban/test/jobtype/JobTypeManagerTest.java
+++ b/unit/java/azkaban/test/jobtype/JobTypeManagerTest.java
@@ -32,277 +32,294 @@ import azkaban.jobtype.JobTypeManager;
import azkaban.jobtype.JobTypePluginSet;
import azkaban.utils.Props;
-
/**
- * Test the flow run, especially with embedded flows.
- * Files are in unit/plugins/jobtypes
+ * Test the flow run, especially with embedded flows. Files are in
+ * unit/plugins/jobtypes
*
*/
public class JobTypeManagerTest {
- public static String TEST_PLUGIN_DIR = "jobtypes_test";
- private Logger logger = Logger.getLogger(JobTypeManagerTest.class);
- private JobTypeManager manager;
-
- public JobTypeManagerTest() {
- }
-
- @Before
- public void setUp() throws Exception {
- File jobTypeDir = new File(TEST_PLUGIN_DIR);
- jobTypeDir.mkdirs();
-
- FileUtils.copyDirectory(new File("unit/plugins/jobtypes"), jobTypeDir);
- manager = new JobTypeManager(TEST_PLUGIN_DIR, null, this.getClass().getClassLoader());
- }
-
- @After
- public void tearDown() throws IOException {
- FileUtils.deleteDirectory(new File(TEST_PLUGIN_DIR));
- }
-
- /**
- * Tests that the common and common private properties are loaded correctly
- * @throws Exception
- */
- @Test
- public void testCommonPluginProps() throws Exception {
- JobTypePluginSet pluginSet = manager.getJobTypePluginSet();
-
- Props props = pluginSet.getCommonPluginJobProps();
- System.out.println(props.toString());
- assertEquals("commonprop1", props.getString("commonprop1"));
- assertEquals("commonprop2", props.getString("commonprop2"));
- assertEquals("commonprop3", props.getString("commonprop3"));
-
- Props priv = pluginSet.getCommonPluginLoadProps();
- assertEquals("commonprivate1", priv.getString("commonprivate1"));
- assertEquals("commonprivate2", priv.getString("commonprivate2"));
- assertEquals("commonprivate3", priv.getString("commonprivate3"));
- }
-
- /**
- * Tests that the proper classes were loaded and that the common and the load
- * properties are properly loaded.
- *
- * @throws Exception
- */
- @Test
- public void testLoadedClasses() throws Exception {
- JobTypePluginSet pluginSet = manager.getJobTypePluginSet();
-
- Props props = pluginSet.getCommonPluginJobProps();
- System.out.println(props.toString());
- assertEquals("commonprop1", props.getString("commonprop1"));
- assertEquals("commonprop2", props.getString("commonprop2"));
- assertEquals("commonprop3", props.getString("commonprop3"));
- assertNull(props.get("commonprivate1"));
-
- Props priv = pluginSet.getCommonPluginLoadProps();
- assertEquals("commonprivate1", priv.getString("commonprivate1"));
- assertEquals("commonprivate2", priv.getString("commonprivate2"));
- assertEquals("commonprivate3", priv.getString("commonprivate3"));
-
- // Testing the anothertestjobtype
- Class<? extends Job> aPluginClass = pluginSet.getPluginClass("anothertestjob");
- assertEquals("azkaban.test.jobtype.FakeJavaJob", aPluginClass.getName());
- Props ajobProps = pluginSet.getPluginJobProps("anothertestjob");
- Props aloadProps = pluginSet.getPluginLoaderProps("anothertestjob");
-
- // Loader props
- assertEquals("lib/*", aloadProps.get("jobtype.classpath"));
- assertEquals("azkaban.test.jobtype.FakeJavaJob", aloadProps.get("jobtype.class"));
- assertEquals("commonprivate1", aloadProps.get("commonprivate1"));
- assertEquals("commonprivate2", aloadProps.get("commonprivate2"));
- assertEquals("commonprivate3", aloadProps.get("commonprivate3"));
- // Job props
- assertEquals("commonprop1", ajobProps.get("commonprop1"));
- assertEquals("commonprop2", ajobProps.get("commonprop2"));
- assertEquals("commonprop3", ajobProps.get("commonprop3"));
- assertNull(ajobProps.get("commonprivate1"));
-
- Class<? extends Job> tPluginClass = pluginSet.getPluginClass("testjob");
- assertEquals("azkaban.test.jobtype.FakeJavaJob2", tPluginClass.getName());
- Props tjobProps = pluginSet.getPluginJobProps("testjob");
- Props tloadProps = pluginSet.getPluginLoaderProps("testjob");
-
- // Loader props
- assertNull(tloadProps.get("jobtype.classpath"));
- assertEquals("azkaban.test.jobtype.FakeJavaJob2", tloadProps.get("jobtype.class"));
- assertEquals("commonprivate1", tloadProps.get("commonprivate1"));
- assertEquals("commonprivate2", tloadProps.get("commonprivate2"));
- assertEquals("private3", tloadProps.get("commonprivate3"));
- assertEquals("0", tloadProps.get("testprivate"));
- // Job props
- assertEquals("commonprop1", tjobProps.get("commonprop1"));
- assertEquals("commonprop2", tjobProps.get("commonprop2"));
- assertEquals("1", tjobProps.get("pluginprops1"));
- assertEquals("2", tjobProps.get("pluginprops2"));
- assertEquals("3", tjobProps.get("pluginprops3"));
- assertEquals("pluginprops", tjobProps.get("commonprop3"));
- // Testing that the private properties aren't shared with the public ones
- assertNull(tjobProps.get("commonprivate1"));
- assertNull(tjobProps.get("testprivate"));
- }
-
- /**
- * Test building classes
- * @throws Exception
- */
- @Test
- public void testBuildClass() throws Exception {
- Props jobProps = new Props();
- jobProps.put("type", "anothertestjob");
- jobProps.put("test","test1");
- jobProps.put("pluginprops3","4");
- Job job = manager.buildJobExecutor("anothertestjob", jobProps, logger);
-
- assertTrue(job instanceof FakeJavaJob);
- FakeJavaJob fjj = (FakeJavaJob)job;
-
- Props props = fjj.getJobProps();
- assertEquals("test1", props.get("test"));
- assertNull(props.get("pluginprops1"));
- assertEquals("4", props.get("pluginprops3"));
- assertEquals("commonprop1", props.get("commonprop1"));
- assertEquals("commonprop2", props.get("commonprop2"));
- assertEquals("commonprop3", props.get("commonprop3"));
- assertNull(props.get("commonprivate1"));
- }
-
- /**
- * Test building classes 2
- * @throws Exception
- */
- @Test
- public void testBuildClass2() throws Exception {
- Props jobProps = new Props();
- jobProps.put("type", "testjob");
- jobProps.put("test","test1");
- jobProps.put("pluginprops3","4");
- Job job = manager.buildJobExecutor("testjob", jobProps, logger);
-
- assertTrue(job instanceof FakeJavaJob2);
- FakeJavaJob2 fjj = (FakeJavaJob2)job;
-
- Props props = fjj.getJobProps();
- assertEquals("test1", props.get("test"));
- assertEquals("1", props.get("pluginprops1"));
- assertEquals("2", props.get("pluginprops2"));
- assertEquals("4", props.get("pluginprops3")); // Overridden value
- assertEquals("commonprop1", props.get("commonprop1"));
- assertEquals("commonprop2", props.get("commonprop2"));
- assertEquals("pluginprops", props.get("commonprop3"));
- assertNull(props.get("commonprivate1"));
- }
-
- /**
- * Test out reloading properties
- * @throws Exception
- */
- @Test
- public void testResetPlugins() throws Exception {
- // Add a plugins file to the anothertestjob folder
- File anothertestfolder = new File(TEST_PLUGIN_DIR + "/anothertestjob");
- Props pluginProps = new Props();
- pluginProps.put("test1", "1");
- pluginProps.put("test2", "2");
- pluginProps.put("pluginprops3","4");
- pluginProps.storeFlattened(new File(anothertestfolder, "plugin.properties"));
-
- // clone the testjob folder
- File testFolder = new File(TEST_PLUGIN_DIR + "/testjob");
- FileUtils.copyDirectory(testFolder, new File(TEST_PLUGIN_DIR + "/newtestjob"));
-
- // change the common properties
- Props commonPlugin = new Props(null, TEST_PLUGIN_DIR + "/common.properties");
- commonPlugin.put("commonprop1", "1");
- commonPlugin.put("newcommonprop1", "2");
- commonPlugin.removeLocal("commonprop2");
- commonPlugin.storeFlattened(new File(TEST_PLUGIN_DIR + "/common.properties"));
-
- // change the common properties
- Props commonPrivate = new Props(null, TEST_PLUGIN_DIR + "/commonprivate.properties");
- commonPrivate.put("commonprivate1", "1");
- commonPrivate.put("newcommonprivate1", "2");
- commonPrivate.removeLocal("commonprivate2");
- commonPrivate.storeFlattened(new File(TEST_PLUGIN_DIR + "/commonprivate.properties"));
-
- // change testjob private property
- Props loadProps = new Props(null, TEST_PLUGIN_DIR + "/testjob/private.properties");
- loadProps.put("privatetest", "test");
-
- /*
- * Reload the plugins here!!
- */
- manager.loadPlugins();
-
- // Checkout common props
- JobTypePluginSet pluginSet = manager.getJobTypePluginSet();
- Props commonProps = pluginSet.getCommonPluginJobProps();
- assertEquals("1", commonProps.get("commonprop1"));
- assertEquals("commonprop3", commonProps.get("commonprop3"));
- assertEquals("2", commonProps.get("newcommonprop1"));
- assertNull(commonProps.get("commonprop2"));
-
- // Checkout common private
- Props commonPrivateProps = pluginSet.getCommonPluginLoadProps();
- assertEquals("1", commonPrivateProps.get("commonprivate1"));
- assertEquals("commonprivate3", commonPrivateProps.get("commonprivate3"));
- assertEquals("2", commonPrivateProps.get("newcommonprivate1"));
- assertNull(commonPrivateProps.get("commonprivate2"));
-
- // Verify anothertestjob changes
- Class<? extends Job> atjClass = pluginSet.getPluginClass("anothertestjob");
- assertEquals("azkaban.test.jobtype.FakeJavaJob", atjClass.getName());
- Props ajobProps = pluginSet.getPluginJobProps("anothertestjob");
- assertEquals("1", ajobProps.get("test1"));
- assertEquals("2", ajobProps.get("test2"));
- assertEquals("4", ajobProps.get("pluginprops3"));
- assertEquals("commonprop3", ajobProps.get("commonprop3"));
-
- Props aloadProps = pluginSet.getPluginLoaderProps("anothertestjob");
- assertEquals("1", aloadProps.get("commonprivate1"));
- assertNull(aloadProps.get("commonprivate2"));
- assertEquals("commonprivate3", aloadProps.get("commonprivate3"));
-
- // Verify testjob changes
- Class<? extends Job> tjClass = pluginSet.getPluginClass("testjob");
- assertEquals("azkaban.test.jobtype.FakeJavaJob2", tjClass.getName());
- Props tjobProps = pluginSet.getPluginJobProps("testjob");
- assertEquals("1", tjobProps.get("commonprop1"));
- assertEquals("2", tjobProps.get("newcommonprop1"));
- assertEquals("1", tjobProps.get("pluginprops1"));
- assertEquals("2", tjobProps.get("pluginprops2"));
- assertEquals("3", tjobProps.get("pluginprops3"));
- assertEquals("pluginprops", tjobProps.get("commonprop3"));
- assertNull(tjobProps.get("commonprop2"));
-
- Props tloadProps = pluginSet.getPluginLoaderProps("testjob");
- assertNull(tloadProps.get("jobtype.classpath"));
- assertEquals("azkaban.test.jobtype.FakeJavaJob2", tloadProps.get("jobtype.class"));
- assertEquals("1", tloadProps.get("commonprivate1"));
- assertNull(tloadProps.get("commonprivate2"));
- assertEquals("private3", tloadProps.get("commonprivate3"));
-
- // Verify newtestjob
- Class<? extends Job> ntPluginClass = pluginSet.getPluginClass("newtestjob");
- assertEquals("azkaban.test.jobtype.FakeJavaJob2", ntPluginClass.getName());
- Props ntjobProps = pluginSet.getPluginJobProps("newtestjob");
- Props ntloadProps = pluginSet.getPluginLoaderProps("newtestjob");
-
- // Loader props
- assertNull(ntloadProps.get("jobtype.classpath"));
- assertEquals("azkaban.test.jobtype.FakeJavaJob2", ntloadProps.get("jobtype.class"));
- assertEquals("1", ntloadProps.get("commonprivate1"));
- assertNull(ntloadProps.get("commonprivate2"));
- assertEquals("private3", ntloadProps.get("commonprivate3"));
- assertEquals("0", ntloadProps.get("testprivate"));
- // Job props
- assertEquals("1", ntjobProps.get("commonprop1"));
- assertNull(ntjobProps.get("commonprop2"));
- assertEquals("1", ntjobProps.get("pluginprops1"));
- assertEquals("2", ntjobProps.get("pluginprops2"));
- assertEquals("3", ntjobProps.get("pluginprops3"));
- assertEquals("pluginprops", ntjobProps.get("commonprop3"));
- }
+ public static String TEST_PLUGIN_DIR = "jobtypes_test";
+ private Logger logger = Logger.getLogger(JobTypeManagerTest.class);
+ private JobTypeManager manager;
+
+ public JobTypeManagerTest() {
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ File jobTypeDir = new File(TEST_PLUGIN_DIR);
+ jobTypeDir.mkdirs();
+
+ FileUtils.copyDirectory(new File("unit/plugins/jobtypes"), jobTypeDir);
+ manager =
+ new JobTypeManager(TEST_PLUGIN_DIR, null, this.getClass()
+ .getClassLoader());
+ }
+
+ @After
+ public void tearDown() throws IOException {
+ FileUtils.deleteDirectory(new File(TEST_PLUGIN_DIR));
+ }
+
+ /**
+ * Tests that the common and common private properties are loaded correctly
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testCommonPluginProps() throws Exception {
+ JobTypePluginSet pluginSet = manager.getJobTypePluginSet();
+
+ Props props = pluginSet.getCommonPluginJobProps();
+ System.out.println(props.toString());
+ assertEquals("commonprop1", props.getString("commonprop1"));
+ assertEquals("commonprop2", props.getString("commonprop2"));
+ assertEquals("commonprop3", props.getString("commonprop3"));
+
+ Props priv = pluginSet.getCommonPluginLoadProps();
+ assertEquals("commonprivate1", priv.getString("commonprivate1"));
+ assertEquals("commonprivate2", priv.getString("commonprivate2"));
+ assertEquals("commonprivate3", priv.getString("commonprivate3"));
+ }
+
+ /**
+ * Tests that the proper classes were loaded and that the common and the load
+ * properties are properly loaded.
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testLoadedClasses() throws Exception {
+ JobTypePluginSet pluginSet = manager.getJobTypePluginSet();
+
+ Props props = pluginSet.getCommonPluginJobProps();
+ System.out.println(props.toString());
+ assertEquals("commonprop1", props.getString("commonprop1"));
+ assertEquals("commonprop2", props.getString("commonprop2"));
+ assertEquals("commonprop3", props.getString("commonprop3"));
+ assertNull(props.get("commonprivate1"));
+
+ Props priv = pluginSet.getCommonPluginLoadProps();
+ assertEquals("commonprivate1", priv.getString("commonprivate1"));
+ assertEquals("commonprivate2", priv.getString("commonprivate2"));
+ assertEquals("commonprivate3", priv.getString("commonprivate3"));
+
+ // Testing the anothertestjobtype
+ Class<? extends Job> aPluginClass =
+ pluginSet.getPluginClass("anothertestjob");
+ assertEquals("azkaban.test.jobtype.FakeJavaJob", aPluginClass.getName());
+ Props ajobProps = pluginSet.getPluginJobProps("anothertestjob");
+ Props aloadProps = pluginSet.getPluginLoaderProps("anothertestjob");
+
+ // Loader props
+ assertEquals("lib/*", aloadProps.get("jobtype.classpath"));
+ assertEquals("azkaban.test.jobtype.FakeJavaJob",
+ aloadProps.get("jobtype.class"));
+ assertEquals("commonprivate1", aloadProps.get("commonprivate1"));
+ assertEquals("commonprivate2", aloadProps.get("commonprivate2"));
+ assertEquals("commonprivate3", aloadProps.get("commonprivate3"));
+ // Job props
+ assertEquals("commonprop1", ajobProps.get("commonprop1"));
+ assertEquals("commonprop2", ajobProps.get("commonprop2"));
+ assertEquals("commonprop3", ajobProps.get("commonprop3"));
+ assertNull(ajobProps.get("commonprivate1"));
+
+ Class<? extends Job> tPluginClass = pluginSet.getPluginClass("testjob");
+ assertEquals("azkaban.test.jobtype.FakeJavaJob2", tPluginClass.getName());
+ Props tjobProps = pluginSet.getPluginJobProps("testjob");
+ Props tloadProps = pluginSet.getPluginLoaderProps("testjob");
+
+ // Loader props
+ assertNull(tloadProps.get("jobtype.classpath"));
+ assertEquals("azkaban.test.jobtype.FakeJavaJob2",
+ tloadProps.get("jobtype.class"));
+ assertEquals("commonprivate1", tloadProps.get("commonprivate1"));
+ assertEquals("commonprivate2", tloadProps.get("commonprivate2"));
+ assertEquals("private3", tloadProps.get("commonprivate3"));
+ assertEquals("0", tloadProps.get("testprivate"));
+ // Job props
+ assertEquals("commonprop1", tjobProps.get("commonprop1"));
+ assertEquals("commonprop2", tjobProps.get("commonprop2"));
+ assertEquals("1", tjobProps.get("pluginprops1"));
+ assertEquals("2", tjobProps.get("pluginprops2"));
+ assertEquals("3", tjobProps.get("pluginprops3"));
+ assertEquals("pluginprops", tjobProps.get("commonprop3"));
+ // Testing that the private properties aren't shared with the public ones
+ assertNull(tjobProps.get("commonprivate1"));
+ assertNull(tjobProps.get("testprivate"));
+ }
+
+ /**
+ * Test building classes
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testBuildClass() throws Exception {
+ Props jobProps = new Props();
+ jobProps.put("type", "anothertestjob");
+ jobProps.put("test", "test1");
+ jobProps.put("pluginprops3", "4");
+ Job job = manager.buildJobExecutor("anothertestjob", jobProps, logger);
+
+ assertTrue(job instanceof FakeJavaJob);
+ FakeJavaJob fjj = (FakeJavaJob) job;
+
+ Props props = fjj.getJobProps();
+ assertEquals("test1", props.get("test"));
+ assertNull(props.get("pluginprops1"));
+ assertEquals("4", props.get("pluginprops3"));
+ assertEquals("commonprop1", props.get("commonprop1"));
+ assertEquals("commonprop2", props.get("commonprop2"));
+ assertEquals("commonprop3", props.get("commonprop3"));
+ assertNull(props.get("commonprivate1"));
+ }
+
+ /**
+ * Test building classes 2
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testBuildClass2() throws Exception {
+ Props jobProps = new Props();
+ jobProps.put("type", "testjob");
+ jobProps.put("test", "test1");
+ jobProps.put("pluginprops3", "4");
+ Job job = manager.buildJobExecutor("testjob", jobProps, logger);
+
+ assertTrue(job instanceof FakeJavaJob2);
+ FakeJavaJob2 fjj = (FakeJavaJob2) job;
+
+ Props props = fjj.getJobProps();
+ assertEquals("test1", props.get("test"));
+ assertEquals("1", props.get("pluginprops1"));
+ assertEquals("2", props.get("pluginprops2"));
+ assertEquals("4", props.get("pluginprops3")); // Overridden value
+ assertEquals("commonprop1", props.get("commonprop1"));
+ assertEquals("commonprop2", props.get("commonprop2"));
+ assertEquals("pluginprops", props.get("commonprop3"));
+ assertNull(props.get("commonprivate1"));
+ }
+
+ /**
+ * Test out reloading properties
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testResetPlugins() throws Exception {
+ // Add a plugins file to the anothertestjob folder
+ File anothertestfolder = new File(TEST_PLUGIN_DIR + "/anothertestjob");
+ Props pluginProps = new Props();
+ pluginProps.put("test1", "1");
+ pluginProps.put("test2", "2");
+ pluginProps.put("pluginprops3", "4");
+ pluginProps
+ .storeFlattened(new File(anothertestfolder, "plugin.properties"));
+
+ // clone the testjob folder
+ File testFolder = new File(TEST_PLUGIN_DIR + "/testjob");
+ FileUtils.copyDirectory(testFolder, new File(TEST_PLUGIN_DIR
+ + "/newtestjob"));
+
+ // change the common properties
+ Props commonPlugin =
+ new Props(null, TEST_PLUGIN_DIR + "/common.properties");
+ commonPlugin.put("commonprop1", "1");
+ commonPlugin.put("newcommonprop1", "2");
+ commonPlugin.removeLocal("commonprop2");
+ commonPlugin
+ .storeFlattened(new File(TEST_PLUGIN_DIR + "/common.properties"));
+
+ // change the common properties
+ Props commonPrivate =
+ new Props(null, TEST_PLUGIN_DIR + "/commonprivate.properties");
+ commonPrivate.put("commonprivate1", "1");
+ commonPrivate.put("newcommonprivate1", "2");
+ commonPrivate.removeLocal("commonprivate2");
+ commonPrivate.storeFlattened(new File(TEST_PLUGIN_DIR
+ + "/commonprivate.properties"));
+
+ // change testjob private property
+ Props loadProps =
+ new Props(null, TEST_PLUGIN_DIR + "/testjob/private.properties");
+ loadProps.put("privatetest", "test");
+
+ /*
+ * Reload the plugins here!!
+ */
+ manager.loadPlugins();
+
+ // Checkout common props
+ JobTypePluginSet pluginSet = manager.getJobTypePluginSet();
+ Props commonProps = pluginSet.getCommonPluginJobProps();
+ assertEquals("1", commonProps.get("commonprop1"));
+ assertEquals("commonprop3", commonProps.get("commonprop3"));
+ assertEquals("2", commonProps.get("newcommonprop1"));
+ assertNull(commonProps.get("commonprop2"));
+
+ // Checkout common private
+ Props commonPrivateProps = pluginSet.getCommonPluginLoadProps();
+ assertEquals("1", commonPrivateProps.get("commonprivate1"));
+ assertEquals("commonprivate3", commonPrivateProps.get("commonprivate3"));
+ assertEquals("2", commonPrivateProps.get("newcommonprivate1"));
+ assertNull(commonPrivateProps.get("commonprivate2"));
+
+ // Verify anothertestjob changes
+ Class<? extends Job> atjClass = pluginSet.getPluginClass("anothertestjob");
+ assertEquals("azkaban.test.jobtype.FakeJavaJob", atjClass.getName());
+ Props ajobProps = pluginSet.getPluginJobProps("anothertestjob");
+ assertEquals("1", ajobProps.get("test1"));
+ assertEquals("2", ajobProps.get("test2"));
+ assertEquals("4", ajobProps.get("pluginprops3"));
+ assertEquals("commonprop3", ajobProps.get("commonprop3"));
+
+ Props aloadProps = pluginSet.getPluginLoaderProps("anothertestjob");
+ assertEquals("1", aloadProps.get("commonprivate1"));
+ assertNull(aloadProps.get("commonprivate2"));
+ assertEquals("commonprivate3", aloadProps.get("commonprivate3"));
+
+ // Verify testjob changes
+ Class<? extends Job> tjClass = pluginSet.getPluginClass("testjob");
+ assertEquals("azkaban.test.jobtype.FakeJavaJob2", tjClass.getName());
+ Props tjobProps = pluginSet.getPluginJobProps("testjob");
+ assertEquals("1", tjobProps.get("commonprop1"));
+ assertEquals("2", tjobProps.get("newcommonprop1"));
+ assertEquals("1", tjobProps.get("pluginprops1"));
+ assertEquals("2", tjobProps.get("pluginprops2"));
+ assertEquals("3", tjobProps.get("pluginprops3"));
+ assertEquals("pluginprops", tjobProps.get("commonprop3"));
+ assertNull(tjobProps.get("commonprop2"));
+
+ Props tloadProps = pluginSet.getPluginLoaderProps("testjob");
+ assertNull(tloadProps.get("jobtype.classpath"));
+ assertEquals("azkaban.test.jobtype.FakeJavaJob2",
+ tloadProps.get("jobtype.class"));
+ assertEquals("1", tloadProps.get("commonprivate1"));
+ assertNull(tloadProps.get("commonprivate2"));
+ assertEquals("private3", tloadProps.get("commonprivate3"));
+
+ // Verify newtestjob
+ Class<? extends Job> ntPluginClass = pluginSet.getPluginClass("newtestjob");
+ assertEquals("azkaban.test.jobtype.FakeJavaJob2", ntPluginClass.getName());
+ Props ntjobProps = pluginSet.getPluginJobProps("newtestjob");
+ Props ntloadProps = pluginSet.getPluginLoaderProps("newtestjob");
+
+ // Loader props
+ assertNull(ntloadProps.get("jobtype.classpath"));
+ assertEquals("azkaban.test.jobtype.FakeJavaJob2",
+ ntloadProps.get("jobtype.class"));
+ assertEquals("1", ntloadProps.get("commonprivate1"));
+ assertNull(ntloadProps.get("commonprivate2"));
+ assertEquals("private3", ntloadProps.get("commonprivate3"));
+ assertEquals("0", ntloadProps.get("testprivate"));
+ // Job props
+ assertEquals("1", ntjobProps.get("commonprop1"));
+ assertNull(ntjobProps.get("commonprop2"));
+ assertEquals("1", ntjobProps.get("pluginprops1"));
+ assertEquals("2", ntjobProps.get("pluginprops2"));
+ assertEquals("3", ntjobProps.get("pluginprops3"));
+ assertEquals("pluginprops", ntjobProps.get("commonprop3"));
+ }
}
unit/java/azkaban/test/project/JdbcProjectLoaderTest.java 1040(+534 -506)
diff --git a/unit/java/azkaban/test/project/JdbcProjectLoaderTest.java b/unit/java/azkaban/test/project/JdbcProjectLoaderTest.java
index 7474c27..96e3991 100644
--- a/unit/java/azkaban/test/project/JdbcProjectLoaderTest.java
+++ b/unit/java/azkaban/test/project/JdbcProjectLoaderTest.java
@@ -34,510 +34,538 @@ import azkaban.utils.Props;
import azkaban.utils.PropsUtils;
public class JdbcProjectLoaderTest {
- private static boolean testDBExists;
- private static final String host = "localhost";
- private static final int port = 3306;
- private static final String database = "test";
- private static final String user = "azkaban";
- private static final String password = "azkaban";
- private static final int numConnections = 10;
-
- @BeforeClass
- public static void setupDB() {
- DataSource dataSource = DataSourceUtils.getMySQLDataSource(host, port, database, user, password, numConnections);
- testDBExists = true;
-
- Connection connection = null;
- try {
- connection = dataSource.getConnection();
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- CountHandler countHandler = new CountHandler();
- QueryRunner runner = new QueryRunner();
- try {
- runner.query(connection, "SELECT COUNT(1) FROM projects", countHandler);
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- try {
- runner.query(connection, "SELECT COUNT(1) FROM project_events", countHandler);
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- try {
- runner.query(connection, "SELECT COUNT(1) FROM project_permissions", countHandler);
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- try {
- runner.query(connection, "SELECT COUNT(1) FROM project_files", countHandler);
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- try {
- runner.query(connection, "SELECT COUNT(1) FROM project_flows", countHandler);
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- try {
- runner.query(connection, "SELECT COUNT(1) FROM project_properties", countHandler);
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- DbUtils.closeQuietly(connection);
-
- clearDB();
- }
-
- private static void clearDB() {
- if (!testDBExists) {
- return;
- }
-
- DataSource dataSource = DataSourceUtils.getMySQLDataSource(host, port, database, user, password, numConnections);
- Connection connection = null;
- try {
- connection = dataSource.getConnection();
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- QueryRunner runner = new QueryRunner();
- try {
- runner.update(connection, "DELETE FROM projects");
-
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- try {
- runner.update(connection, "DELETE FROM project_events");
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- try {
- runner.update(connection, "DELETE FROM project_permissions");
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- try {
- runner.update(connection, "DELETE FROM project_files");
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- try {
- runner.update(connection, "DELETE FROM project_flows");
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- try {
- runner.update(connection, "DELETE FROM project_properties");
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
-
- DbUtils.closeQuietly(connection);
- }
-
- @Test
- public void testCreateProject() throws ProjectManagerException {
- if (!isTestSetup()) {
- return;
- }
-
- ProjectLoader loader = createLoader();
- String projectName = "mytestProject";
- String projectDescription = "This is my new project";
- User user = new User("testUser");
-
- Project project = loader.createNewProject(projectName, projectDescription, user);
- Assert.assertTrue("Project Id set", project.getId() > -1);
- Assert.assertEquals("Project name", projectName, project.getName());
- Assert.assertEquals("Project description", projectDescription, project.getDescription());
-
- System.out.println("Test true");
- Project project2 = loader.fetchProjectById(project.getId());
- assertProjectMemberEquals(project, project2);
- }
-
- @Test
- public void testRemoveProject() throws ProjectManagerException {
- if (!isTestSetup()) {
- return;
- }
-
- ProjectLoader loader = createLoader();
- String projectName = "testRemoveProject";
- String projectDescription = "This is my new project";
- User user = new User("testUser");
-
- Project project = loader.createNewProject(projectName, projectDescription, user);
- Assert.assertTrue("Project Id set", project.getId() > -1);
- Assert.assertEquals("Project name", projectName, project.getName());
- Assert.assertEquals("Project description", projectDescription, project.getDescription());
-
- Project project2 = loader.fetchProjectById(project.getId());
- assertProjectMemberEquals(project, project2);
- loader.removeProject(project, user.getUserId());
-
- Project project3 = loader.fetchProjectById(project.getId());
- Assert.assertFalse(project3.isActive());
-
- List<Project> projList = loader.fetchAllActiveProjects();
- for (Project proj: projList) {
- Assert.assertTrue(proj.getId() != project.getId());
- }
- }
-
- @Test
- public void testAddRemovePermissions() throws ProjectManagerException {
- if (!isTestSetup()) {
- return;
- }
-
- ProjectLoader loader = createLoader();
- String projectName = "mytestProject1";
- String projectDescription = "This is my new project";
- User user = new User("testUser");
-
- Project project = loader.createNewProject(projectName, projectDescription, user);
- Assert.assertTrue("Project Id set", project.getId() > -1);
- Assert.assertEquals("Project name", projectName, project.getName());
- Assert.assertEquals("Project description", projectDescription, project.getDescription());
-
- Permission perm = new Permission(0x2);
- loader.updatePermission(project, user.getUserId(), new Permission(0x2), false);
- loader.updatePermission(project, "group1", new Permission(0x2), true);
- Assert.assertEquals(perm, project.getUserPermission(user.getUserId()));
-
- Permission permOverride = new Permission(0x6);
- loader.updatePermission(project, user.getUserId(), permOverride, false);
- Assert.assertEquals(permOverride, project.getUserPermission(user.getUserId()));
-
- Project project2 = loader.fetchProjectById(project.getId());
- assertProjectMemberEquals(project, project2);
- Assert.assertEquals(permOverride, project2.getUserPermission(user.getUserId()));
- }
-
- @Test
- public void testProjectEventLogs() throws ProjectManagerException {
- if (!isTestSetup()) {
- return;
- }
-
- ProjectLoader loader = createLoader();
- String projectName = "testProjectEventLogs";
- String projectDescription = "This is my new project";
- User user = new User("testUser");
-
- String message = "My message";
- EventType type = EventType.USER_PERMISSION;
- Project project = loader.createNewProject(projectName, projectDescription, user);
- loader.postEvent(project, type, user.getUserId(), message);
-
- List<ProjectLogEvent> events = loader.getProjectEvents(project, 10, 0);
- Assert.assertTrue(events.size() == 1);
-
- ProjectLogEvent event = events.get(0);
- Assert.assertEquals(event.getProjectId(), project.getId());
- Assert.assertEquals(event.getUser(), user.getUserId());
- Assert.assertEquals(event.getMessage(), message);
- Assert.assertEquals(event.getType(), type);
- }
-
- @Test
- public void testFlowUpload() throws ProjectManagerException {
- ProjectLoader loader = createLoader();
- ((JdbcProjectLoader)loader).setDefaultEncodingType(JdbcProjectLoader.EncodingType.GZIP);
- String projectName = "mytestFlowUpload1";
- String projectDescription = "This is my new project";
- User user = new User("testUser");
-
- Project project = loader.createNewProject(projectName, projectDescription, user);
-
- Flow flow = new Flow("MyNewFlow");
-
- flow.addNode(new Node("A"));
- flow.addNode(new Node("B"));
- flow.addNode(new Node("C"));
- flow.addNode(new Node("D"));
-
- flow.addEdge(new Edge("A", "B"));
- flow.addEdge(new Edge("A", "C"));
- flow.addEdge(new Edge("B", "D"));
- flow.addEdge(new Edge("C", "D"));
-
- flow.initialize();
-
- loader.uploadFlow(project, 4, flow);
- project.setVersion(4);
- Flow newFlow = loader.fetchFlow(project, flow.getId());
- Assert.assertTrue(newFlow != null);
- Assert.assertEquals(flow.getId(), newFlow.getId());
- Assert.assertEquals(flow.getEdges().size(), newFlow.getEdges().size());
- Assert.assertEquals(flow.getNodes().size(), newFlow.getNodes().size());
- }
-
- @Test
- public void testFlowUploadPlain() throws ProjectManagerException {
- ProjectLoader loader = createLoader();
- ((JdbcProjectLoader)loader).setDefaultEncodingType(JdbcProjectLoader.EncodingType.PLAIN);
- String projectName = "mytestFlowUpload2";
- String projectDescription = "This is my new project";
- User user = new User("testUser");
-
- Project project = loader.createNewProject(projectName, projectDescription, user);
-
- Flow flow = new Flow("MyNewFlow2");
-
- flow.addNode(new Node("A1"));
- flow.addNode(new Node("B1"));
- flow.addNode(new Node("C1"));
- flow.addNode(new Node("D1"));
-
- flow.addEdge(new Edge("A1", "B1"));
- flow.addEdge(new Edge("A1", "C1"));
- flow.addEdge(new Edge("B1", "D1"));
- flow.addEdge(new Edge("C1", "D1"));
-
- flow.initialize();
-
- loader.uploadFlow(project, 4, flow);
- project.setVersion(4);
- Flow newFlow = loader.fetchFlow(project, flow.getId());
- Assert.assertTrue(newFlow != null);
- Assert.assertEquals(flow.getId(), newFlow.getId());
- Assert.assertEquals(flow.getEdges().size(), newFlow.getEdges().size());
- Assert.assertEquals(flow.getNodes().size(), newFlow.getNodes().size());
-
- List<Flow> flows = loader.fetchAllProjectFlows(project);
- Assert.assertTrue(flows.size() == 1);
- }
-
- @Test
- public void testProjectProperties() throws ProjectManagerException {
- ProjectLoader loader = createLoader();
- ((JdbcProjectLoader)loader).setDefaultEncodingType(JdbcProjectLoader.EncodingType.PLAIN);
- String projectName = "testProjectProperties";
- String projectDescription = "This is my new project";
- User user = new User("testUser");
-
- Project project = loader.createNewProject(projectName, projectDescription, user);
- project.setVersion(5);
- Props props = new Props();
- props.put("a", "abc");
- props.put("b", "bcd");
- props.put("c", "cde");
- props.setSource("mysource");
- loader.uploadProjectProperty(project, props);
-
- Props retProps = loader.fetchProjectProperty(project, "mysource");
-
- Assert.assertEquals(retProps.getSource(), props.getSource());
- Assert.assertEquals(retProps.getKeySet(), props.getKeySet());
- Assert.assertEquals(PropsUtils.toStringMap(retProps, true), PropsUtils.toStringMap(props, true));
- }
-
- @Test
- public void testProjectFilesUpload() throws ProjectManagerException {
- if (!isTestSetup()) {
- return;
- }
-
- ProjectLoader loader = createLoader();
- String projectName = "testProjectFilesUpload1";
- String projectDescription = "This is my new project";
- User user = new User("testUser");
-
- Project project = loader.createNewProject(projectName, projectDescription, user);
- Assert.assertTrue("Project Id set", project.getId() > -1);
- Assert.assertEquals("Project name", projectName, project.getName());
- Assert.assertEquals("Project description", projectDescription, project.getDescription());
-
- File testDir = new File("unit/project/testjob/testjob.zip");
-
- loader.uploadProjectFile(project, 1, "zip", "testjob.zip", testDir, user.getUserId());
-
- ProjectFileHandler handler = loader.getUploadedFile(project, 1);
- Assert.assertEquals(handler.getProjectId(), project.getId());
- Assert.assertEquals(handler.getFileName(), "testjob.zip");
- Assert.assertEquals(handler.getVersion(), 1);
- Assert.assertEquals(handler.getFileType(), "zip");
- File file = handler.getLocalFile();
- Assert.assertTrue(handler.getLocalFile().exists());
- Assert.assertEquals(handler.getFileName(), "testjob.zip");
- Assert.assertEquals(handler.getUploader(), user.getUserId());
-
- handler.deleteLocalFile();
- Assert.assertTrue(handler.getLocalFile() == null);
- Assert.assertFalse(file.exists());
- }
-
- // Custom equals for what I think is important
- private void assertProjectMemberEquals(Project p1, Project p2) {
- Assert.assertEquals(p1.getId(), p2.getId());
- Assert.assertEquals(p1.getName(), p2.getName());
- Assert.assertEquals(p1.getCreateTimestamp(), p2.getCreateTimestamp());
- Assert.assertEquals(p1.getDescription(), p2.getDescription());
- Assert.assertEquals(p1.getLastModifiedUser(), p2.getLastModifiedUser());
- Assert.assertEquals(p1.getVersion(), p2.getVersion());
- Assert.assertEquals(p1.isActive(), p2.isActive());
- Assert.assertEquals(p1.getLastModifiedUser(), p2.getLastModifiedUser());
-
- assertUserPermissionsEqual(p1, p2);
- assertGroupPermissionsEqual(p1, p2);
- }
-
- private void assertUserPermissionsEqual(Project p1, Project p2) {
- List<Pair<String, Permission>> perm1 = p1.getUserPermissions();
- List<Pair<String, Permission>> perm2 = p2.getUserPermissions();
-
- Assert.assertEquals(perm1.size(), perm2.size());
-
- {
- HashMap<String, Permission> perm1Map = new HashMap<String, Permission>();
- for (Pair<String, Permission> p: perm1) {
- perm1Map.put(p.getFirst(), p.getSecond());
- }
- for (Pair<String, Permission> p: perm2) {
- Assert.assertTrue(perm1Map.containsKey(p.getFirst()));
- Permission perm = perm1Map.get(p.getFirst());
- Assert.assertEquals(perm, p.getSecond());
- }
- }
-
- {
- HashMap<String, Permission> perm2Map = new HashMap<String, Permission>();
- for (Pair<String, Permission> p: perm2) {
- perm2Map.put(p.getFirst(), p.getSecond());
- }
- for (Pair<String, Permission> p: perm1) {
- Assert.assertTrue(perm2Map.containsKey(p.getFirst()));
- Permission perm = perm2Map.get(p.getFirst());
- Assert.assertEquals(perm, p.getSecond());
- }
- }
- }
-
- private void assertGroupPermissionsEqual(Project p1, Project p2) {
- List<Pair<String, Permission>> perm1 = p1.getGroupPermissions();
- List<Pair<String, Permission>> perm2 = p2.getGroupPermissions();
-
- Assert.assertEquals(perm1.size(), perm2.size());
-
- {
- HashMap<String, Permission> perm1Map = new HashMap<String, Permission>();
- for (Pair<String, Permission> p: perm1) {
- perm1Map.put(p.getFirst(), p.getSecond());
- }
- for (Pair<String, Permission> p: perm2) {
- Assert.assertTrue(perm1Map.containsKey(p.getFirst()));
- Permission perm = perm1Map.get(p.getFirst());
- Assert.assertEquals(perm, p.getSecond());
- }
- }
-
- {
- HashMap<String, Permission> perm2Map = new HashMap<String, Permission>();
- for (Pair<String, Permission> p: perm2) {
- perm2Map.put(p.getFirst(), p.getSecond());
- }
- for (Pair<String, Permission> p: perm1) {
- Assert.assertTrue(perm2Map.containsKey(p.getFirst()));
- Permission perm = perm2Map.get(p.getFirst());
- Assert.assertEquals(perm, p.getSecond());
- }
- }
- }
-
- private ProjectLoader createLoader() {
- Props props = new Props();
- props.put("database.type", "mysql");
-
- props.put("mysql.host", host);
- props.put("mysql.port", port);
- props.put("mysql.user", user);
- props.put("mysql.database", database);
- props.put("mysql.password", password);
- props.put("mysql.numconnections", numConnections);
-
- return new JdbcProjectLoader(props);
- }
-
- private boolean isTestSetup() {
- if (!testDBExists) {
- System.err.println("Skipping DB test because Db not setup.");
- return false;
- }
-
- System.out.println("Running DB test because Db setup.");
- return true;
- }
-
- public static class CountHandler implements ResultSetHandler<Integer> {
- @Override
- public Integer handle(ResultSet rs) throws SQLException {
- int val = 0;
- while (rs.next()) {
- val++;
- }
-
- return val;
- }
- }
+ private static boolean testDBExists;
+ private static final String host = "localhost";
+ private static final int port = 3306;
+ private static final String database = "test";
+ private static final String user = "azkaban";
+ private static final String password = "azkaban";
+ private static final int numConnections = 10;
+
+ @BeforeClass
+ public static void setupDB() {
+ DataSource dataSource =
+ DataSourceUtils.getMySQLDataSource(host, port, database, user,
+ password, numConnections);
+ testDBExists = true;
+
+ Connection connection = null;
+ try {
+ connection = dataSource.getConnection();
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ CountHandler countHandler = new CountHandler();
+ QueryRunner runner = new QueryRunner();
+ try {
+ runner.query(connection, "SELECT COUNT(1) FROM projects", countHandler);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ try {
+ runner.query(connection, "SELECT COUNT(1) FROM project_events",
+ countHandler);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ try {
+ runner.query(connection, "SELECT COUNT(1) FROM project_permissions",
+ countHandler);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ try {
+ runner.query(connection, "SELECT COUNT(1) FROM project_files",
+ countHandler);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ try {
+ runner.query(connection, "SELECT COUNT(1) FROM project_flows",
+ countHandler);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ try {
+ runner.query(connection, "SELECT COUNT(1) FROM project_properties",
+ countHandler);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ DbUtils.closeQuietly(connection);
+
+ clearDB();
+ }
+
+ private static void clearDB() {
+ if (!testDBExists) {
+ return;
+ }
+
+ DataSource dataSource =
+ DataSourceUtils.getMySQLDataSource(host, port, database, user,
+ password, numConnections);
+ Connection connection = null;
+ try {
+ connection = dataSource.getConnection();
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ QueryRunner runner = new QueryRunner();
+ try {
+ runner.update(connection, "DELETE FROM projects");
+
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ try {
+ runner.update(connection, "DELETE FROM project_events");
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ try {
+ runner.update(connection, "DELETE FROM project_permissions");
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ try {
+ runner.update(connection, "DELETE FROM project_files");
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ try {
+ runner.update(connection, "DELETE FROM project_flows");
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ try {
+ runner.update(connection, "DELETE FROM project_properties");
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ DbUtils.closeQuietly(connection);
+ }
+
+ @Test
+ public void testCreateProject() throws ProjectManagerException {
+ if (!isTestSetup()) {
+ return;
+ }
+
+ ProjectLoader loader = createLoader();
+ String projectName = "mytestProject";
+ String projectDescription = "This is my new project";
+ User user = new User("testUser");
+
+ Project project =
+ loader.createNewProject(projectName, projectDescription, user);
+ Assert.assertTrue("Project Id set", project.getId() > -1);
+ Assert.assertEquals("Project name", projectName, project.getName());
+ Assert.assertEquals("Project description", projectDescription,
+ project.getDescription());
+
+ System.out.println("Test true");
+ Project project2 = loader.fetchProjectById(project.getId());
+ assertProjectMemberEquals(project, project2);
+ }
+
+ @Test
+ public void testRemoveProject() throws ProjectManagerException {
+ if (!isTestSetup()) {
+ return;
+ }
+
+ ProjectLoader loader = createLoader();
+ String projectName = "testRemoveProject";
+ String projectDescription = "This is my new project";
+ User user = new User("testUser");
+
+ Project project =
+ loader.createNewProject(projectName, projectDescription, user);
+ Assert.assertTrue("Project Id set", project.getId() > -1);
+ Assert.assertEquals("Project name", projectName, project.getName());
+ Assert.assertEquals("Project description", projectDescription,
+ project.getDescription());
+
+ Project project2 = loader.fetchProjectById(project.getId());
+ assertProjectMemberEquals(project, project2);
+ loader.removeProject(project, user.getUserId());
+
+ Project project3 = loader.fetchProjectById(project.getId());
+ Assert.assertFalse(project3.isActive());
+
+ List<Project> projList = loader.fetchAllActiveProjects();
+ for (Project proj : projList) {
+ Assert.assertTrue(proj.getId() != project.getId());
+ }
+ }
+
+ @Test
+ public void testAddRemovePermissions() throws ProjectManagerException {
+ if (!isTestSetup()) {
+ return;
+ }
+
+ ProjectLoader loader = createLoader();
+ String projectName = "mytestProject1";
+ String projectDescription = "This is my new project";
+ User user = new User("testUser");
+
+ Project project =
+ loader.createNewProject(projectName, projectDescription, user);
+ Assert.assertTrue("Project Id set", project.getId() > -1);
+ Assert.assertEquals("Project name", projectName, project.getName());
+ Assert.assertEquals("Project description", projectDescription,
+ project.getDescription());
+
+ Permission perm = new Permission(0x2);
+ loader.updatePermission(project, user.getUserId(), new Permission(0x2),
+ false);
+ loader.updatePermission(project, "group1", new Permission(0x2), true);
+ Assert.assertEquals(perm, project.getUserPermission(user.getUserId()));
+
+ Permission permOverride = new Permission(0x6);
+ loader.updatePermission(project, user.getUserId(), permOverride, false);
+ Assert.assertEquals(permOverride,
+ project.getUserPermission(user.getUserId()));
+
+ Project project2 = loader.fetchProjectById(project.getId());
+ assertProjectMemberEquals(project, project2);
+ Assert.assertEquals(permOverride,
+ project2.getUserPermission(user.getUserId()));
+ }
+
+ @Test
+ public void testProjectEventLogs() throws ProjectManagerException {
+ if (!isTestSetup()) {
+ return;
+ }
+
+ ProjectLoader loader = createLoader();
+ String projectName = "testProjectEventLogs";
+ String projectDescription = "This is my new project";
+ User user = new User("testUser");
+
+ String message = "My message";
+ EventType type = EventType.USER_PERMISSION;
+ Project project =
+ loader.createNewProject(projectName, projectDescription, user);
+ loader.postEvent(project, type, user.getUserId(), message);
+
+ List<ProjectLogEvent> events = loader.getProjectEvents(project, 10, 0);
+ Assert.assertTrue(events.size() == 1);
+
+ ProjectLogEvent event = events.get(0);
+ Assert.assertEquals(event.getProjectId(), project.getId());
+ Assert.assertEquals(event.getUser(), user.getUserId());
+ Assert.assertEquals(event.getMessage(), message);
+ Assert.assertEquals(event.getType(), type);
+ }
+
+ @Test
+ public void testFlowUpload() throws ProjectManagerException {
+ ProjectLoader loader = createLoader();
+ ((JdbcProjectLoader) loader)
+ .setDefaultEncodingType(JdbcProjectLoader.EncodingType.GZIP);
+ String projectName = "mytestFlowUpload1";
+ String projectDescription = "This is my new project";
+ User user = new User("testUser");
+
+ Project project =
+ loader.createNewProject(projectName, projectDescription, user);
+
+ Flow flow = new Flow("MyNewFlow");
+
+ flow.addNode(new Node("A"));
+ flow.addNode(new Node("B"));
+ flow.addNode(new Node("C"));
+ flow.addNode(new Node("D"));
+
+ flow.addEdge(new Edge("A", "B"));
+ flow.addEdge(new Edge("A", "C"));
+ flow.addEdge(new Edge("B", "D"));
+ flow.addEdge(new Edge("C", "D"));
+
+ flow.initialize();
+
+ loader.uploadFlow(project, 4, flow);
+ project.setVersion(4);
+ Flow newFlow = loader.fetchFlow(project, flow.getId());
+ Assert.assertTrue(newFlow != null);
+ Assert.assertEquals(flow.getId(), newFlow.getId());
+ Assert.assertEquals(flow.getEdges().size(), newFlow.getEdges().size());
+ Assert.assertEquals(flow.getNodes().size(), newFlow.getNodes().size());
+ }
+
+ @Test
+ public void testFlowUploadPlain() throws ProjectManagerException {
+ ProjectLoader loader = createLoader();
+ ((JdbcProjectLoader) loader)
+ .setDefaultEncodingType(JdbcProjectLoader.EncodingType.PLAIN);
+ String projectName = "mytestFlowUpload2";
+ String projectDescription = "This is my new project";
+ User user = new User("testUser");
+
+ Project project =
+ loader.createNewProject(projectName, projectDescription, user);
+
+ Flow flow = new Flow("MyNewFlow2");
+
+ flow.addNode(new Node("A1"));
+ flow.addNode(new Node("B1"));
+ flow.addNode(new Node("C1"));
+ flow.addNode(new Node("D1"));
+
+ flow.addEdge(new Edge("A1", "B1"));
+ flow.addEdge(new Edge("A1", "C1"));
+ flow.addEdge(new Edge("B1", "D1"));
+ flow.addEdge(new Edge("C1", "D1"));
+
+ flow.initialize();
+
+ loader.uploadFlow(project, 4, flow);
+ project.setVersion(4);
+ Flow newFlow = loader.fetchFlow(project, flow.getId());
+ Assert.assertTrue(newFlow != null);
+ Assert.assertEquals(flow.getId(), newFlow.getId());
+ Assert.assertEquals(flow.getEdges().size(), newFlow.getEdges().size());
+ Assert.assertEquals(flow.getNodes().size(), newFlow.getNodes().size());
+
+ List<Flow> flows = loader.fetchAllProjectFlows(project);
+ Assert.assertTrue(flows.size() == 1);
+ }
+
+ @Test
+ public void testProjectProperties() throws ProjectManagerException {
+ ProjectLoader loader = createLoader();
+ ((JdbcProjectLoader) loader)
+ .setDefaultEncodingType(JdbcProjectLoader.EncodingType.PLAIN);
+ String projectName = "testProjectProperties";
+ String projectDescription = "This is my new project";
+ User user = new User("testUser");
+
+ Project project =
+ loader.createNewProject(projectName, projectDescription, user);
+ project.setVersion(5);
+ Props props = new Props();
+ props.put("a", "abc");
+ props.put("b", "bcd");
+ props.put("c", "cde");
+ props.setSource("mysource");
+ loader.uploadProjectProperty(project, props);
+
+ Props retProps = loader.fetchProjectProperty(project, "mysource");
+
+ Assert.assertEquals(retProps.getSource(), props.getSource());
+ Assert.assertEquals(retProps.getKeySet(), props.getKeySet());
+ Assert.assertEquals(PropsUtils.toStringMap(retProps, true),
+ PropsUtils.toStringMap(props, true));
+ }
+
+ @Test
+ public void testProjectFilesUpload() throws ProjectManagerException {
+ if (!isTestSetup()) {
+ return;
+ }
+
+ ProjectLoader loader = createLoader();
+ String projectName = "testProjectFilesUpload1";
+ String projectDescription = "This is my new project";
+ User user = new User("testUser");
+
+ Project project =
+ loader.createNewProject(projectName, projectDescription, user);
+ Assert.assertTrue("Project Id set", project.getId() > -1);
+ Assert.assertEquals("Project name", projectName, project.getName());
+ Assert.assertEquals("Project description", projectDescription,
+ project.getDescription());
+
+ File testDir = new File("unit/project/testjob/testjob.zip");
+
+ loader.uploadProjectFile(project, 1, "zip", "testjob.zip", testDir,
+ user.getUserId());
+
+ ProjectFileHandler handler = loader.getUploadedFile(project, 1);
+ Assert.assertEquals(handler.getProjectId(), project.getId());
+ Assert.assertEquals(handler.getFileName(), "testjob.zip");
+ Assert.assertEquals(handler.getVersion(), 1);
+ Assert.assertEquals(handler.getFileType(), "zip");
+ File file = handler.getLocalFile();
+ Assert.assertTrue(handler.getLocalFile().exists());
+ Assert.assertEquals(handler.getFileName(), "testjob.zip");
+ Assert.assertEquals(handler.getUploader(), user.getUserId());
+
+ handler.deleteLocalFile();
+ Assert.assertTrue(handler.getLocalFile() == null);
+ Assert.assertFalse(file.exists());
+ }
+
+ // Custom equals for what I think is important
+ private void assertProjectMemberEquals(Project p1, Project p2) {
+ Assert.assertEquals(p1.getId(), p2.getId());
+ Assert.assertEquals(p1.getName(), p2.getName());
+ Assert.assertEquals(p1.getCreateTimestamp(), p2.getCreateTimestamp());
+ Assert.assertEquals(p1.getDescription(), p2.getDescription());
+ Assert.assertEquals(p1.getLastModifiedUser(), p2.getLastModifiedUser());
+ Assert.assertEquals(p1.getVersion(), p2.getVersion());
+ Assert.assertEquals(p1.isActive(), p2.isActive());
+ Assert.assertEquals(p1.getLastModifiedUser(), p2.getLastModifiedUser());
+
+ assertUserPermissionsEqual(p1, p2);
+ assertGroupPermissionsEqual(p1, p2);
+ }
+
+ private void assertUserPermissionsEqual(Project p1, Project p2) {
+ List<Pair<String, Permission>> perm1 = p1.getUserPermissions();
+ List<Pair<String, Permission>> perm2 = p2.getUserPermissions();
+
+ Assert.assertEquals(perm1.size(), perm2.size());
+
+ {
+ HashMap<String, Permission> perm1Map = new HashMap<String, Permission>();
+ for (Pair<String, Permission> p : perm1) {
+ perm1Map.put(p.getFirst(), p.getSecond());
+ }
+ for (Pair<String, Permission> p : perm2) {
+ Assert.assertTrue(perm1Map.containsKey(p.getFirst()));
+ Permission perm = perm1Map.get(p.getFirst());
+ Assert.assertEquals(perm, p.getSecond());
+ }
+ }
+
+ {
+ HashMap<String, Permission> perm2Map = new HashMap<String, Permission>();
+ for (Pair<String, Permission> p : perm2) {
+ perm2Map.put(p.getFirst(), p.getSecond());
+ }
+ for (Pair<String, Permission> p : perm1) {
+ Assert.assertTrue(perm2Map.containsKey(p.getFirst()));
+ Permission perm = perm2Map.get(p.getFirst());
+ Assert.assertEquals(perm, p.getSecond());
+ }
+ }
+ }
+
+ private void assertGroupPermissionsEqual(Project p1, Project p2) {
+ List<Pair<String, Permission>> perm1 = p1.getGroupPermissions();
+ List<Pair<String, Permission>> perm2 = p2.getGroupPermissions();
+
+ Assert.assertEquals(perm1.size(), perm2.size());
+
+ {
+ HashMap<String, Permission> perm1Map = new HashMap<String, Permission>();
+ for (Pair<String, Permission> p : perm1) {
+ perm1Map.put(p.getFirst(), p.getSecond());
+ }
+ for (Pair<String, Permission> p : perm2) {
+ Assert.assertTrue(perm1Map.containsKey(p.getFirst()));
+ Permission perm = perm1Map.get(p.getFirst());
+ Assert.assertEquals(perm, p.getSecond());
+ }
+ }
+
+ {
+ HashMap<String, Permission> perm2Map = new HashMap<String, Permission>();
+ for (Pair<String, Permission> p : perm2) {
+ perm2Map.put(p.getFirst(), p.getSecond());
+ }
+ for (Pair<String, Permission> p : perm1) {
+ Assert.assertTrue(perm2Map.containsKey(p.getFirst()));
+ Permission perm = perm2Map.get(p.getFirst());
+ Assert.assertEquals(perm, p.getSecond());
+ }
+ }
+ }
+
+ private ProjectLoader createLoader() {
+ Props props = new Props();
+ props.put("database.type", "mysql");
+
+ props.put("mysql.host", host);
+ props.put("mysql.port", port);
+ props.put("mysql.user", user);
+ props.put("mysql.database", database);
+ props.put("mysql.password", password);
+ props.put("mysql.numconnections", numConnections);
+
+ return new JdbcProjectLoader(props);
+ }
+
+ private boolean isTestSetup() {
+ if (!testDBExists) {
+ System.err.println("Skipping DB test because Db not setup.");
+ return false;
+ }
+
+ System.out.println("Running DB test because Db setup.");
+ return true;
+ }
+
+ public static class CountHandler implements ResultSetHandler<Integer> {
+ @Override
+ public Integer handle(ResultSet rs) throws SQLException {
+ int val = 0;
+ while (rs.next()) {
+ val++;
+ }
+
+ return val;
+ }
+ }
}
unit/java/azkaban/test/project/ProjectTest.java 35(+18 -17)
diff --git a/unit/java/azkaban/test/project/ProjectTest.java b/unit/java/azkaban/test/project/ProjectTest.java
index c4021c0..761eb88 100644
--- a/unit/java/azkaban/test/project/ProjectTest.java
+++ b/unit/java/azkaban/test/project/ProjectTest.java
@@ -10,22 +10,23 @@ import azkaban.user.Permission.Type;
import azkaban.utils.JSONUtils;
public class ProjectTest {
- @Test
- public void testToAndFromObject() throws Exception {
- Project project = new Project(1, "tesTing");
- project.setCreateTimestamp(1l);
- project.setLastModifiedTimestamp(2l);
- project.setDescription("I am a test");
- project.setUserPermission("user1", new Permission(new Type[]{Type.ADMIN, Type.EXECUTE}));
-
- Object obj = project.toObject();
- String json = JSONUtils.toJSON(obj);
-
- Object jsonObj = JSONUtils.parseJSONFromString(json);
-
- Project parsedProject = Project.projectFromObject(jsonObj);
-
- assertTrue(project.equals(parsedProject));
- }
+ @Test
+ public void testToAndFromObject() throws Exception {
+ Project project = new Project(1, "tesTing");
+ project.setCreateTimestamp(1l);
+ project.setLastModifiedTimestamp(2l);
+ project.setDescription("I am a test");
+ project.setUserPermission("user1", new Permission(new Type[] { Type.ADMIN,
+ Type.EXECUTE }));
+
+ Object obj = project.toObject();
+ String json = JSONUtils.toJSON(obj);
+
+ Object jsonObj = JSONUtils.parseJSONFromString(json);
+
+ Project parsedProject = Project.projectFromObject(jsonObj);
+
+ assertTrue(project.equals(parsedProject));
+ }
}
diff --git a/unit/java/azkaban/test/trigger/BasicTimeCheckerTest.java b/unit/java/azkaban/test/trigger/BasicTimeCheckerTest.java
index dc9b970..c4fe36d 100644
--- a/unit/java/azkaban/test/trigger/BasicTimeCheckerTest.java
+++ b/unit/java/azkaban/test/trigger/BasicTimeCheckerTest.java
@@ -17,47 +17,51 @@ import azkaban.utils.Utils;
public class BasicTimeCheckerTest {
- @Test
- public void basicTimerTest(){
-
- Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
-
- // get a new timechecker, start from now, repeat every minute. should evaluate to false now, and true a minute later.
- DateTime now = DateTime.now();
- ReadablePeriod period = Utils.parsePeriodString("10s");
-
- BasicTimeChecker timeChecker = new BasicTimeChecker("BasicTimeChecket_1", now.getMillis(), now.getZone(), true, true, period);
- checkers.put(timeChecker.getId(), timeChecker);
- String expr = timeChecker.getId() + ".eval()";
-
- Condition cond = new Condition(checkers, expr);
- System.out.println(expr);
-
- assertFalse(cond.isMet());
-
- //sleep for 1 min
- try {
- Thread.sleep(10000);
- } catch (InterruptedException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
-
- assertTrue(cond.isMet());
-
- cond.resetCheckers();
-
- assertFalse(cond.isMet());
-
- //sleep for 1 min
- try {
- Thread.sleep(10000);
- } catch (InterruptedException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
-
- assertTrue(cond.isMet());
-
- }
+ @Test
+ public void basicTimerTest() {
+
+ Map<String, ConditionChecker> checkers =
+ new HashMap<String, ConditionChecker>();
+
+ // get a new timechecker, start from now, repeat every minute. should
+ // evaluate to false now, and true a minute later.
+ DateTime now = DateTime.now();
+ ReadablePeriod period = Utils.parsePeriodString("10s");
+
+ BasicTimeChecker timeChecker =
+ new BasicTimeChecker("BasicTimeChecket_1", now.getMillis(),
+ now.getZone(), true, true, period);
+ checkers.put(timeChecker.getId(), timeChecker);
+ String expr = timeChecker.getId() + ".eval()";
+
+ Condition cond = new Condition(checkers, expr);
+ System.out.println(expr);
+
+ assertFalse(cond.isMet());
+
+ // sleep for 1 min
+ try {
+ Thread.sleep(10000);
+ } catch (InterruptedException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+
+ assertTrue(cond.isMet());
+
+ cond.resetCheckers();
+
+ assertFalse(cond.isMet());
+
+ // sleep for 1 min
+ try {
+ Thread.sleep(10000);
+ } catch (InterruptedException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+
+ assertTrue(cond.isMet());
+
+ }
}
unit/java/azkaban/test/trigger/ConditionTest.java 146(+79 -67)
diff --git a/unit/java/azkaban/test/trigger/ConditionTest.java b/unit/java/azkaban/test/trigger/ConditionTest.java
index 76ced5e..4ecde66 100644
--- a/unit/java/azkaban/test/trigger/ConditionTest.java
+++ b/unit/java/azkaban/test/trigger/ConditionTest.java
@@ -19,71 +19,83 @@ import azkaban.utils.Props;
import azkaban.utils.Utils;
public class ConditionTest {
-
- @Test
- public void conditionTest(){
-
- Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
-
- ThresholdChecker fake1 = new ThresholdChecker("thresholdchecker1", 10);
- ThresholdChecker fake2 = new ThresholdChecker("thresholdchecker2", 20);
- ThresholdChecker.setVal(15);
- checkers.put(fake1.getId(), fake1);
- checkers.put(fake2.getId(), fake2);
-
- String expr1 = "( " + fake1.getId()+ ".eval()" + " && " + fake2.getId()+ ".eval()" + " )" + " || " + "( " + fake1.getId()+".eval()" + " && " + "!" + fake2.getId()+".eval()" + " )";
- String expr2 = "( " + fake1.getId()+ ".eval()" + " && " + fake2.getId()+ ".eval()" + " )" + " || " + "( " + fake1.getId()+".eval()" + " && " + fake2.getId()+".eval()" + " )";
-
- Condition cond = new Condition(checkers, expr1);
-
- System.out.println("Setting expression " + expr1);
- assertTrue(cond.isMet());
- cond.setExpression(expr2);
- System.out.println("Setting expression " + expr2);
- assertFalse(cond.isMet());
-
- }
-
- @Test
- public void jsonConversionTest() throws Exception {
-
- CheckerTypeLoader checkerTypeLoader = new CheckerTypeLoader();
- checkerTypeLoader.init(new Props());
- Condition.setCheckerLoader(checkerTypeLoader);
-
- Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
-
- // get a new timechecker, start from now, repeat every minute. should evaluate to false now, and true a minute later.
- DateTime now = DateTime.now();
- String period = "6s";
-
- //BasicTimeChecker timeChecker = new BasicTimeChecker(now, true, true, period);
- ConditionChecker timeChecker = new BasicTimeChecker("BasicTimeChecker_1", now.getMillis(), now.getZone(), true, true, Utils.parsePeriodString(period));
- System.out.println("checker id is " + timeChecker.getId());
-
- checkers.put(timeChecker.getId(), timeChecker);
- String expr = timeChecker.getId() + ".eval()";
-
- Condition cond = new Condition(checkers, expr);
-
- File temp = File.createTempFile("temptest", "temptest");
- temp.deleteOnExit();
- Object obj = cond.toJson();
- JSONUtils.toJSON(obj, temp);
-
- Condition cond2 = Condition.fromJson(JSONUtils.parseJSONFromFile(temp));
-
- Map<String, ConditionChecker> checkers2 = cond2.getCheckers();
-
- assertTrue(cond.getExpression().equals(cond2.getExpression()));
- System.out.println("cond1: " + cond.getExpression());
- System.out.println("cond2: " + cond2.getExpression());
- assertTrue(checkers2.size() == 1);
- ConditionChecker checker2 = checkers2.get(timeChecker.getId());
- //assertTrue(checker2.getId().equals(timeChecker.getId()));
- System.out.println("checker1: " + timeChecker.getId());
- System.out.println("checker2: " + checker2.getId());
- assertTrue(timeChecker.getId().equals(checker2.getId()));
- }
-
+
+ @Test
+ public void conditionTest() {
+
+ Map<String, ConditionChecker> checkers =
+ new HashMap<String, ConditionChecker>();
+
+ ThresholdChecker fake1 = new ThresholdChecker("thresholdchecker1", 10);
+ ThresholdChecker fake2 = new ThresholdChecker("thresholdchecker2", 20);
+ ThresholdChecker.setVal(15);
+ checkers.put(fake1.getId(), fake1);
+ checkers.put(fake2.getId(), fake2);
+
+ String expr1 =
+ "( " + fake1.getId() + ".eval()" + " && " + fake2.getId() + ".eval()"
+ + " )" + " || " + "( " + fake1.getId() + ".eval()" + " && " + "!"
+ + fake2.getId() + ".eval()" + " )";
+ String expr2 =
+ "( " + fake1.getId() + ".eval()" + " && " + fake2.getId() + ".eval()"
+ + " )" + " || " + "( " + fake1.getId() + ".eval()" + " && "
+ + fake2.getId() + ".eval()" + " )";
+
+ Condition cond = new Condition(checkers, expr1);
+
+ System.out.println("Setting expression " + expr1);
+ assertTrue(cond.isMet());
+ cond.setExpression(expr2);
+ System.out.println("Setting expression " + expr2);
+ assertFalse(cond.isMet());
+
+ }
+
+ @Test
+ public void jsonConversionTest() throws Exception {
+
+ CheckerTypeLoader checkerTypeLoader = new CheckerTypeLoader();
+ checkerTypeLoader.init(new Props());
+ Condition.setCheckerLoader(checkerTypeLoader);
+
+ Map<String, ConditionChecker> checkers =
+ new HashMap<String, ConditionChecker>();
+
+ // get a new timechecker, start from now, repeat every minute. should
+ // evaluate to false now, and true a minute later.
+ DateTime now = DateTime.now();
+ String period = "6s";
+
+ // BasicTimeChecker timeChecker = new BasicTimeChecker(now, true, true,
+ // period);
+ ConditionChecker timeChecker =
+ new BasicTimeChecker("BasicTimeChecker_1", now.getMillis(),
+ now.getZone(), true, true, Utils.parsePeriodString(period));
+ System.out.println("checker id is " + timeChecker.getId());
+
+ checkers.put(timeChecker.getId(), timeChecker);
+ String expr = timeChecker.getId() + ".eval()";
+
+ Condition cond = new Condition(checkers, expr);
+
+ File temp = File.createTempFile("temptest", "temptest");
+ temp.deleteOnExit();
+ Object obj = cond.toJson();
+ JSONUtils.toJSON(obj, temp);
+
+ Condition cond2 = Condition.fromJson(JSONUtils.parseJSONFromFile(temp));
+
+ Map<String, ConditionChecker> checkers2 = cond2.getCheckers();
+
+ assertTrue(cond.getExpression().equals(cond2.getExpression()));
+ System.out.println("cond1: " + cond.getExpression());
+ System.out.println("cond2: " + cond2.getExpression());
+ assertTrue(checkers2.size() == 1);
+ ConditionChecker checker2 = checkers2.get(timeChecker.getId());
+ // assertTrue(checker2.getId().equals(timeChecker.getId()));
+ System.out.println("checker1: " + timeChecker.getId());
+ System.out.println("checker2: " + checker2.getId());
+ assertTrue(timeChecker.getId().equals(checker2.getId()));
+ }
+
}
diff --git a/unit/java/azkaban/test/trigger/DummyTriggerAction.java b/unit/java/azkaban/test/trigger/DummyTriggerAction.java
index cffbed6..d5bd4cf 100644
--- a/unit/java/azkaban/test/trigger/DummyTriggerAction.java
+++ b/unit/java/azkaban/test/trigger/DummyTriggerAction.java
@@ -4,53 +4,53 @@ import java.util.Map;
import azkaban.trigger.TriggerAction;
-public class DummyTriggerAction implements TriggerAction{
-
- public static final String type = "DummyAction";
-
- private String message;
-
- public DummyTriggerAction(String message) {
- this.message = message;
- }
-
- @Override
- public String getType() {
- return type;
- }
-
- @Override
- public TriggerAction fromJson(Object obj) {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public Object toJson() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public void doAction() {
- System.out.println(getType() + " invoked.");
- System.out.println(message);
- }
-
- @Override
- public String getDescription() {
- return "this is real dummy action";
- }
-
- @Override
- public String getId() {
- return null;
- }
-
- @Override
- public void setContext(Map<String, Object> context) {
- // TODO Auto-generated method stub
-
- }
+public class DummyTriggerAction implements TriggerAction {
+
+ public static final String type = "DummyAction";
+
+ private String message;
+
+ public DummyTriggerAction(String message) {
+ this.message = message;
+ }
+
+ @Override
+ public String getType() {
+ return type;
+ }
+
+ @Override
+ public TriggerAction fromJson(Object obj) {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Object toJson() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public void doAction() {
+ System.out.println(getType() + " invoked.");
+ System.out.println(message);
+ }
+
+ @Override
+ public String getDescription() {
+ return "this is real dummy action";
+ }
+
+ @Override
+ public String getId() {
+ return null;
+ }
+
+ @Override
+ public void setContext(Map<String, Object> context) {
+ // TODO Auto-generated method stub
+
+ }
}
diff --git a/unit/java/azkaban/test/trigger/ExecuteFlowActionTest.java b/unit/java/azkaban/test/trigger/ExecuteFlowActionTest.java
index 4c49dab..565432e 100644
--- a/unit/java/azkaban/test/trigger/ExecuteFlowActionTest.java
+++ b/unit/java/azkaban/test/trigger/ExecuteFlowActionTest.java
@@ -12,28 +12,29 @@ import azkaban.trigger.ActionTypeLoader;
import azkaban.trigger.builtin.ExecuteFlowAction;
import azkaban.utils.Props;
-
public class ExecuteFlowActionTest {
-
- @Test
- public void jsonConversionTest() throws Exception {
- ActionTypeLoader loader = new ActionTypeLoader();
- loader.init(new Props());
-
- ExecutionOptions options = new ExecutionOptions();
- List<Object> disabledJobs = new ArrayList<Object>();
- options.setDisabledJobs(disabledJobs);
-
- ExecuteFlowAction executeFlowAction = new ExecuteFlowAction("ExecuteFlowAction", 1, "testproject", "testflow", "azkaban", options, null);
-
- Object obj = executeFlowAction.toJson();
-
- ExecuteFlowAction action = (ExecuteFlowAction) loader.createActionFromJson(ExecuteFlowAction.type, obj);
- assertTrue(executeFlowAction.getProjectId() == action.getProjectId());
- assertTrue(executeFlowAction.getFlowName().equals(action.getFlowName()));
- assertTrue(executeFlowAction.getSubmitUser().equals(action.getSubmitUser()));
- }
-
-
-
+
+ @Test
+ public void jsonConversionTest() throws Exception {
+ ActionTypeLoader loader = new ActionTypeLoader();
+ loader.init(new Props());
+
+ ExecutionOptions options = new ExecutionOptions();
+ List<Object> disabledJobs = new ArrayList<Object>();
+ options.setDisabledJobs(disabledJobs);
+
+ ExecuteFlowAction executeFlowAction =
+ new ExecuteFlowAction("ExecuteFlowAction", 1, "testproject",
+ "testflow", "azkaban", options, null);
+
+ Object obj = executeFlowAction.toJson();
+
+ ExecuteFlowAction action =
+ (ExecuteFlowAction) loader.createActionFromJson(ExecuteFlowAction.type,
+ obj);
+ assertTrue(executeFlowAction.getProjectId() == action.getProjectId());
+ assertTrue(executeFlowAction.getFlowName().equals(action.getFlowName()));
+ assertTrue(executeFlowAction.getSubmitUser().equals(action.getSubmitUser()));
+ }
+
}
unit/java/azkaban/test/trigger/JdbcTriggerLoaderTest.java 355(+183 -172)
diff --git a/unit/java/azkaban/test/trigger/JdbcTriggerLoaderTest.java b/unit/java/azkaban/test/trigger/JdbcTriggerLoaderTest.java
index 61f2ae7..2383f6e 100644
--- a/unit/java/azkaban/test/trigger/JdbcTriggerLoaderTest.java
+++ b/unit/java/azkaban/test/trigger/JdbcTriggerLoaderTest.java
@@ -39,176 +39,187 @@ import azkaban.utils.Utils;
public class JdbcTriggerLoaderTest {
- private static boolean testDBExists = false;
- //@TODO remove this and turn into local host.
- private static final String host = "localhost";
- private static final int port = 3306;
- private static final String database = "azkaban2";
- private static final String user = "azkaban";
- private static final String password = "azkaban";
- private static final int numConnections = 10;
-
- private TriggerLoader loader;
- private CheckerTypeLoader checkerLoader;
- private ActionTypeLoader actionLoader;
-
- @Before
- public void setup() throws TriggerException {
- Props props = new Props();
- props.put("database.type", "mysql");
-
- props.put("mysql.host", host);
- props.put("mysql.port", port);
- props.put("mysql.user", user);
- props.put("mysql.database", database);
- props.put("mysql.password", password);
- props.put("mysql.numconnections", numConnections);
-
- loader = new JdbcTriggerLoader(props);
- checkerLoader = new CheckerTypeLoader();
- checkerLoader.init(new Props());
- Condition.setCheckerLoader(checkerLoader);
- actionLoader = new ActionTypeLoader();
- actionLoader.init(new Props());
- Trigger.setActionTypeLoader(actionLoader);
- setupDB();
- }
-
- public void setupDB() {
- DataSource dataSource = DataSourceUtils.getMySQLDataSource(host, port, database, user, password, numConnections);
- testDBExists = true;
-
- Connection connection = null;
- try {
- connection = dataSource.getConnection();
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- CountHandler countHandler = new CountHandler();
- QueryRunner runner = new QueryRunner();
- try {
- runner.query(connection, "SELECT COUNT(1) FROM triggers", countHandler);
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- DbUtils.closeQuietly(connection);
-
- clearDB();
- }
-
- @After
- public void clearDB() {
- if (!testDBExists) {
- return;
- }
-
- DataSource dataSource = DataSourceUtils.getMySQLDataSource(host, port, database, user, password, numConnections);
- Connection connection = null;
- try {
- connection = dataSource.getConnection();
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- QueryRunner runner = new QueryRunner();
- try {
- runner.update(connection, "DELETE FROM triggers");
-
- } catch (SQLException e) {
- e.printStackTrace();
- testDBExists = false;
- DbUtils.closeQuietly(connection);
- return;
- }
-
- DbUtils.closeQuietly(connection);
- }
-
- @Test
- public void addTriggerTest() throws TriggerLoaderException {
- Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
- Trigger t2 = createTrigger("testProj2", "testFlow2", "source2");
- loader.addTrigger(t1);
- List<Trigger> ts = loader.loadTriggers();
- assertTrue(ts.size() == 1);
-
- Trigger t3 = ts.get(0);
- assertTrue(t3.getSource().equals("source1"));
-
- loader.addTrigger(t2);
- ts = loader.loadTriggers();
- assertTrue(ts.size() == 2);
-
- for(Trigger t : ts) {
- if(t.getTriggerId() == t2.getTriggerId()) {
- t.getSource().equals(t2.getSource());
- }
- }
- }
-
- @Test
- public void removeTriggerTest() throws TriggerLoaderException {
- Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
- Trigger t2 = createTrigger("testProj2", "testFlow2", "source2");
- loader.addTrigger(t1);
- loader.addTrigger(t2);
- List<Trigger> ts = loader.loadTriggers();
- assertTrue(ts.size() == 2);
- loader.removeTrigger(t2);
- ts = loader.loadTriggers();
- assertTrue(ts.size() == 1);
- assertTrue(ts.get(0).getTriggerId() == t1.getTriggerId());
- }
-
- @Test
- public void updateTriggerTest() throws TriggerLoaderException {
- Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
- t1.setResetOnExpire(true);
- loader.addTrigger(t1);
- List<Trigger> ts = loader.loadTriggers();
- assertTrue(ts.get(0).isResetOnExpire() == true);
- t1.setResetOnExpire(false);
- loader.updateTrigger(t1);
- ts = loader.loadTriggers();
- assertTrue(ts.get(0).isResetOnExpire() == false);
- }
-
- private Trigger createTrigger(String projName, String flowName, String source) {
- DateTime now = DateTime.now();
- ConditionChecker checker1 = new BasicTimeChecker("timeChecker1", now.getMillis(), now.getZone(), true, true, Utils.parsePeriodString("1h"));
- Map<String, ConditionChecker> checkers1 = new HashMap<String, ConditionChecker>();
- checkers1.put(checker1.getId(), checker1);
- String expr1 = checker1.getId() + ".eval()";
- Condition triggerCond = new Condition(checkers1, expr1);
- Condition expireCond = new Condition(checkers1, expr1);
- List<TriggerAction> actions = new ArrayList<TriggerAction>();
- TriggerAction action = new ExecuteFlowAction("executeAction", 1, projName, flowName, "azkaban", new ExecutionOptions(), null);
- actions.add(action);
- Trigger t = new Trigger(now.getMillis(), now.getMillis(), "azkaban", source, triggerCond, expireCond, actions);
- return t;
- }
-
- public static class CountHandler implements ResultSetHandler<Integer> {
- @Override
- public Integer handle(ResultSet rs) throws SQLException {
- int val = 0;
- while (rs.next()) {
- val++;
- }
-
- return val;
- }
- }
-
+ private static boolean testDBExists = false;
+ // @TODO remove this and turn into local host.
+ private static final String host = "localhost";
+ private static final int port = 3306;
+ private static final String database = "azkaban2";
+ private static final String user = "azkaban";
+ private static final String password = "azkaban";
+ private static final int numConnections = 10;
+
+ private TriggerLoader loader;
+ private CheckerTypeLoader checkerLoader;
+ private ActionTypeLoader actionLoader;
+
+ @Before
+ public void setup() throws TriggerException {
+ Props props = new Props();
+ props.put("database.type", "mysql");
+
+ props.put("mysql.host", host);
+ props.put("mysql.port", port);
+ props.put("mysql.user", user);
+ props.put("mysql.database", database);
+ props.put("mysql.password", password);
+ props.put("mysql.numconnections", numConnections);
+
+ loader = new JdbcTriggerLoader(props);
+ checkerLoader = new CheckerTypeLoader();
+ checkerLoader.init(new Props());
+ Condition.setCheckerLoader(checkerLoader);
+ actionLoader = new ActionTypeLoader();
+ actionLoader.init(new Props());
+ Trigger.setActionTypeLoader(actionLoader);
+ setupDB();
+ }
+
+ public void setupDB() {
+ DataSource dataSource =
+ DataSourceUtils.getMySQLDataSource(host, port, database, user,
+ password, numConnections);
+ testDBExists = true;
+
+ Connection connection = null;
+ try {
+ connection = dataSource.getConnection();
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ CountHandler countHandler = new CountHandler();
+ QueryRunner runner = new QueryRunner();
+ try {
+ runner.query(connection, "SELECT COUNT(1) FROM triggers", countHandler);
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ DbUtils.closeQuietly(connection);
+
+ clearDB();
+ }
+
+ @After
+ public void clearDB() {
+ if (!testDBExists) {
+ return;
+ }
+
+ DataSource dataSource =
+ DataSourceUtils.getMySQLDataSource(host, port, database, user,
+ password, numConnections);
+ Connection connection = null;
+ try {
+ connection = dataSource.getConnection();
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ QueryRunner runner = new QueryRunner();
+ try {
+ runner.update(connection, "DELETE FROM triggers");
+
+ } catch (SQLException e) {
+ e.printStackTrace();
+ testDBExists = false;
+ DbUtils.closeQuietly(connection);
+ return;
+ }
+
+ DbUtils.closeQuietly(connection);
+ }
+
+ @Test
+ public void addTriggerTest() throws TriggerLoaderException {
+ Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
+ Trigger t2 = createTrigger("testProj2", "testFlow2", "source2");
+ loader.addTrigger(t1);
+ List<Trigger> ts = loader.loadTriggers();
+ assertTrue(ts.size() == 1);
+
+ Trigger t3 = ts.get(0);
+ assertTrue(t3.getSource().equals("source1"));
+
+ loader.addTrigger(t2);
+ ts = loader.loadTriggers();
+ assertTrue(ts.size() == 2);
+
+ for (Trigger t : ts) {
+ if (t.getTriggerId() == t2.getTriggerId()) {
+ t.getSource().equals(t2.getSource());
+ }
+ }
+ }
+
+ @Test
+ public void removeTriggerTest() throws TriggerLoaderException {
+ Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
+ Trigger t2 = createTrigger("testProj2", "testFlow2", "source2");
+ loader.addTrigger(t1);
+ loader.addTrigger(t2);
+ List<Trigger> ts = loader.loadTriggers();
+ assertTrue(ts.size() == 2);
+ loader.removeTrigger(t2);
+ ts = loader.loadTriggers();
+ assertTrue(ts.size() == 1);
+ assertTrue(ts.get(0).getTriggerId() == t1.getTriggerId());
+ }
+
+ @Test
+ public void updateTriggerTest() throws TriggerLoaderException {
+ Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
+ t1.setResetOnExpire(true);
+ loader.addTrigger(t1);
+ List<Trigger> ts = loader.loadTriggers();
+ assertTrue(ts.get(0).isResetOnExpire() == true);
+ t1.setResetOnExpire(false);
+ loader.updateTrigger(t1);
+ ts = loader.loadTriggers();
+ assertTrue(ts.get(0).isResetOnExpire() == false);
+ }
+
+ private Trigger createTrigger(String projName, String flowName, String source) {
+ DateTime now = DateTime.now();
+ ConditionChecker checker1 =
+ new BasicTimeChecker("timeChecker1", now.getMillis(), now.getZone(),
+ true, true, Utils.parsePeriodString("1h"));
+ Map<String, ConditionChecker> checkers1 =
+ new HashMap<String, ConditionChecker>();
+ checkers1.put(checker1.getId(), checker1);
+ String expr1 = checker1.getId() + ".eval()";
+ Condition triggerCond = new Condition(checkers1, expr1);
+ Condition expireCond = new Condition(checkers1, expr1);
+ List<TriggerAction> actions = new ArrayList<TriggerAction>();
+ TriggerAction action =
+ new ExecuteFlowAction("executeAction", 1, projName, flowName,
+ "azkaban", new ExecutionOptions(), null);
+ actions.add(action);
+ Trigger t =
+ new Trigger(now.getMillis(), now.getMillis(), "azkaban", source,
+ triggerCond, expireCond, actions);
+ return t;
+ }
+
+ public static class CountHandler implements ResultSetHandler<Integer> {
+ @Override
+ public Integer handle(ResultSet rs) throws SQLException {
+ int val = 0;
+ while (rs.next()) {
+ val++;
+ }
+
+ return val;
+ }
+ }
+
}
diff --git a/unit/java/azkaban/test/trigger/MockTriggerLoader.java b/unit/java/azkaban/test/trigger/MockTriggerLoader.java
index 67ef5c7..0f657f8 100644
--- a/unit/java/azkaban/test/trigger/MockTriggerLoader.java
+++ b/unit/java/azkaban/test/trigger/MockTriggerLoader.java
@@ -11,43 +11,47 @@ import azkaban.trigger.TriggerLoaderException;
public class MockTriggerLoader implements TriggerLoader {
- Map<Integer, Trigger> triggers = new HashMap<Integer, Trigger>();
- int triggerCount = 0;
-
- @Override
- public synchronized void addTrigger(Trigger t) throws TriggerLoaderException {
- t.setTriggerId(triggerCount);
- t.setLastModifyTime(System.currentTimeMillis());
- triggers.put(t.getTriggerId(), t);
- triggerCount++;
- }
-
- @Override
- public synchronized void removeTrigger(Trigger s) throws TriggerLoaderException {
- triggers.remove(s);
- }
-
- @Override
- public synchronized void updateTrigger(Trigger t) throws TriggerLoaderException {
- t.setLastModifyTime(System.currentTimeMillis());
- triggers.put(t.getTriggerId(), t);
- }
-
- @Override
- public synchronized List<Trigger> loadTriggers() throws TriggerLoaderException {
- return new ArrayList<Trigger>(triggers.values());
- }
-
- @Override
- public synchronized Trigger loadTrigger(int triggerId) throws TriggerLoaderException {
- return triggers.get(triggerId);
- }
-
- @Override
- public List<Trigger> getUpdatedTriggers(long lastUpdateTime)
- throws TriggerLoaderException {
- // TODO Auto-generated method stub
- return null;
- }
+ Map<Integer, Trigger> triggers = new HashMap<Integer, Trigger>();
+ int triggerCount = 0;
+
+ @Override
+ public synchronized void addTrigger(Trigger t) throws TriggerLoaderException {
+ t.setTriggerId(triggerCount);
+ t.setLastModifyTime(System.currentTimeMillis());
+ triggers.put(t.getTriggerId(), t);
+ triggerCount++;
+ }
+
+ @Override
+ public synchronized void removeTrigger(Trigger s)
+ throws TriggerLoaderException {
+ triggers.remove(s);
+ }
+
+ @Override
+ public synchronized void updateTrigger(Trigger t)
+ throws TriggerLoaderException {
+ t.setLastModifyTime(System.currentTimeMillis());
+ triggers.put(t.getTriggerId(), t);
+ }
+
+ @Override
+ public synchronized List<Trigger> loadTriggers()
+ throws TriggerLoaderException {
+ return new ArrayList<Trigger>(triggers.values());
+ }
+
+ @Override
+ public synchronized Trigger loadTrigger(int triggerId)
+ throws TriggerLoaderException {
+ return triggers.get(triggerId);
+ }
+
+ @Override
+ public List<Trigger> getUpdatedTriggers(long lastUpdateTime)
+ throws TriggerLoaderException {
+ // TODO Auto-generated method stub
+ return null;
+ }
}
unit/java/azkaban/test/trigger/ThresholdChecker.java 183(+91 -92)
diff --git a/unit/java/azkaban/test/trigger/ThresholdChecker.java b/unit/java/azkaban/test/trigger/ThresholdChecker.java
index 3368eae..1ecceb9 100644
--- a/unit/java/azkaban/test/trigger/ThresholdChecker.java
+++ b/unit/java/azkaban/test/trigger/ThresholdChecker.java
@@ -4,99 +4,98 @@ import java.util.Map;
import azkaban.trigger.ConditionChecker;
+public class ThresholdChecker implements ConditionChecker {
-public class ThresholdChecker implements ConditionChecker{
-
- private int threshold = -1;
-
- private static int curVal = -1;
-
- public static final String type = "ThresholdChecker";
-
- private String id;
-
- private boolean checkerMet = false;
- private boolean checkerReset = false;
-
- public ThresholdChecker(String id, int threshold){
- this.id = id;
- this.threshold = threshold;
- }
-
- public synchronized static void setVal(int val) {
- curVal = val;
- }
-
- @Override
- public Boolean eval() {
- if(curVal > threshold) {
- checkerMet = true;
- }
- return checkerMet;
- }
-
- public boolean isCheckerMet() {
- return checkerMet;
- }
-
- @Override
- public void reset() {
- checkerMet = false;
- checkerReset = true;
- }
-
- public boolean isCheckerReset() {
- return checkerReset;
- }
-
- /*
- * TimeChecker format:
- * type_first-time-in-millis_next-time-in-millis_timezone_is-recurring_skip-past-checks_period
- */
- @Override
- public String getId() {
- return id;
- }
-
- @Override
- public String getType() {
- return type;
- }
-
- @Override
- public ConditionChecker fromJson(Object obj) {
- return null;
- }
-
- @Override
- public Object getNum() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public Object toJson() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public void stopChecker() {
- return;
-
- }
-
- @Override
- public void setContext(Map<String, Object> context) {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public long getNextCheckTime() {
- // TODO Auto-generated method stub
- return 0;
- }
+ private int threshold = -1;
+ private static int curVal = -1;
+
+ public static final String type = "ThresholdChecker";
+
+ private String id;
+
+ private boolean checkerMet = false;
+ private boolean checkerReset = false;
+
+ public ThresholdChecker(String id, int threshold) {
+ this.id = id;
+ this.threshold = threshold;
+ }
+
+ public synchronized static void setVal(int val) {
+ curVal = val;
+ }
+
+ @Override
+ public Boolean eval() {
+ if (curVal > threshold) {
+ checkerMet = true;
+ }
+ return checkerMet;
+ }
+
+ public boolean isCheckerMet() {
+ return checkerMet;
+ }
+
+ @Override
+ public void reset() {
+ checkerMet = false;
+ checkerReset = true;
+ }
+
+ public boolean isCheckerReset() {
+ return checkerReset;
+ }
+
+ /*
+ * TimeChecker format:
+ * type_first-time-in-millis_next-time-in-millis_timezone_is
+ * -recurring_skip-past-checks_period
+ */
+ @Override
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public String getType() {
+ return type;
+ }
+
+ @Override
+ public ConditionChecker fromJson(Object obj) {
+ return null;
+ }
+
+ @Override
+ public Object getNum() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Object toJson() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public void stopChecker() {
+ return;
+
+ }
+
+ @Override
+ public void setContext(Map<String, Object> context) {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public long getNextCheckTime() {
+ // TODO Auto-generated method stub
+ return 0;
+ }
}
diff --git a/unit/java/azkaban/test/trigger/TriggerManagerDeadlockTest.java b/unit/java/azkaban/test/trigger/TriggerManagerDeadlockTest.java
index a8a8b0e..902aeda 100644
--- a/unit/java/azkaban/test/trigger/TriggerManagerDeadlockTest.java
+++ b/unit/java/azkaban/test/trigger/TriggerManagerDeadlockTest.java
@@ -26,158 +26,171 @@ import azkaban.trigger.builtin.CreateTriggerAction;
import azkaban.utils.Props;
public class TriggerManagerDeadlockTest {
-
- TriggerLoader loader;
- TriggerManager triggerManager;
- ExecutorLoader execLoader;
-
- @Before
- public void setup() throws ExecutorManagerException, TriggerManagerException {
- loader = new MockTriggerLoader();
- Props props = new Props();
- props.put("trigger.scan.interval", 1000);
- props.put("executor.port", 12321);
- execLoader = new MockExecutorLoader();
- Map<String, Alerter> alerters = new HashMap<String, Alerter>();
- ExecutorManager executorManager = new ExecutorManager(props, execLoader, alerters);
- triggerManager = new TriggerManager(props, loader, executorManager);
- }
-
- @After
- public void tearDown() {
-
- }
-
- @Test
- public void deadlockTest() throws TriggerLoaderException, TriggerManagerException {
- // this should well saturate it
- for(int i = 0; i < 1000; i++) {
- Trigger t = createSelfRegenTrigger();
- loader.addTrigger(t);
- }
- // keep going and add more
- for(int i = 0; i < 10000; i++) {
- Trigger d = createDummyTrigger();
- triggerManager.insertTrigger(d);
- triggerManager.removeTrigger(d);
- }
-
- System.out.println("No dead lock.");
- }
-
- public class AlwaysOnChecker implements ConditionChecker {
-
- public static final String type = "AlwaysOnChecker";
-
- private final String id;
- private final Boolean alwaysOn;
-
- public AlwaysOnChecker(String id, Boolean alwaysOn) {
- this.id = id;
- this.alwaysOn = alwaysOn;
- }
-
- @Override
- public Object eval() {
- // TODO Auto-generated method stub
- return alwaysOn;
- }
-
- @Override
- public Object getNum() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public void reset() {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public String getId() {
- return id;
- }
-
- @Override
- public String getType() {
- // TODO Auto-generated method stub
- return type;
- }
-
- @Override
- public ConditionChecker fromJson(Object obj) throws Exception {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public Object toJson() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public void stopChecker() {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void setContext(Map<String, Object> context) {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public long getNextCheckTime() {
- // TODO Auto-generated method stub
- return 0;
- }
-
- }
-
- private Trigger createSelfRegenTrigger() {
- ConditionChecker alwaysOnChecker = new AlwaysOnChecker("alwaysOn", Boolean.TRUE);
- String triggerExpr = alwaysOnChecker.getId() + ".eval()";
- Map<String, ConditionChecker> triggerCheckers = new HashMap<String, ConditionChecker>();
- triggerCheckers.put(alwaysOnChecker.getId(), alwaysOnChecker);
- Condition triggerCond = new Condition(triggerCheckers, triggerExpr);
-
- TriggerAction triggerAct = new CreateTriggerAction("dummyTrigger", createDummyTrigger());
- List<TriggerAction> actions = new ArrayList<TriggerAction>();
- actions.add(triggerAct);
-
- ConditionChecker alwaysOffChecker = new AlwaysOnChecker("alwaysOff", Boolean.FALSE);
- String expireExpr = alwaysOffChecker.getId() + ".eval()";
- Map<String, ConditionChecker> expireCheckers = new HashMap<String, ConditionChecker>();
- expireCheckers.put(alwaysOffChecker.getId(), alwaysOffChecker);
- Condition expireCond = new Condition(expireCheckers, expireExpr);
-
- Trigger t = new Trigger("azkaban", "azkabanTest", triggerCond, expireCond, actions);
- return t;
- }
-
- private Trigger createDummyTrigger() {
- ConditionChecker alwaysOnChecker = new AlwaysOnChecker("alwaysOn", Boolean.TRUE);
- String triggerExpr = alwaysOnChecker.getId() + ".eval()";
- Map<String, ConditionChecker> triggerCheckers = new HashMap<String, ConditionChecker>();
- triggerCheckers.put(alwaysOnChecker.getId(), alwaysOnChecker);
- Condition triggerCond = new Condition(triggerCheckers, triggerExpr);
-
- TriggerAction triggerAct = new DummyTriggerAction("howdy!");
- List<TriggerAction> actions = new ArrayList<TriggerAction>();
- actions.add(triggerAct);
-
- ConditionChecker alwaysOffChecker = new AlwaysOnChecker("alwaysOff", Boolean.FALSE);
- String expireExpr = alwaysOffChecker.getId() + ".eval()";
- Map<String, ConditionChecker> expireCheckers = new HashMap<String, ConditionChecker>();
- expireCheckers.put(alwaysOffChecker.getId(), alwaysOffChecker);
- Condition expireCond = new Condition(expireCheckers, expireExpr);
-
- Trigger t = new Trigger("azkaban", "azkabanTest", triggerCond, expireCond, actions);
- return t;
- }
-
+
+ TriggerLoader loader;
+ TriggerManager triggerManager;
+ ExecutorLoader execLoader;
+
+ @Before
+ public void setup() throws ExecutorManagerException, TriggerManagerException {
+ loader = new MockTriggerLoader();
+ Props props = new Props();
+ props.put("trigger.scan.interval", 1000);
+ props.put("executor.port", 12321);
+ execLoader = new MockExecutorLoader();
+ Map<String, Alerter> alerters = new HashMap<String, Alerter>();
+ ExecutorManager executorManager =
+ new ExecutorManager(props, execLoader, alerters);
+ triggerManager = new TriggerManager(props, loader, executorManager);
+ }
+
+ @After
+ public void tearDown() {
+
+ }
+
+ @Test
+ public void deadlockTest() throws TriggerLoaderException,
+ TriggerManagerException {
+ // this should well saturate it
+ for (int i = 0; i < 1000; i++) {
+ Trigger t = createSelfRegenTrigger();
+ loader.addTrigger(t);
+ }
+ // keep going and add more
+ for (int i = 0; i < 10000; i++) {
+ Trigger d = createDummyTrigger();
+ triggerManager.insertTrigger(d);
+ triggerManager.removeTrigger(d);
+ }
+
+ System.out.println("No dead lock.");
+ }
+
+ public class AlwaysOnChecker implements ConditionChecker {
+
+ public static final String type = "AlwaysOnChecker";
+
+ private final String id;
+ private final Boolean alwaysOn;
+
+ public AlwaysOnChecker(String id, Boolean alwaysOn) {
+ this.id = id;
+ this.alwaysOn = alwaysOn;
+ }
+
+ @Override
+ public Object eval() {
+ // TODO Auto-generated method stub
+ return alwaysOn;
+ }
+
+ @Override
+ public Object getNum() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public void reset() {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public String getType() {
+ // TODO Auto-generated method stub
+ return type;
+ }
+
+ @Override
+ public ConditionChecker fromJson(Object obj) throws Exception {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Object toJson() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public void stopChecker() {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void setContext(Map<String, Object> context) {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public long getNextCheckTime() {
+ // TODO Auto-generated method stub
+ return 0;
+ }
+
+ }
+
+ private Trigger createSelfRegenTrigger() {
+ ConditionChecker alwaysOnChecker =
+ new AlwaysOnChecker("alwaysOn", Boolean.TRUE);
+ String triggerExpr = alwaysOnChecker.getId() + ".eval()";
+ Map<String, ConditionChecker> triggerCheckers =
+ new HashMap<String, ConditionChecker>();
+ triggerCheckers.put(alwaysOnChecker.getId(), alwaysOnChecker);
+ Condition triggerCond = new Condition(triggerCheckers, triggerExpr);
+
+ TriggerAction triggerAct =
+ new CreateTriggerAction("dummyTrigger", createDummyTrigger());
+ List<TriggerAction> actions = new ArrayList<TriggerAction>();
+ actions.add(triggerAct);
+
+ ConditionChecker alwaysOffChecker =
+ new AlwaysOnChecker("alwaysOff", Boolean.FALSE);
+ String expireExpr = alwaysOffChecker.getId() + ".eval()";
+ Map<String, ConditionChecker> expireCheckers =
+ new HashMap<String, ConditionChecker>();
+ expireCheckers.put(alwaysOffChecker.getId(), alwaysOffChecker);
+ Condition expireCond = new Condition(expireCheckers, expireExpr);
+
+ Trigger t =
+ new Trigger("azkaban", "azkabanTest", triggerCond, expireCond, actions);
+ return t;
+ }
+
+ private Trigger createDummyTrigger() {
+ ConditionChecker alwaysOnChecker =
+ new AlwaysOnChecker("alwaysOn", Boolean.TRUE);
+ String triggerExpr = alwaysOnChecker.getId() + ".eval()";
+ Map<String, ConditionChecker> triggerCheckers =
+ new HashMap<String, ConditionChecker>();
+ triggerCheckers.put(alwaysOnChecker.getId(), alwaysOnChecker);
+ Condition triggerCond = new Condition(triggerCheckers, triggerExpr);
+
+ TriggerAction triggerAct = new DummyTriggerAction("howdy!");
+ List<TriggerAction> actions = new ArrayList<TriggerAction>();
+ actions.add(triggerAct);
+
+ ConditionChecker alwaysOffChecker =
+ new AlwaysOnChecker("alwaysOff", Boolean.FALSE);
+ String expireExpr = alwaysOffChecker.getId() + ".eval()";
+ Map<String, ConditionChecker> expireCheckers =
+ new HashMap<String, ConditionChecker>();
+ expireCheckers.put(alwaysOffChecker.getId(), alwaysOffChecker);
+ Condition expireCond = new Condition(expireCheckers, expireExpr);
+
+ Trigger t =
+ new Trigger("azkaban", "azkabanTest", triggerCond, expireCond, actions);
+ return t;
+ }
+
}
unit/java/azkaban/test/trigger/TriggerManagerTest.java 334(+173 -161)
diff --git a/unit/java/azkaban/test/trigger/TriggerManagerTest.java b/unit/java/azkaban/test/trigger/TriggerManagerTest.java
index dfd2da5..e5fc466 100644
--- a/unit/java/azkaban/test/trigger/TriggerManagerTest.java
+++ b/unit/java/azkaban/test/trigger/TriggerManagerTest.java
@@ -24,166 +24,178 @@ import azkaban.trigger.TriggerManagerException;
import azkaban.utils.Props;
public class TriggerManagerTest {
-
- private TriggerLoader triggerLoader;
-
- @Before
- public void setup() throws TriggerException, TriggerManagerException {
- triggerLoader = new MockTriggerLoader();
-
- }
-
- @After
- public void tearDown() {
-
- }
-
- @Test
- public void TriggerManagerSimpleTest() throws TriggerManagerException {
-
-
- Props props = new Props();
- props.put("trigger.scan.interval", 4000);
- TriggerManager triggerManager = new TriggerManager(props, triggerLoader, null);
-
- triggerManager.registerCheckerType(ThresholdChecker.type, ThresholdChecker.class);
- triggerManager.registerActionType(DummyTriggerAction.type, DummyTriggerAction.class);
-
- ThresholdChecker.setVal(1);
-
- triggerManager.insertTrigger(createDummyTrigger("test1", "triggerLoader", 10), "testUser");
- List<Trigger> triggers = triggerManager.getTriggers();
- assertTrue(triggers.size() == 1);
- Trigger t1 = triggers.get(0);
- t1.setResetOnTrigger(false);
- triggerManager.updateTrigger(t1, "testUser");
- ThresholdChecker checker1 = (ThresholdChecker) t1.getTriggerCondition().getCheckers().values().toArray()[0];
- assertTrue(t1.getSource().equals("triggerLoader"));
-
- Trigger t2 = createDummyTrigger("test2: add new trigger", "addNewTriggerTest", 20);
- triggerManager.insertTrigger(t2, "testUser");
- ThresholdChecker checker2 = (ThresholdChecker) t2.getTriggerCondition().getCheckers().values().toArray()[0];
-
- ThresholdChecker.setVal(15);
- try {
- Thread.sleep(2000);
- } catch (InterruptedException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
-
- assertTrue(checker1.isCheckerMet() == false);
- assertTrue(checker2.isCheckerMet() == false);
- assertTrue(checker1.isCheckerReset() == false);
- assertTrue(checker2.isCheckerReset() == false);
-
- try {
- Thread.sleep(2000);
- } catch (InterruptedException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
-
- assertTrue(checker1.isCheckerMet() == true);
- assertTrue(checker2.isCheckerMet() == false);
- assertTrue(checker1.isCheckerReset() == false);
- assertTrue(checker2.isCheckerReset() == false);
-
- ThresholdChecker.setVal(25);
- try {
- Thread.sleep(4000);
- } catch (InterruptedException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
-
- assertTrue(checker1.isCheckerMet() == true);
- assertTrue(checker1.isCheckerReset() == false);
- assertTrue(checker2.isCheckerReset() == true);
-
- triggers = triggerManager.getTriggers();
- assertTrue(triggers.size() == 1);
-
- }
-
- public class MockTriggerLoader implements TriggerLoader {
-
- private Map<Integer, Trigger> triggers = new HashMap<Integer, Trigger>();
- private int idIndex = 0;
-
- @Override
- public void addTrigger(Trigger t) throws TriggerLoaderException {
- t.setTriggerId(idIndex++);
- triggers.put(t.getTriggerId(), t);
- }
-
- @Override
- public void removeTrigger(Trigger s) throws TriggerLoaderException {
- triggers.remove(s.getTriggerId());
-
- }
-
- @Override
- public void updateTrigger(Trigger t) throws TriggerLoaderException {
- triggers.put(t.getTriggerId(), t);
- }
-
- @Override
- public List<Trigger> loadTriggers() {
- return new ArrayList<Trigger>(triggers.values());
- }
-
- @Override
- public Trigger loadTrigger(int triggerId)
- throws TriggerLoaderException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public List<Trigger> getUpdatedTriggers(long lastUpdateTime)
- throws TriggerLoaderException {
- // TODO Auto-generated method stub
- return null;
- }
-
- }
-
- private Trigger createDummyTrigger(String message, String source, int threshold) {
-
- Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
- ConditionChecker checker = new ThresholdChecker(ThresholdChecker.type, threshold);
- checkers.put(checker.getId(), checker);
-
- List<TriggerAction> actions = new ArrayList<TriggerAction>();
- TriggerAction act = new DummyTriggerAction(message);
- actions.add(act);
-
- String expr = checker.getId() + ".eval()";
-
- Condition triggerCond = new Condition(checkers, expr);
- Condition expireCond = new Condition(checkers, expr);
-
- Trigger fakeTrigger = new Trigger(DateTime.now().getMillis(), DateTime.now().getMillis(), "azkaban", source, triggerCond, expireCond, actions);
- fakeTrigger.setResetOnTrigger(true);
- fakeTrigger.setResetOnExpire(true);
-
- return fakeTrigger;
- }
-
-// public class MockCheckerLoader extends CheckerTypeLoader{
-//
-// @Override
-// public void init(Props props) {
-// checkerToClass.put(ThresholdChecker.type, ThresholdChecker.class);
-// }
-// }
-//
-// public class MockActionLoader extends ActionTypeLoader {
-// @Override
-// public void init(Props props) {
-// actionToClass.put(DummyTriggerAction.type, DummyTriggerAction.class);
-// }
-// }
+
+ private TriggerLoader triggerLoader;
+
+ @Before
+ public void setup() throws TriggerException, TriggerManagerException {
+ triggerLoader = new MockTriggerLoader();
+
+ }
+
+ @After
+ public void tearDown() {
+
+ }
+
+ @Test
+ public void TriggerManagerSimpleTest() throws TriggerManagerException {
+
+ Props props = new Props();
+ props.put("trigger.scan.interval", 4000);
+ TriggerManager triggerManager =
+ new TriggerManager(props, triggerLoader, null);
+
+ triggerManager.registerCheckerType(ThresholdChecker.type,
+ ThresholdChecker.class);
+ triggerManager.registerActionType(DummyTriggerAction.type,
+ DummyTriggerAction.class);
+
+ ThresholdChecker.setVal(1);
+
+ triggerManager.insertTrigger(
+ createDummyTrigger("test1", "triggerLoader", 10), "testUser");
+ List<Trigger> triggers = triggerManager.getTriggers();
+ assertTrue(triggers.size() == 1);
+ Trigger t1 = triggers.get(0);
+ t1.setResetOnTrigger(false);
+ triggerManager.updateTrigger(t1, "testUser");
+ ThresholdChecker checker1 =
+ (ThresholdChecker) t1.getTriggerCondition().getCheckers().values()
+ .toArray()[0];
+ assertTrue(t1.getSource().equals("triggerLoader"));
+
+ Trigger t2 =
+ createDummyTrigger("test2: add new trigger", "addNewTriggerTest", 20);
+ triggerManager.insertTrigger(t2, "testUser");
+ ThresholdChecker checker2 =
+ (ThresholdChecker) t2.getTriggerCondition().getCheckers().values()
+ .toArray()[0];
+
+ ThresholdChecker.setVal(15);
+ try {
+ Thread.sleep(2000);
+ } catch (InterruptedException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+
+ assertTrue(checker1.isCheckerMet() == false);
+ assertTrue(checker2.isCheckerMet() == false);
+ assertTrue(checker1.isCheckerReset() == false);
+ assertTrue(checker2.isCheckerReset() == false);
+
+ try {
+ Thread.sleep(2000);
+ } catch (InterruptedException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+
+ assertTrue(checker1.isCheckerMet() == true);
+ assertTrue(checker2.isCheckerMet() == false);
+ assertTrue(checker1.isCheckerReset() == false);
+ assertTrue(checker2.isCheckerReset() == false);
+
+ ThresholdChecker.setVal(25);
+ try {
+ Thread.sleep(4000);
+ } catch (InterruptedException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+
+ assertTrue(checker1.isCheckerMet() == true);
+ assertTrue(checker1.isCheckerReset() == false);
+ assertTrue(checker2.isCheckerReset() == true);
+
+ triggers = triggerManager.getTriggers();
+ assertTrue(triggers.size() == 1);
+
+ }
+
+ public class MockTriggerLoader implements TriggerLoader {
+
+ private Map<Integer, Trigger> triggers = new HashMap<Integer, Trigger>();
+ private int idIndex = 0;
+
+ @Override
+ public void addTrigger(Trigger t) throws TriggerLoaderException {
+ t.setTriggerId(idIndex++);
+ triggers.put(t.getTriggerId(), t);
+ }
+
+ @Override
+ public void removeTrigger(Trigger s) throws TriggerLoaderException {
+ triggers.remove(s.getTriggerId());
+
+ }
+
+ @Override
+ public void updateTrigger(Trigger t) throws TriggerLoaderException {
+ triggers.put(t.getTriggerId(), t);
+ }
+
+ @Override
+ public List<Trigger> loadTriggers() {
+ return new ArrayList<Trigger>(triggers.values());
+ }
+
+ @Override
+ public Trigger loadTrigger(int triggerId) throws TriggerLoaderException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public List<Trigger> getUpdatedTriggers(long lastUpdateTime)
+ throws TriggerLoaderException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ }
+
+ private Trigger createDummyTrigger(String message, String source,
+ int threshold) {
+
+ Map<String, ConditionChecker> checkers =
+ new HashMap<String, ConditionChecker>();
+ ConditionChecker checker =
+ new ThresholdChecker(ThresholdChecker.type, threshold);
+ checkers.put(checker.getId(), checker);
+
+ List<TriggerAction> actions = new ArrayList<TriggerAction>();
+ TriggerAction act = new DummyTriggerAction(message);
+ actions.add(act);
+
+ String expr = checker.getId() + ".eval()";
+
+ Condition triggerCond = new Condition(checkers, expr);
+ Condition expireCond = new Condition(checkers, expr);
+
+ Trigger fakeTrigger =
+ new Trigger(DateTime.now().getMillis(), DateTime.now().getMillis(),
+ "azkaban", source, triggerCond, expireCond, actions);
+ fakeTrigger.setResetOnTrigger(true);
+ fakeTrigger.setResetOnExpire(true);
+
+ return fakeTrigger;
+ }
+
+ // public class MockCheckerLoader extends CheckerTypeLoader{
+ //
+ // @Override
+ // public void init(Props props) {
+ // checkerToClass.put(ThresholdChecker.type, ThresholdChecker.class);
+ // }
+ // }
+ //
+ // public class MockActionLoader extends ActionTypeLoader {
+ // @Override
+ // public void init(Props props) {
+ // actionToClass.put(DummyTriggerAction.type, DummyTriggerAction.class);
+ // }
+ // }
}
unit/java/azkaban/test/trigger/TriggerTest.java 85(+46 -39)
diff --git a/unit/java/azkaban/test/trigger/TriggerTest.java b/unit/java/azkaban/test/trigger/TriggerTest.java
index 2b79266..0155c4c 100644
--- a/unit/java/azkaban/test/trigger/TriggerTest.java
+++ b/unit/java/azkaban/test/trigger/TriggerTest.java
@@ -27,44 +27,51 @@ import azkaban.utils.Props;
import azkaban.utils.Utils;
public class TriggerTest {
-
- private CheckerTypeLoader checkerLoader;
- private ActionTypeLoader actionLoader;
-
- @Before
- public void setup() throws TriggerException {
- checkerLoader = new CheckerTypeLoader();
- checkerLoader.init(new Props());
- Condition.setCheckerLoader(checkerLoader);
- actionLoader = new ActionTypeLoader();
- actionLoader.init(new Props());
- Trigger.setActionTypeLoader(actionLoader);
- }
-
- @Test
- public void jsonConversionTest() throws Exception {
- DateTime now = DateTime.now();
- ConditionChecker checker1 = new BasicTimeChecker("timeChecker1", now.getMillis(), now.getZone(), true, true, Utils.parsePeriodString("1h"));
- Map<String, ConditionChecker> checkers1 = new HashMap<String, ConditionChecker>();
- checkers1.put(checker1.getId(), checker1);
- String expr1 = checker1.getId() + ".eval()";
- Condition triggerCond = new Condition(checkers1, expr1);
- Condition expireCond = new Condition(checkers1, expr1);
- List<TriggerAction> actions = new ArrayList<TriggerAction>();
- TriggerAction action = new ExecuteFlowAction("executeAction", 1, "testProj", "testFlow", "azkaban", new ExecutionOptions(), null);
- actions.add(action);
- Trigger t = new Trigger(now.getMillis(), now.getMillis(), "azkaban", "test", triggerCond, expireCond, actions);
-
- File temp = File.createTempFile("temptest", "temptest");
- temp.deleteOnExit();
- Object obj = t.toJson();
- JSONUtils.toJSON(obj, temp);
-
- Trigger t2 = Trigger.fromJson(JSONUtils.parseJSONFromFile(temp));
-
- assertTrue(t.getSource().equals(t2.getSource()));
- assertTrue(t.getTriggerId() == t2.getTriggerId());
-
- }
+
+ private CheckerTypeLoader checkerLoader;
+ private ActionTypeLoader actionLoader;
+
+ @Before
+ public void setup() throws TriggerException {
+ checkerLoader = new CheckerTypeLoader();
+ checkerLoader.init(new Props());
+ Condition.setCheckerLoader(checkerLoader);
+ actionLoader = new ActionTypeLoader();
+ actionLoader.init(new Props());
+ Trigger.setActionTypeLoader(actionLoader);
+ }
+
+ @Test
+ public void jsonConversionTest() throws Exception {
+ DateTime now = DateTime.now();
+ ConditionChecker checker1 =
+ new BasicTimeChecker("timeChecker1", now.getMillis(), now.getZone(),
+ true, true, Utils.parsePeriodString("1h"));
+ Map<String, ConditionChecker> checkers1 =
+ new HashMap<String, ConditionChecker>();
+ checkers1.put(checker1.getId(), checker1);
+ String expr1 = checker1.getId() + ".eval()";
+ Condition triggerCond = new Condition(checkers1, expr1);
+ Condition expireCond = new Condition(checkers1, expr1);
+ List<TriggerAction> actions = new ArrayList<TriggerAction>();
+ TriggerAction action =
+ new ExecuteFlowAction("executeAction", 1, "testProj", "testFlow",
+ "azkaban", new ExecutionOptions(), null);
+ actions.add(action);
+ Trigger t =
+ new Trigger(now.getMillis(), now.getMillis(), "azkaban", "test",
+ triggerCond, expireCond, actions);
+
+ File temp = File.createTempFile("temptest", "temptest");
+ temp.deleteOnExit();
+ Object obj = t.toJson();
+ JSONUtils.toJSON(obj, temp);
+
+ Trigger t2 = Trigger.fromJson(JSONUtils.parseJSONFromFile(temp));
+
+ assertTrue(t.getSource().equals(t2.getSource()));
+ assertTrue(t.getTriggerId() == t2.getTriggerId());
+
+ }
}
unit/java/azkaban/test/user/PermissionTest.java 186(+94 -92)
diff --git a/unit/java/azkaban/test/user/PermissionTest.java b/unit/java/azkaban/test/user/PermissionTest.java
index d5f0675..d122e32 100644
--- a/unit/java/azkaban/test/user/PermissionTest.java
+++ b/unit/java/azkaban/test/user/PermissionTest.java
@@ -10,96 +10,98 @@ import azkaban.user.Permission;
import azkaban.user.Permission.Type;
public class PermissionTest {
- @Before
- public void setUp() throws Exception {
- }
-
- @After
- public void tearDown() throws Exception {
- }
-
- @Test
- public void testEmptyPermissionCreation() throws Exception {
- Permission permission = new Permission();
- permission.addPermissionsByName(new String[]{});
- }
-
- @Test
- public void testSinglePermissionCreation() throws Exception {
- Permission perm1 = new Permission();
- perm1.addPermissionsByName("READ");
-
- Permission perm2 = new Permission();
- perm2.addPermission(Type.READ);
- info("Compare " + perm1.toString() + " and " + perm2.toString());
- assertTrue(perm1.equals(perm2));
- }
-
- @Test
- public void testListPermissionCreation() throws Exception {
- Permission perm1 = new Permission();
- perm1.addPermissionsByName(new String[]{"READ", "EXECUTE"});
-
- Permission perm2 = new Permission();
- perm2.addPermission(new Type[]{Type.EXECUTE, Type.READ});
- info("Compare " + perm1.toString() + " and " + perm2.toString());
- assertTrue(perm1.equals(perm2));
- }
-
- @Test
- public void testRemovePermission() throws Exception {
- Permission perm1 = new Permission();
- perm1.addPermissionsByName(new String[]{"READ", "EXECUTE", "WRITE"});
- perm1.removePermissions(Type.EXECUTE);
-
- Permission perm2 = new Permission();
- perm2.addPermission(new Type[]{Type.READ, Type.WRITE});
- info("Compare " + perm1.toString() + " and " + perm2.toString());
- assertTrue(perm1.equals(perm2));
- }
-
- @Test
- public void testRemovePermissionByName() throws Exception {
- Permission perm1 = new Permission();
- perm1.addPermissionsByName(new String[]{"READ", "EXECUTE", "WRITE"});
- perm1.removePermissionsByName("EXECUTE");
-
- Permission perm2 = new Permission();
- perm2.addPermission(new Type[]{Type.READ, Type.WRITE});
- info("Compare " + perm1.toString() + " and " + perm2.toString());
- assertTrue(perm1.equals(perm2));
- }
-
- @Test
- public void testToAndFromObject() throws Exception {
- Permission permission = new Permission();
- permission.addPermissionsByName(new String[]{"READ", "EXECUTE", "WRITE"});
-
- String[] array = permission.toStringArray();
- Permission permission2 = new Permission();
- permission2.addPermissionsByName(array);
- assertTrue(permission.equals(permission2));
- }
-
- @Test
- public void testFlags() throws Exception {
- Permission permission = new Permission();
- permission.addPermission(new Type[]{Type.READ, Type.WRITE});
-
- int flags = permission.toFlags();
- Permission permission2 = new Permission(flags);
-
- assertTrue(permission2.isPermissionSet(Type.READ));
- assertTrue(permission2.isPermissionSet(Type.WRITE));
-
- assertTrue(permission.equals(permission2));
- }
-
- /**
- * Why? because it's quicker.
- * @param message
- */
- public void info(String message) {
- System.out.println(message);
- }
+ @Before
+ public void setUp() throws Exception {
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ }
+
+ @Test
+ public void testEmptyPermissionCreation() throws Exception {
+ Permission permission = new Permission();
+ permission.addPermissionsByName(new String[] {});
+ }
+
+ @Test
+ public void testSinglePermissionCreation() throws Exception {
+ Permission perm1 = new Permission();
+ perm1.addPermissionsByName("READ");
+
+ Permission perm2 = new Permission();
+ perm2.addPermission(Type.READ);
+ info("Compare " + perm1.toString() + " and " + perm2.toString());
+ assertTrue(perm1.equals(perm2));
+ }
+
+ @Test
+ public void testListPermissionCreation() throws Exception {
+ Permission perm1 = new Permission();
+ perm1.addPermissionsByName(new String[] { "READ", "EXECUTE" });
+
+ Permission perm2 = new Permission();
+ perm2.addPermission(new Type[] { Type.EXECUTE, Type.READ });
+ info("Compare " + perm1.toString() + " and " + perm2.toString());
+ assertTrue(perm1.equals(perm2));
+ }
+
+ @Test
+ public void testRemovePermission() throws Exception {
+ Permission perm1 = new Permission();
+ perm1.addPermissionsByName(new String[] { "READ", "EXECUTE", "WRITE" });
+ perm1.removePermissions(Type.EXECUTE);
+
+ Permission perm2 = new Permission();
+ perm2.addPermission(new Type[] { Type.READ, Type.WRITE });
+ info("Compare " + perm1.toString() + " and " + perm2.toString());
+ assertTrue(perm1.equals(perm2));
+ }
+
+ @Test
+ public void testRemovePermissionByName() throws Exception {
+ Permission perm1 = new Permission();
+ perm1.addPermissionsByName(new String[] { "READ", "EXECUTE", "WRITE" });
+ perm1.removePermissionsByName("EXECUTE");
+
+ Permission perm2 = new Permission();
+ perm2.addPermission(new Type[] { Type.READ, Type.WRITE });
+ info("Compare " + perm1.toString() + " and " + perm2.toString());
+ assertTrue(perm1.equals(perm2));
+ }
+
+ @Test
+ public void testToAndFromObject() throws Exception {
+ Permission permission = new Permission();
+ permission
+ .addPermissionsByName(new String[] { "READ", "EXECUTE", "WRITE" });
+
+ String[] array = permission.toStringArray();
+ Permission permission2 = new Permission();
+ permission2.addPermissionsByName(array);
+ assertTrue(permission.equals(permission2));
+ }
+
+ @Test
+ public void testFlags() throws Exception {
+ Permission permission = new Permission();
+ permission.addPermission(new Type[] { Type.READ, Type.WRITE });
+
+ int flags = permission.toFlags();
+ Permission permission2 = new Permission(flags);
+
+ assertTrue(permission2.isPermissionSet(Type.READ));
+ assertTrue(permission2.isPermissionSet(Type.WRITE));
+
+ assertTrue(permission.equals(permission2));
+ }
+
+ /**
+ * Why? because it's quicker.
+ *
+ * @param message
+ */
+ public void info(String message) {
+ System.out.println(message);
+ }
}
\ No newline at end of file
unit/java/azkaban/test/user/XmlUserManagerTest.java 365(+188 -177)
diff --git a/unit/java/azkaban/test/user/XmlUserManagerTest.java b/unit/java/azkaban/test/user/XmlUserManagerTest.java
index 26a9ce9..2dcebf2 100644
--- a/unit/java/azkaban/test/user/XmlUserManagerTest.java
+++ b/unit/java/azkaban/test/user/XmlUserManagerTest.java
@@ -16,188 +16,199 @@ import azkaban.utils.Props;
import azkaban.utils.UndefinedPropertyException;
public class XmlUserManagerTest {
- private Props baseProps = new Props();
-
- @Before
- public void setUp() throws Exception {
+ private Props baseProps = new Props();
+
+ @Before
+ public void setUp() throws Exception {
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ }
+
+ /**
+ * Testing for when the xml path isn't set in properties.
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testFilePropNotSet() throws Exception {
+ Props props = new Props(baseProps);
+
+ // Should throw
+ try {
+ @SuppressWarnings("unused")
+ XmlUserManager manager = new XmlUserManager(props);
+ } catch (UndefinedPropertyException e) {
+ return;
}
-
- @After
- public void tearDown() throws Exception {
+
+ fail("XmlUserManager should throw an exception when the file property isn't set");
+ }
+
+ /**
+ * Testing for when the xml path doesn't exist.
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testDoNotExist() throws Exception {
+ Props props = new Props(baseProps);
+ props.put(XmlUserManager.XML_FILE_PARAM, "unit/test-conf/doNotExist.xml");
+
+ try {
+ @SuppressWarnings("unused")
+ UserManager manager = new XmlUserManager(props);
+ } catch (RuntimeException e) {
+ return;
}
-
- /**
- * Testing for when the xml path isn't set in properties.
- * @throws Exception
- */
- @Test
- public void testFilePropNotSet() throws Exception {
- Props props = new Props(baseProps);
-
- // Should throw
- try {
- @SuppressWarnings("unused")
- XmlUserManager manager = new XmlUserManager(props);
- } catch (UndefinedPropertyException e) {
- return;
- }
-
- fail("XmlUserManager should throw an exception when the file property isn't set");
+
+ fail("XmlUserManager should throw an exception when the file doesn't exist");
+ }
+
+ @Test
+ public void testBasicLoad() throws Exception {
+ Props props = new Props(baseProps);
+ props.put(XmlUserManager.XML_FILE_PARAM,
+ "unit/test-conf/azkaban-users-test1.xml");
+
+ UserManager manager = null;
+ try {
+ manager = new XmlUserManager(props);
+ } catch (RuntimeException e) {
+ e.printStackTrace();
+ fail("XmlUserManager should've found file azkaban-users.xml");
+ }
+
+ try {
+ manager.getUser("user0", null);
+ } catch (UserManagerException e) {
+ System.out.println("Exception handled correctly: " + e.getMessage());
}
-
- /**
- * Testing for when the xml path doesn't exist.
- * @throws Exception
- */
- @Test
- public void testDoNotExist() throws Exception {
- Props props = new Props(baseProps);
- props.put(XmlUserManager.XML_FILE_PARAM, "unit/test-conf/doNotExist.xml");
-
- try {
- @SuppressWarnings("unused")
- UserManager manager = new XmlUserManager(props);
- } catch (RuntimeException e) {
- return;
- }
-
- fail("XmlUserManager should throw an exception when the file doesn't exist");
+
+ try {
+ manager.getUser(null, "etw");
+ } catch (UserManagerException e) {
+ System.out.println("Exception handled correctly: " + e.getMessage());
}
-
- @Test
- public void testBasicLoad() throws Exception {
- Props props = new Props(baseProps);
- props.put(XmlUserManager.XML_FILE_PARAM, "unit/test-conf/azkaban-users-test1.xml");
-
- UserManager manager = null;
- try {
- manager = new XmlUserManager(props);
- } catch (RuntimeException e) {
- e.printStackTrace();
- fail("XmlUserManager should've found file azkaban-users.xml");
- }
-
- try {
- manager.getUser("user0", null);
- } catch (UserManagerException e) {
- System.out.println("Exception handled correctly: " + e.getMessage());
- }
-
- try {
- manager.getUser(null, "etw");
- } catch (UserManagerException e) {
- System.out.println("Exception handled correctly: " + e.getMessage());
- }
-
- try {
- manager.getUser("user0", "user0");
- } catch (UserManagerException e) {
- System.out.println("Exception handled correctly: " + e.getMessage());
- }
-
- try {
- manager.getUser("user0", "password0");
- } catch (UserManagerException e) {
- e.printStackTrace();
- fail("XmlUserManager should've returned a user.");
- }
-
- User user0 = manager.getUser("user0", "password0");
- checkUser(user0, "role0","group0");
-
- User user1 = manager.getUser("user1", "password1");
- checkUser(user1, "role0,role1", "group1,group2");
-
- User user2 = manager.getUser("user2", "password2");
- checkUser(user2, "role0,role1,role2", "group1,group2,group3");
-
- User user3 = manager.getUser("user3", "password3");
- checkUser(user3, "role1,role2", "group1,group2");
-
- User user4 = manager.getUser("user4", "password4");
- checkUser(user4, "role1,role2", "group1,group2");
-
- User user5 = manager.getUser("user5", "password5");
- checkUser(user5, "role1,role2", "group1,group2");
-
- User user6 = manager.getUser("user6", "password6");
- checkUser(user6, "role3,role2", "group1,group2");
-
- User user7 = manager.getUser("user7", "password7");
- checkUser(user7, "", "group1");
-
- User user8 = manager.getUser("user8", "password8");
- checkUser(user8, "role3", "");
-
- User user9 = manager.getUser("user9", "password9");
- checkUser(user9, "", "");
+
+ try {
+ manager.getUser("user0", "user0");
+ } catch (UserManagerException e) {
+ System.out.println("Exception handled correctly: " + e.getMessage());
}
-
- private void checkUser(User user, String rolesStr, String groupsStr) {
- // Validating roles
- HashSet<String> roleSet = new HashSet<String>(user.getRoles());
- if (rolesStr.isEmpty()) {
- if (!roleSet.isEmpty()) {
- String outputRoleStr = "";
- for (String role: roleSet) {
- outputRoleStr += role + ",";
- }
- throw new RuntimeException("Roles mismatch for " + user.getUserId() + ". Expected roles to be empty but got " + outputRoleStr);
- }
- }
- else {
- String outputRoleStr = "";
- for (String role: roleSet) {
- outputRoleStr += role + ",";
- }
-
- String[] splitRoles = rolesStr.split(",");
- HashSet<String> expectedRoles = new HashSet<String>();
- for (String role: splitRoles) {
- if (!roleSet.contains(role)) {
- throw new RuntimeException("Roles mismatch for user " + user.getUserId() + " role " + role + ". Expected roles to " + rolesStr + " but got " + outputRoleStr);
- }
- expectedRoles.add(role);
- }
-
- for (String role: roleSet) {
- if (!expectedRoles.contains(role)) {
- throw new RuntimeException("Roles mismatch for user " + user.getUserId() + " role " + role + ". Expected roles to " + rolesStr + " but got " + outputRoleStr);
- }
- }
- }
-
- HashSet<String> groupSet = new HashSet<String>(user.getGroups());
- if (groupsStr.isEmpty()) {
- if (!groupSet.isEmpty()) {
- String outputGroupStr = "";
- for (String role: roleSet) {
- outputGroupStr += role + ",";
- }
- throw new RuntimeException("Roles mismatch for " + user.getUserId() + ". Expected roles to be empty but got " + outputGroupStr);
- }
- }
- else {
- String outputGroupStr = "";
- for (String group: groupSet) {
- outputGroupStr += group + ",";
- }
-
- String[] splitGroups = groupsStr.split(",");
- HashSet<String> expectedGroups = new HashSet<String>();
- for (String group: splitGroups) {
- if (!groupSet.contains(group)) {
- throw new RuntimeException("Groups mismatch for user " + user.getUserId() + " group " + group + ". Expected groups to " + groupsStr + " but got " + outputGroupStr);
- }
- expectedGroups.add(group);
- }
-
- for (String group: groupSet) {
- if (!expectedGroups.contains(group)) {
- throw new RuntimeException("Groups mismatch for user " + user.getUserId() + " group " + group + ". Expected groups to " + groupsStr + " but got " + outputGroupStr);
- }
- }
- }
+ try {
+ manager.getUser("user0", "password0");
+ } catch (UserManagerException e) {
+ e.printStackTrace();
+ fail("XmlUserManager should've returned a user.");
}
+
+ User user0 = manager.getUser("user0", "password0");
+ checkUser(user0, "role0", "group0");
+
+ User user1 = manager.getUser("user1", "password1");
+ checkUser(user1, "role0,role1", "group1,group2");
+
+ User user2 = manager.getUser("user2", "password2");
+ checkUser(user2, "role0,role1,role2", "group1,group2,group3");
+
+ User user3 = manager.getUser("user3", "password3");
+ checkUser(user3, "role1,role2", "group1,group2");
+
+ User user4 = manager.getUser("user4", "password4");
+ checkUser(user4, "role1,role2", "group1,group2");
+
+ User user5 = manager.getUser("user5", "password5");
+ checkUser(user5, "role1,role2", "group1,group2");
+
+ User user6 = manager.getUser("user6", "password6");
+ checkUser(user6, "role3,role2", "group1,group2");
+
+ User user7 = manager.getUser("user7", "password7");
+ checkUser(user7, "", "group1");
+
+ User user8 = manager.getUser("user8", "password8");
+ checkUser(user8, "role3", "");
+
+ User user9 = manager.getUser("user9", "password9");
+ checkUser(user9, "", "");
+ }
+
+ private void checkUser(User user, String rolesStr, String groupsStr) {
+ // Validating roles
+ HashSet<String> roleSet = new HashSet<String>(user.getRoles());
+ if (rolesStr.isEmpty()) {
+ if (!roleSet.isEmpty()) {
+ String outputRoleStr = "";
+ for (String role : roleSet) {
+ outputRoleStr += role + ",";
+ }
+ throw new RuntimeException("Roles mismatch for " + user.getUserId()
+ + ". Expected roles to be empty but got " + outputRoleStr);
+ }
+ } else {
+ String outputRoleStr = "";
+ for (String role : roleSet) {
+ outputRoleStr += role + ",";
+ }
+
+ String[] splitRoles = rolesStr.split(",");
+ HashSet<String> expectedRoles = new HashSet<String>();
+ for (String role : splitRoles) {
+ if (!roleSet.contains(role)) {
+ throw new RuntimeException("Roles mismatch for user "
+ + user.getUserId() + " role " + role + ". Expected roles to "
+ + rolesStr + " but got " + outputRoleStr);
+ }
+ expectedRoles.add(role);
+ }
+
+ for (String role : roleSet) {
+ if (!expectedRoles.contains(role)) {
+ throw new RuntimeException("Roles mismatch for user "
+ + user.getUserId() + " role " + role + ". Expected roles to "
+ + rolesStr + " but got " + outputRoleStr);
+ }
+ }
+ }
+
+ HashSet<String> groupSet = new HashSet<String>(user.getGroups());
+ if (groupsStr.isEmpty()) {
+ if (!groupSet.isEmpty()) {
+ String outputGroupStr = "";
+ for (String role : roleSet) {
+ outputGroupStr += role + ",";
+ }
+ throw new RuntimeException("Roles mismatch for " + user.getUserId()
+ + ". Expected roles to be empty but got " + outputGroupStr);
+ }
+ } else {
+ String outputGroupStr = "";
+ for (String group : groupSet) {
+ outputGroupStr += group + ",";
+ }
+
+ String[] splitGroups = groupsStr.split(",");
+ HashSet<String> expectedGroups = new HashSet<String>();
+ for (String group : splitGroups) {
+ if (!groupSet.contains(group)) {
+ throw new RuntimeException("Groups mismatch for user "
+ + user.getUserId() + " group " + group + ". Expected groups to "
+ + groupsStr + " but got " + outputGroupStr);
+ }
+ expectedGroups.add(group);
+ }
+
+ for (String group : groupSet) {
+ if (!expectedGroups.contains(group)) {
+ throw new RuntimeException("Groups mismatch for user "
+ + user.getUserId() + " group " + group + ". Expected groups to "
+ + groupsStr + " but got " + outputGroupStr);
+ }
+ }
+ }
+
+ }
}
unit/java/azkaban/test/utils/cache/CacheTest.java 268(+134 -134)
diff --git a/unit/java/azkaban/test/utils/cache/CacheTest.java b/unit/java/azkaban/test/utils/cache/CacheTest.java
index 3fcc0f9..9f33795 100644
--- a/unit/java/azkaban/test/utils/cache/CacheTest.java
+++ b/unit/java/azkaban/test/utils/cache/CacheTest.java
@@ -8,138 +8,138 @@ import azkaban.utils.cache.Cache.EjectionPolicy;
import azkaban.utils.cache.CacheManager;
public class CacheTest {
- @Test
- public void testLRU() {
- CacheManager manager = CacheManager.getInstance();
- Cache cache = manager.createCache();
- cache.setEjectionPolicy(EjectionPolicy.LRU);
- cache.setMaxCacheSize(4);
-
- cache.insertElement("key1", "val1");
- cache.insertElement("key2", "val2");
- cache.insertElement("key3", "val3");
- cache.insertElement("key4", "val4");
-
- Assert.assertEquals(cache.get("key2"), "val2");
- Assert.assertEquals(cache.get("key3"), "val3");
- Assert.assertEquals(cache.get("key4"), "val4");
- Assert.assertEquals(cache.get("key1"), "val1");
- Assert.assertEquals(4, cache.getSize());
-
- cache.insertElement("key5", "val5");
- Assert.assertEquals(4, cache.getSize());
- Assert.assertEquals(cache.get("key3"), "val3");
- Assert.assertEquals(cache.get("key4"), "val4");
- Assert.assertEquals(cache.get("key1"), "val1");
- Assert.assertEquals(cache.get("key5"), "val5");
- Assert.assertNull(cache.get("key2"));
- }
-
- @Test
- public void testFIFO() {
- CacheManager manager = CacheManager.getInstance();
- Cache cache = manager.createCache();
- cache.setEjectionPolicy(EjectionPolicy.FIFO);
- cache.setMaxCacheSize(4);
-
- cache.insertElement("key1", "val1");
- synchronized (this) {
- try {
- wait(10);
- } catch (InterruptedException e) {
- }
- }
- cache.insertElement("key2", "val2");
- cache.insertElement("key3", "val3");
- cache.insertElement("key4", "val4");
-
- Assert.assertEquals(cache.get("key2"), "val2");
- Assert.assertEquals(cache.get("key3"), "val3");
- Assert.assertEquals(cache.get("key4"), "val4");
- Assert.assertEquals(cache.get("key1"), "val1");
- Assert.assertEquals(4, cache.getSize());
-
- cache.insertElement("key5", "val5");
- Assert.assertEquals(4, cache.getSize());
- Assert.assertEquals(cache.get("key3"), "val3");
- Assert.assertEquals(cache.get("key4"), "val4");
- Assert.assertEquals(cache.get("key2"), "val2");
- Assert.assertEquals(cache.get("key5"), "val5");
- Assert.assertNull(cache.get("key1"));
- }
-
- @Test
- public void testTimeToLiveExpiry() {
- CacheManager.setUpdateFrequency(200);
- CacheManager manager = CacheManager.getInstance();
- Cache cache = manager.createCache();
-
- cache.setUpdateFrequencyMs(200);
- cache.setEjectionPolicy(EjectionPolicy.FIFO);
- cache.setExpiryTimeToLiveMs(4500);
- cache.insertElement("key1", "val1");
-
- synchronized (this) {
- try {
- wait(1000);
- } catch (InterruptedException e) {
- }
- }
- Assert.assertEquals(cache.get("key1"), "val1");
- cache.insertElement("key2", "val2");
- synchronized (this) {
- try {
- wait(4000);
- } catch (InterruptedException e) {
- }
- }
- Assert.assertNull(cache.get("key1"));
- Assert.assertEquals("val2", cache.get("key2"));
-
- synchronized (this) {
- try {
- wait(1000);
- } catch (InterruptedException e) {
- }
- }
-
- Assert.assertNull(cache.get("key2"));
- }
-
- @Test
- public void testIdleExpireExpiry() {
- CacheManager.setUpdateFrequency(250);
- CacheManager manager = CacheManager.getInstance();
- Cache cache = manager.createCache();
-
- cache.setUpdateFrequencyMs(250);
- cache.setEjectionPolicy(EjectionPolicy.FIFO);
- cache.setExpiryIdleTimeMs(4500);
- cache.insertElement("key1", "val1");
- cache.insertElement("key3", "val3");
- synchronized (this) {
- try {
- wait(1000);
- } catch (InterruptedException e) {
- }
- }
- Assert.assertEquals(cache.get("key1"), "val1");
- cache.insertElement("key2", "val2");
- synchronized (this) {
- try {
- wait(4000);
- } catch (InterruptedException e) {
- }
- }
- Assert.assertEquals("val1", cache.get("key1"));
- Assert.assertNull(cache.get("key3"));
- synchronized (this) {
- try {
- wait(1000);
- } catch (InterruptedException e) {
- }
- }
-
- Assert.assertNull(cache.get("key2"));
- }
+ @Test
+ public void testLRU() {
+ CacheManager manager = CacheManager.getInstance();
+ Cache cache = manager.createCache();
+ cache.setEjectionPolicy(EjectionPolicy.LRU);
+ cache.setMaxCacheSize(4);
+
+ cache.insertElement("key1", "val1");
+ cache.insertElement("key2", "val2");
+ cache.insertElement("key3", "val3");
+ cache.insertElement("key4", "val4");
+
+ Assert.assertEquals(cache.get("key2"), "val2");
+ Assert.assertEquals(cache.get("key3"), "val3");
+ Assert.assertEquals(cache.get("key4"), "val4");
+ Assert.assertEquals(cache.get("key1"), "val1");
+ Assert.assertEquals(4, cache.getSize());
+
+ cache.insertElement("key5", "val5");
+ Assert.assertEquals(4, cache.getSize());
+ Assert.assertEquals(cache.get("key3"), "val3");
+ Assert.assertEquals(cache.get("key4"), "val4");
+ Assert.assertEquals(cache.get("key1"), "val1");
+ Assert.assertEquals(cache.get("key5"), "val5");
+ Assert.assertNull(cache.get("key2"));
+ }
+
+ @Test
+ public void testFIFO() {
+ CacheManager manager = CacheManager.getInstance();
+ Cache cache = manager.createCache();
+ cache.setEjectionPolicy(EjectionPolicy.FIFO);
+ cache.setMaxCacheSize(4);
+
+ cache.insertElement("key1", "val1");
+ synchronized (this) {
+ try {
+ wait(10);
+ } catch (InterruptedException e) {
+ }
+ }
+ cache.insertElement("key2", "val2");
+ cache.insertElement("key3", "val3");
+ cache.insertElement("key4", "val4");
+
+ Assert.assertEquals(cache.get("key2"), "val2");
+ Assert.assertEquals(cache.get("key3"), "val3");
+ Assert.assertEquals(cache.get("key4"), "val4");
+ Assert.assertEquals(cache.get("key1"), "val1");
+ Assert.assertEquals(4, cache.getSize());
+
+ cache.insertElement("key5", "val5");
+ Assert.assertEquals(4, cache.getSize());
+ Assert.assertEquals(cache.get("key3"), "val3");
+ Assert.assertEquals(cache.get("key4"), "val4");
+ Assert.assertEquals(cache.get("key2"), "val2");
+ Assert.assertEquals(cache.get("key5"), "val5");
+ Assert.assertNull(cache.get("key1"));
+ }
+
+ @Test
+ public void testTimeToLiveExpiry() {
+ CacheManager.setUpdateFrequency(200);
+ CacheManager manager = CacheManager.getInstance();
+ Cache cache = manager.createCache();
+
+ cache.setUpdateFrequencyMs(200);
+ cache.setEjectionPolicy(EjectionPolicy.FIFO);
+ cache.setExpiryTimeToLiveMs(4500);
+ cache.insertElement("key1", "val1");
+
+ synchronized (this) {
+ try {
+ wait(1000);
+ } catch (InterruptedException e) {
+ }
+ }
+ Assert.assertEquals(cache.get("key1"), "val1");
+ cache.insertElement("key2", "val2");
+ synchronized (this) {
+ try {
+ wait(4000);
+ } catch (InterruptedException e) {
+ }
+ }
+ Assert.assertNull(cache.get("key1"));
+ Assert.assertEquals("val2", cache.get("key2"));
+
+ synchronized (this) {
+ try {
+ wait(1000);
+ } catch (InterruptedException e) {
+ }
+ }
+
+ Assert.assertNull(cache.get("key2"));
+ }
+
+ @Test
+ public void testIdleExpireExpiry() {
+ CacheManager.setUpdateFrequency(250);
+ CacheManager manager = CacheManager.getInstance();
+ Cache cache = manager.createCache();
+
+ cache.setUpdateFrequencyMs(250);
+ cache.setEjectionPolicy(EjectionPolicy.FIFO);
+ cache.setExpiryIdleTimeMs(4500);
+ cache.insertElement("key1", "val1");
+ cache.insertElement("key3", "val3");
+ synchronized (this) {
+ try {
+ wait(1000);
+ } catch (InterruptedException e) {
+ }
+ }
+ Assert.assertEquals(cache.get("key1"), "val1");
+ cache.insertElement("key2", "val2");
+ synchronized (this) {
+ try {
+ wait(4000);
+ } catch (InterruptedException e) {
+ }
+ }
+ Assert.assertEquals("val1", cache.get("key1"));
+ Assert.assertNull(cache.get("key3"));
+ synchronized (this) {
+ try {
+ wait(1000);
+ } catch (InterruptedException e) {
+ }
+ }
+
+ Assert.assertNull(cache.get("key2"));
+ }
}
diff --git a/unit/java/azkaban/test/utils/DirectoryFlowLoaderTest.java b/unit/java/azkaban/test/utils/DirectoryFlowLoaderTest.java
index 800df48..538fd44 100644
--- a/unit/java/azkaban/test/utils/DirectoryFlowLoaderTest.java
+++ b/unit/java/azkaban/test/utils/DirectoryFlowLoaderTest.java
@@ -10,35 +10,35 @@ import azkaban.utils.DirectoryFlowLoader;
public class DirectoryFlowLoaderTest {
- @Test
- public void testDirectoryLoad() {
- Logger logger = Logger.getLogger(this.getClass());
- DirectoryFlowLoader loader = new DirectoryFlowLoader(logger);
-
- loader.loadProjectFlow(new File("unit/executions/exectest1"));
- logger.info(loader.getFlowMap().size());
- }
-
- @Test
- public void testLoadEmbeddedFlow() {
- Logger logger = Logger.getLogger(this.getClass());
- DirectoryFlowLoader loader = new DirectoryFlowLoader(logger);
-
- loader.loadProjectFlow(new File("unit/executions/embedded"));
- Assert.assertEquals(0, loader.getErrors().size());
- }
-
- @Test
- public void testRecursiveLoadEmbeddedFlow() {
- Logger logger = Logger.getLogger(this.getClass());
- DirectoryFlowLoader loader = new DirectoryFlowLoader(logger);
-
- loader.loadProjectFlow(new File("unit/executions/embeddedBad"));
- for (String error: loader.getErrors()) {
- System.out.println(error);
- }
-
- // Should be 3 errors: jobe->innerFlow, innerFlow->jobe, innerFlow
- Assert.assertEquals(3, loader.getErrors().size());
- }
+ @Test
+ public void testDirectoryLoad() {
+ Logger logger = Logger.getLogger(this.getClass());
+ DirectoryFlowLoader loader = new DirectoryFlowLoader(logger);
+
+ loader.loadProjectFlow(new File("unit/executions/exectest1"));
+ logger.info(loader.getFlowMap().size());
+ }
+
+ @Test
+ public void testLoadEmbeddedFlow() {
+ Logger logger = Logger.getLogger(this.getClass());
+ DirectoryFlowLoader loader = new DirectoryFlowLoader(logger);
+
+ loader.loadProjectFlow(new File("unit/executions/embedded"));
+ Assert.assertEquals(0, loader.getErrors().size());
+ }
+
+ @Test
+ public void testRecursiveLoadEmbeddedFlow() {
+ Logger logger = Logger.getLogger(this.getClass());
+ DirectoryFlowLoader loader = new DirectoryFlowLoader(logger);
+
+ loader.loadProjectFlow(new File("unit/executions/embeddedBad"));
+ for (String error : loader.getErrors()) {
+ System.out.println(error);
+ }
+
+ // Should be 3 errors: jobe->innerFlow, innerFlow->jobe, innerFlow
+ Assert.assertEquals(3, loader.getErrors().size());
+ }
}
diff --git a/unit/java/azkaban/test/utils/EmailMessageTest.java b/unit/java/azkaban/test/utils/EmailMessageTest.java
index 8ab509e..5c0eac3 100644
--- a/unit/java/azkaban/test/utils/EmailMessageTest.java
+++ b/unit/java/azkaban/test/utils/EmailMessageTest.java
@@ -13,37 +13,38 @@ import azkaban.utils.EmailMessage;
public class EmailMessageTest {
- String host = "";
- String sender = "";
- String user = "";
- String password = "";
-
- String toAddr = "";
-
- private EmailMessage em;
- @Before
- public void setUp() throws Exception {
- em = new EmailMessage(host, user, password);
- em.setFromAddress(sender);
- }
-
- @After
- public void tearDown() throws Exception {
- }
-
- @Ignore
- @Test
- public void testSendEmail() throws IOException {
- em.addToAddress(toAddr);
- //em.addToAddress("cyu@linkedin.com");
- em.setSubject("azkaban test email");
- em.setBody("azkaban test email");
- try {
- em.sendEmail();
- } catch (MessagingException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- }
-
+ String host = "";
+ String sender = "";
+ String user = "";
+ String password = "";
+
+ String toAddr = "";
+
+ private EmailMessage em;
+
+ @Before
+ public void setUp() throws Exception {
+ em = new EmailMessage(host, user, password);
+ em.setFromAddress(sender);
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ }
+
+ @Ignore
+ @Test
+ public void testSendEmail() throws IOException {
+ em.addToAddress(toAddr);
+ // em.addToAddress("cyu@linkedin.com");
+ em.setSubject("azkaban test email");
+ em.setBody("azkaban test email");
+ try {
+ em.sendEmail();
+ } catch (MessagingException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ }
+
}
unit/java/azkaban/test/utils/FileIOUtilsTest.java 210(+109 -101)
diff --git a/unit/java/azkaban/test/utils/FileIOUtilsTest.java b/unit/java/azkaban/test/utils/FileIOUtilsTest.java
index 3157663..8f3101d 100644
--- a/unit/java/azkaban/test/utils/FileIOUtilsTest.java
+++ b/unit/java/azkaban/test/utils/FileIOUtilsTest.java
@@ -14,104 +14,112 @@ import azkaban.utils.FileIOUtils;
import azkaban.utils.Pair;
public class FileIOUtilsTest {
- File sourceDir = new File("unit/project/testjob");
- File destDir = new File("unit/executions/unixsymlink");
-
- @Before
- public void setUp() throws Exception {
- if (destDir.exists()) {
- FileUtils.deleteDirectory(destDir);
- }
- destDir.mkdirs();
- }
-
- @After
- public void tearDown() throws Exception {
- }
-
- @Test
- public void testSymlinkCopy() throws IOException {
- FileIOUtils.createDeepSymlink(sourceDir, destDir);
- }
-
- @Test
- public void testSymlinkCopyNonSource() {
- boolean exception = false;
- try {
- FileIOUtils.createDeepSymlink(new File(sourceDir, "idonotexist"), destDir);
- } catch (IOException e) {
- System.out.println(e.getMessage());
- System.out.println("Handled this case nicely.");
- exception = true;
- }
-
- Assert.assertTrue(exception);
- }
-
- @Test
- public void testAsciiUTF() throws IOException {
- String foreignText = "abcdefghijklmnopqrstuvwxyz";
- byte[] utf8ByteArray = createUTF8ByteArray(foreignText);
-
- int length = utf8ByteArray.length;
- System.out.println("char length:" + foreignText.length() + " utf8BytesLength:" + utf8ByteArray.length + " for:" + foreignText);
-
- Pair<Integer,Integer> pair = FileIOUtils.getUtf8Range(utf8ByteArray, 1, length - 6);
- System.out.println("Pair :" + pair.toString());
-
- String recreatedString = new String(utf8ByteArray, 1, length - 6, "UTF-8");
- System.out.println("recreatedString:" + recreatedString);
-
- String correctString = new String(utf8ByteArray, pair.getFirst(), pair.getSecond(), "UTF-8");
- System.out.println("correctString:" + correctString);
-
- Assert.assertEquals(pair, new Pair<Integer,Integer>(1, 20));
- // Two characters stripped from this.
- Assert.assertEquals(correctString.length(), foreignText.length() - 6);
-
- }
-
- @Test
- public void testForeignUTF() throws IOException {
- String foreignText = "안녕하세요, 제 이름은 박병호입니다";
- byte[] utf8ByteArray = createUTF8ByteArray(foreignText);
-
- int length = utf8ByteArray.length;
- System.out.println("char length:" + foreignText.length() + " utf8BytesLength:" + utf8ByteArray.length + " for:" + foreignText);
-
- Pair<Integer,Integer> pair = FileIOUtils.getUtf8Range(utf8ByteArray, 1, length - 6);
- System.out.println("Pair :" + pair.toString());
-
- String recreatedString = new String(utf8ByteArray, 1, length - 6, "UTF-8");
- System.out.println("recreatedString:" + recreatedString);
-
- String correctString = new String(utf8ByteArray, pair.getFirst(), pair.getSecond(), "UTF-8");
- System.out.println("correctString:" + correctString);
-
- Assert.assertEquals(pair, new Pair<Integer,Integer>(3, 40));
- // Two characters stripped from this.
- Assert.assertEquals(correctString.length(), foreignText.length() - 3);
-
-
- // Testing mixed bytes
- String mixedText = "abc안녕하세요, 제 이름은 박병호입니다";
- byte[] mixedBytes = createUTF8ByteArray(mixedText);
- Pair<Integer,Integer> pair2 = FileIOUtils.getUtf8Range(mixedBytes, 1, length - 4);
- correctString = new String(mixedBytes, pair2.getFirst(), pair2.getSecond(), "UTF-8");
- System.out.println("correctString:" + correctString);
- Assert.assertEquals(pair2, new Pair<Integer,Integer>(1, 45));
- // Two characters stripped from this.
- Assert.assertEquals(correctString.length(), mixedText.length() - 3);
-
- }
-
- private byte[] createUTF8ByteArray(String text) {
- byte[] textBytes= null;
- try {
- textBytes = text.getBytes("UTF-8");
- } catch (UnsupportedEncodingException e) {
- e.printStackTrace();
- }
- return textBytes;
- }
-}
\ No newline at end of file
+ File sourceDir = new File("unit/project/testjob");
+ File destDir = new File("unit/executions/unixsymlink");
+
+ @Before
+ public void setUp() throws Exception {
+ if (destDir.exists()) {
+ FileUtils.deleteDirectory(destDir);
+ }
+ destDir.mkdirs();
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ }
+
+ @Test
+ public void testSymlinkCopy() throws IOException {
+ FileIOUtils.createDeepSymlink(sourceDir, destDir);
+ }
+
+ @Test
+ public void testSymlinkCopyNonSource() {
+ boolean exception = false;
+ try {
+ FileIOUtils.createDeepSymlink(new File(sourceDir, "idonotexist"), destDir);
+ } catch (IOException e) {
+ System.out.println(e.getMessage());
+ System.out.println("Handled this case nicely.");
+ exception = true;
+ }
+
+ Assert.assertTrue(exception);
+ }
+
+ @Test
+ public void testAsciiUTF() throws IOException {
+ String foreignText = "abcdefghijklmnopqrstuvwxyz";
+ byte[] utf8ByteArray = createUTF8ByteArray(foreignText);
+
+ int length = utf8ByteArray.length;
+ System.out.println("char length:" + foreignText.length() +
+ " utf8BytesLength:" + utf8ByteArray.length + " for:" + foreignText);
+
+ Pair<Integer,Integer> pair = FileIOUtils.getUtf8Range(utf8ByteArray, 1,
+ length - 6);
+ System.out.println("Pair :" + pair.toString());
+
+ String recreatedString = new String(utf8ByteArray, 1, length - 6, "UTF-8");
+ System.out.println("recreatedString:" + recreatedString);
+
+ String correctString = new String(utf8ByteArray, pair.getFirst(),
+ pair.getSecond(), "UTF-8");
+ System.out.println("correctString:" + correctString);
+
+ Assert.assertEquals(pair, new Pair<Integer,Integer>(1, 20));
+    // Six characters stripped from this.
+ Assert.assertEquals(correctString.length(), foreignText.length() - 6);
+
+ }
+
+ @Test
+ public void testForeignUTF() throws IOException {
+ String foreignText = "안녕하세요, 제 이름은 박병호입니다";
+ byte[] utf8ByteArray = createUTF8ByteArray(foreignText);
+
+ int length = utf8ByteArray.length;
+ System.out.println("char length:" + foreignText.length()
+ + " utf8BytesLength:" + utf8ByteArray.length + " for:" + foreignText);
+
+ Pair<Integer,Integer> pair = FileIOUtils.getUtf8Range(utf8ByteArray, 1,
+ length - 6);
+ System.out.println("Pair :" + pair.toString());
+
+ String recreatedString = new String(utf8ByteArray, 1, length - 6, "UTF-8");
+ System.out.println("recreatedString:" + recreatedString);
+
+ String correctString = new String(utf8ByteArray, pair.getFirst(),
+ pair.getSecond(), "UTF-8");
+ System.out.println("correctString:" + correctString);
+
+ Assert.assertEquals(pair, new Pair<Integer,Integer>(3, 40));
+    // Three characters stripped from this.
+ Assert.assertEquals(correctString.length(), foreignText.length() - 3);
+
+
+ // Testing mixed bytes
+ String mixedText = "abc안녕하세요, 제 이름은 박병호입니다";
+ byte[] mixedBytes = createUTF8ByteArray(mixedText);
+ Pair<Integer,Integer> pair2 = FileIOUtils.getUtf8Range(mixedBytes, 1,
+ length - 4);
+ correctString = new String(mixedBytes, pair2.getFirst(), pair2.getSecond(),
+ "UTF-8");
+ System.out.println("correctString:" + correctString);
+ Assert.assertEquals(pair2, new Pair<Integer,Integer>(1, 45));
+    // Three characters stripped from this.
+ Assert.assertEquals(correctString.length(), mixedText.length() - 3);
+
+ }
+
+ private byte[] createUTF8ByteArray(String text) {
+ byte[] textBytes= null;
+ try {
+ textBytes = text.getBytes("UTF-8");
+ } catch (UnsupportedEncodingException e) {
+ e.printStackTrace();
+ }
+ return textBytes;
+ }
+}
unit/java/azkaban/test/utils/JsonUtilsTest.java 89(+46 -43)
diff --git a/unit/java/azkaban/test/utils/JsonUtilsTest.java b/unit/java/azkaban/test/utils/JsonUtilsTest.java
index c12f17c..3f8a992 100644
--- a/unit/java/azkaban/test/utils/JsonUtilsTest.java
+++ b/unit/java/azkaban/test/utils/JsonUtilsTest.java
@@ -11,47 +11,50 @@ import org.junit.Test;
import azkaban.utils.JSONUtils;
public class JsonUtilsTest {
- @Test
- public void writePropsNoJarDependencyTest1() throws IOException {
- Map<String, String> test = new HashMap<String,String>();
- test.put("\"myTest\n\b", "myValue\t\\");
- test.put("normalKey", "Other key");
-
- StringWriter writer = new StringWriter();
- JSONUtils.writePropsNoJarDependency(test, writer);
-
- String jsonStr = writer.toString();
- System.out.println(writer.toString());
-
- @SuppressWarnings("unchecked")
- Map<String,String> result = (Map<String,String>)JSONUtils.parseJSONFromString(jsonStr);
- checkInAndOut(test, result);
- }
-
- @Test
- public void writePropsNoJarDependencyTest2() throws IOException {
- Map<String, String> test = new HashMap<String,String>();
- test.put("\"myTest\n\b", "myValue\t\\");
-
- StringWriter writer = new StringWriter();
- JSONUtils.writePropsNoJarDependency(test, writer);
-
- String jsonStr = writer.toString();
- System.out.println(writer.toString());
-
- @SuppressWarnings("unchecked")
- Map<String,String> result = (Map<String,String>)JSONUtils.parseJSONFromString(jsonStr);
- checkInAndOut(test, result);
- }
-
- private static void checkInAndOut(Map<String, String> before, Map<String, String> after) {
- for (Map.Entry<String, String> entry: before.entrySet()) {
- String key = entry.getKey();
- String value = entry.getValue();
-
- String retValue = after.get(key);
- Assert.assertEquals(value, retValue);
- }
- }
-
+ @Test
+ public void writePropsNoJarDependencyTest1() throws IOException {
+ Map<String, String> test = new HashMap<String, String>();
+ test.put("\"myTest\n\b", "myValue\t\\");
+ test.put("normalKey", "Other key");
+
+ StringWriter writer = new StringWriter();
+ JSONUtils.writePropsNoJarDependency(test, writer);
+
+ String jsonStr = writer.toString();
+ System.out.println(writer.toString());
+
+ @SuppressWarnings("unchecked")
+ Map<String, String> result =
+ (Map<String, String>) JSONUtils.parseJSONFromString(jsonStr);
+ checkInAndOut(test, result);
+ }
+
+ @Test
+ public void writePropsNoJarDependencyTest2() throws IOException {
+ Map<String, String> test = new HashMap<String, String>();
+ test.put("\"myTest\n\b", "myValue\t\\");
+
+ StringWriter writer = new StringWriter();
+ JSONUtils.writePropsNoJarDependency(test, writer);
+
+ String jsonStr = writer.toString();
+ System.out.println(writer.toString());
+
+ @SuppressWarnings("unchecked")
+ Map<String, String> result =
+ (Map<String, String>) JSONUtils.parseJSONFromString(jsonStr);
+ checkInAndOut(test, result);
+ }
+
+ private static void checkInAndOut(Map<String, String> before,
+ Map<String, String> after) {
+ for (Map.Entry<String, String> entry : before.entrySet()) {
+ String key = entry.getKey();
+ String value = entry.getValue();
+
+ String retValue = after.get(key);
+ Assert.assertEquals(value, retValue);
+ }
+ }
+
}
\ No newline at end of file
unit/java/azkaban/test/utils/PropsUtilsTest.java 307(+149 -158)
diff --git a/unit/java/azkaban/test/utils/PropsUtilsTest.java b/unit/java/azkaban/test/utils/PropsUtilsTest.java
index 75bc979..7e8e001 100644
--- a/unit/java/azkaban/test/utils/PropsUtilsTest.java
+++ b/unit/java/azkaban/test/utils/PropsUtilsTest.java
@@ -10,162 +10,153 @@ import azkaban.utils.PropsUtils;
import azkaban.utils.UndefinedPropertyException;
public class PropsUtilsTest {
- @Test
- public void testGoodResolveProps() throws IOException {
- Props propsGrandParent = new Props();
- Props propsParent = new Props(propsGrandParent);
- Props props = new Props(propsParent);
-
- // Testing props in general
- props.put("letter", "a");
- propsParent.put("letter", "b");
- propsGrandParent.put("letter", "c");
-
- Assert.assertEquals("a", props.get("letter"));
- propsParent.put("my", "name");
- propsParent.put("your", "eyes");
- propsGrandParent.put("their", "ears");
- propsGrandParent.put("your", "hair");
-
- Assert.assertEquals("name", props.get("my"));
- Assert.assertEquals("eyes", props.get("your"));
- Assert.assertEquals("ears", props.get("their"));
-
- props.put("res1", "${my}");
- props.put("res2", "${their} ${letter}");
- props.put("res7", "${my} ${res5}");
-
- propsParent.put("res3", "${your} ${their} ${res4}");
- propsGrandParent.put("res4", "${letter}");
- propsGrandParent.put("res5", "${their}");
- propsParent.put("res6", " t ${your} ${your} ${their} ${res5}");
-
- Props resolved = PropsUtils.resolveProps(props);
- Assert.assertEquals("name", resolved.get("res1"));
- Assert.assertEquals("ears a", resolved.get("res2"));
- Assert.assertEquals("eyes ears a", resolved.get("res3"));
- Assert.assertEquals("a", resolved.get("res4"));
- Assert.assertEquals("ears", resolved.get("res5"));
- Assert.assertEquals(" t eyes eyes ears ears", resolved.get("res6"));
- Assert.assertEquals("name ears", resolved.get("res7"));
- }
-
- @Test
- public void testInvalidSyntax() throws Exception {
- Props propsGrandParent = new Props();
- Props propsParent = new Props(propsGrandParent);
- Props props = new Props(propsParent);
-
- propsParent.put("my", "name");
- props.put("res1", "$(my)");
-
- Props resolved = PropsUtils.resolveProps(props);
- Assert.assertEquals("$(my)", resolved.get("res1"));
- }
-
- @Test
- public void testExpressionResolution() throws IOException {
- Props props = Props.of(
- "normkey", "normal",
- "num1", "1",
- "num2", "2",
- "num3", "3",
- "variablereplaced", "${num1}",
- "expression1", "$(1+10)",
- "expression2", "$(1+10)*2",
- "expression3", "$($(${num1} + ${num3})*10)",
- "expression4", "$(${num1} + ${expression3})",
- "expression5", "$($($(2+3)) + 3) + $(${expression3} + 1)",
- "expression6", "$(1 + ${normkey})",
- "expression7", "$(\"${normkey}\" + 1)",
- "expression8", "${expression1}",
- "expression9", "$((2+3) + 3)"
- );
-
- Props resolved = PropsUtils.resolveProps(props);
- Assert.assertEquals("normal", resolved.get("normkey"));
- Assert.assertEquals("1", resolved.get("num1"));
- Assert.assertEquals("2", resolved.get("num2"));
- Assert.assertEquals("3", resolved.get("num3"));
- Assert.assertEquals("1", resolved.get("variablereplaced"));
- Assert.assertEquals("11", resolved.get("expression1"));
- Assert.assertEquals("11*2", resolved.get("expression2"));
- Assert.assertEquals("40", resolved.get("expression3"));
- Assert.assertEquals("41", resolved.get("expression4"));
- Assert.assertEquals("8 + 41", resolved.get("expression5"));
- Assert.assertEquals("1", resolved.get("expression6"));
- Assert.assertEquals("normal1", resolved.get("expression7"));
- Assert.assertEquals("11", resolved.get("expression8"));
- Assert.assertEquals("8", resolved.get("expression9"));
- }
-
- @Test
- public void testMalformedExpressionProps() throws IOException {
- // unclosed
- Props props = Props.of("key", "$(1+2");
- failIfNotException(props);
-
- props = Props.of("key", "$((1+2)");
- failIfNotException(props);
-
- // bad variable replacement
- props = Props.of("key", "$(${dontexist}+2)");
- failIfNotException(props);
-
- // bad expression
- props = Props.of("key", "$(2 +)");
- failIfNotException(props);
-
- // bad expression
- props = Props.of("key", "$(2 + #hello)");
- failIfNotException(props);
- }
-
- @Test
- public void testCyclesResolveProps() throws IOException {
- Props propsGrandParent = new Props();
- Props propsParent = new Props(propsGrandParent);
- Props props = new Props(propsParent);
-
- // Testing props in general
- props.put("a", "${a}");
- failIfNotException(props);
-
- props.put("a", "${b}");
- props.put("b", "${a}");
- failIfNotException(props);
-
- props.clearLocal();
- props.put("a", "${b}");
- props.put("b", "${c}");
- propsParent.put("d", "${a}");
- failIfNotException(props);
-
- props.clearLocal();
- props.put("a", "testing ${b}");
- props.put("b", "${c}");
- propsGrandParent.put("c", "${d}");
- propsParent.put("d", "${a}");
- failIfNotException(props);
-
- props.clearLocal();
- props.put("a", "testing ${c} ${b}");
- props.put("b", "${c} test");
- propsGrandParent.put("c", "${d}");
- propsParent.put("d", "${a}");
- failIfNotException(props);
- }
-
- private void failIfNotException(Props props) {
- try {
- PropsUtils.resolveProps(props);
- Assert.fail();
- }
- catch (UndefinedPropertyException e) {
- e.printStackTrace();
- }
- catch (IllegalArgumentException e) {
- e.printStackTrace();
- }
- }
+ @Test
+ public void testGoodResolveProps() throws IOException {
+ Props propsGrandParent = new Props();
+ Props propsParent = new Props(propsGrandParent);
+ Props props = new Props(propsParent);
+
+ // Testing props in general
+ props.put("letter", "a");
+ propsParent.put("letter", "b");
+ propsGrandParent.put("letter", "c");
+
+ Assert.assertEquals("a", props.get("letter"));
+ propsParent.put("my", "name");
+ propsParent.put("your", "eyes");
+ propsGrandParent.put("their", "ears");
+ propsGrandParent.put("your", "hair");
+
+ Assert.assertEquals("name", props.get("my"));
+ Assert.assertEquals("eyes", props.get("your"));
+ Assert.assertEquals("ears", props.get("their"));
+
+ props.put("res1", "${my}");
+ props.put("res2", "${their} ${letter}");
+ props.put("res7", "${my} ${res5}");
+
+ propsParent.put("res3", "${your} ${their} ${res4}");
+ propsGrandParent.put("res4", "${letter}");
+ propsGrandParent.put("res5", "${their}");
+ propsParent.put("res6", " t ${your} ${your} ${their} ${res5}");
+
+ Props resolved = PropsUtils.resolveProps(props);
+ Assert.assertEquals("name", resolved.get("res1"));
+ Assert.assertEquals("ears a", resolved.get("res2"));
+ Assert.assertEquals("eyes ears a", resolved.get("res3"));
+ Assert.assertEquals("a", resolved.get("res4"));
+ Assert.assertEquals("ears", resolved.get("res5"));
+ Assert.assertEquals(" t eyes eyes ears ears", resolved.get("res6"));
+ Assert.assertEquals("name ears", resolved.get("res7"));
+ }
+
+ @Test
+ public void testInvalidSyntax() throws Exception {
+ Props propsGrandParent = new Props();
+ Props propsParent = new Props(propsGrandParent);
+ Props props = new Props(propsParent);
+
+ propsParent.put("my", "name");
+ props.put("res1", "$(my)");
+
+ Props resolved = PropsUtils.resolveProps(props);
+ Assert.assertEquals("$(my)", resolved.get("res1"));
+ }
+
+ @Test
+ public void testExpressionResolution() throws IOException {
+ Props props =
+ Props.of("normkey", "normal", "num1", "1", "num2", "2", "num3", "3",
+ "variablereplaced", "${num1}", "expression1", "$(1+10)",
+ "expression2", "$(1+10)*2", "expression3",
+ "$($(${num1} + ${num3})*10)", "expression4",
+ "$(${num1} + ${expression3})", "expression5",
+ "$($($(2+3)) + 3) + $(${expression3} + 1)", "expression6",
+ "$(1 + ${normkey})", "expression7", "$(\"${normkey}\" + 1)",
+ "expression8", "${expression1}", "expression9", "$((2+3) + 3)");
+
+ Props resolved = PropsUtils.resolveProps(props);
+ Assert.assertEquals("normal", resolved.get("normkey"));
+ Assert.assertEquals("1", resolved.get("num1"));
+ Assert.assertEquals("2", resolved.get("num2"));
+ Assert.assertEquals("3", resolved.get("num3"));
+ Assert.assertEquals("1", resolved.get("variablereplaced"));
+ Assert.assertEquals("11", resolved.get("expression1"));
+ Assert.assertEquals("11*2", resolved.get("expression2"));
+ Assert.assertEquals("40", resolved.get("expression3"));
+ Assert.assertEquals("41", resolved.get("expression4"));
+ Assert.assertEquals("8 + 41", resolved.get("expression5"));
+ Assert.assertEquals("1", resolved.get("expression6"));
+ Assert.assertEquals("normal1", resolved.get("expression7"));
+ Assert.assertEquals("11", resolved.get("expression8"));
+ Assert.assertEquals("8", resolved.get("expression9"));
+ }
+
+ @Test
+ public void testMalformedExpressionProps() throws IOException {
+ // unclosed
+ Props props = Props.of("key", "$(1+2");
+ failIfNotException(props);
+
+ props = Props.of("key", "$((1+2)");
+ failIfNotException(props);
+
+ // bad variable replacement
+ props = Props.of("key", "$(${dontexist}+2)");
+ failIfNotException(props);
+
+ // bad expression
+ props = Props.of("key", "$(2 +)");
+ failIfNotException(props);
+
+ // bad expression
+ props = Props.of("key", "$(2 + #hello)");
+ failIfNotException(props);
+ }
+
+ @Test
+ public void testCyclesResolveProps() throws IOException {
+ Props propsGrandParent = new Props();
+ Props propsParent = new Props(propsGrandParent);
+ Props props = new Props(propsParent);
+
+ // Testing props in general
+ props.put("a", "${a}");
+ failIfNotException(props);
+
+ props.put("a", "${b}");
+ props.put("b", "${a}");
+ failIfNotException(props);
+
+ props.clearLocal();
+ props.put("a", "${b}");
+ props.put("b", "${c}");
+ propsParent.put("d", "${a}");
+ failIfNotException(props);
+
+ props.clearLocal();
+ props.put("a", "testing ${b}");
+ props.put("b", "${c}");
+ propsGrandParent.put("c", "${d}");
+ propsParent.put("d", "${a}");
+ failIfNotException(props);
+
+ props.clearLocal();
+ props.put("a", "testing ${c} ${b}");
+ props.put("b", "${c} test");
+ propsGrandParent.put("c", "${d}");
+ propsParent.put("d", "${a}");
+ failIfNotException(props);
+ }
+
+ private void failIfNotException(Props props) {
+ try {
+ PropsUtils.resolveProps(props);
+ Assert.fail();
+ } catch (UndefinedPropertyException e) {
+ e.printStackTrace();
+ } catch (IllegalArgumentException e) {
+ e.printStackTrace();
+ }
+ }
}