Posted to commits@kylin.apache.org by lu...@apache.org on 2015/01/14 15:15:58 UTC

[23/51] [partial] incubator-kylin git commit: cleanup for migration from github.com

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a4fd4268/job/.settings/org.eclipse.jdt.core.prefs
----------------------------------------------------------------------
diff --git a/job/.settings/org.eclipse.jdt.core.prefs b/job/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index a903301..0000000
--- a/job/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,379 +0,0 @@
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.annotation.inheritNullAnnotations=disabled
-org.eclipse.jdt.core.compiler.annotation.missingNonNullByDefaultAnnotation=ignore
-org.eclipse.jdt.core.compiler.annotation.nonnull=org.eclipse.jdt.annotation.NonNull
-org.eclipse.jdt.core.compiler.annotation.nonnullbydefault=org.eclipse.jdt.annotation.NonNullByDefault
-org.eclipse.jdt.core.compiler.annotation.nullable=org.eclipse.jdt.annotation.Nullable
-org.eclipse.jdt.core.compiler.annotation.nullanalysis=disabled
-org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
-org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
-org.eclipse.jdt.core.compiler.compliance=1.7
-org.eclipse.jdt.core.compiler.debug.lineNumber=generate
-org.eclipse.jdt.core.compiler.debug.localVariable=generate
-org.eclipse.jdt.core.compiler.debug.sourceFile=generate
-org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=warning
-org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
-org.eclipse.jdt.core.compiler.problem.autoboxing=ignore
-org.eclipse.jdt.core.compiler.problem.comparingIdentical=warning
-org.eclipse.jdt.core.compiler.problem.deadCode=warning
-org.eclipse.jdt.core.compiler.problem.deprecation=warning
-org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode=disabled
-org.eclipse.jdt.core.compiler.problem.deprecationWhenOverridingDeprecatedMethod=disabled
-org.eclipse.jdt.core.compiler.problem.discouragedReference=warning
-org.eclipse.jdt.core.compiler.problem.emptyStatement=ignore
-org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
-org.eclipse.jdt.core.compiler.problem.explicitlyClosedAutoCloseable=ignore
-org.eclipse.jdt.core.compiler.problem.fallthroughCase=ignore
-org.eclipse.jdt.core.compiler.problem.fatalOptionalError=disabled
-org.eclipse.jdt.core.compiler.problem.fieldHiding=ignore
-org.eclipse.jdt.core.compiler.problem.finalParameterBound=warning
-org.eclipse.jdt.core.compiler.problem.finallyBlockNotCompletingNormally=warning
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock=warning
-org.eclipse.jdt.core.compiler.problem.includeNullInfoFromAsserts=disabled
-org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod=warning
-org.eclipse.jdt.core.compiler.problem.incompleteEnumSwitch=warning
-org.eclipse.jdt.core.compiler.problem.indirectStaticAccess=ignore
-org.eclipse.jdt.core.compiler.problem.localVariableHiding=ignore
-org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=warning
-org.eclipse.jdt.core.compiler.problem.missingDefaultCase=ignore
-org.eclipse.jdt.core.compiler.problem.missingDeprecatedAnnotation=ignore
-org.eclipse.jdt.core.compiler.problem.missingEnumCaseDespiteDefault=disabled
-org.eclipse.jdt.core.compiler.problem.missingHashCodeMethod=ignore
-org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotation=ignore
-org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotationForInterfaceMethodImplementation=enabled
-org.eclipse.jdt.core.compiler.problem.missingSerialVersion=warning
-org.eclipse.jdt.core.compiler.problem.missingSynchronizedOnInheritedMethod=ignore
-org.eclipse.jdt.core.compiler.problem.noEffectAssignment=warning
-org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=warning
-org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral=ignore
-org.eclipse.jdt.core.compiler.problem.nonnullParameterAnnotationDropped=warning
-org.eclipse.jdt.core.compiler.problem.nullAnnotationInferenceConflict=error
-org.eclipse.jdt.core.compiler.problem.nullReference=warning
-org.eclipse.jdt.core.compiler.problem.nullSpecViolation=error
-org.eclipse.jdt.core.compiler.problem.nullUncheckedConversion=warning
-org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=warning
-org.eclipse.jdt.core.compiler.problem.parameterAssignment=ignore
-org.eclipse.jdt.core.compiler.problem.possibleAccidentalBooleanAssignment=ignore
-org.eclipse.jdt.core.compiler.problem.potentialNullReference=ignore
-org.eclipse.jdt.core.compiler.problem.potentiallyUnclosedCloseable=ignore
-org.eclipse.jdt.core.compiler.problem.rawTypeReference=warning
-org.eclipse.jdt.core.compiler.problem.redundantNullAnnotation=warning
-org.eclipse.jdt.core.compiler.problem.redundantNullCheck=ignore
-org.eclipse.jdt.core.compiler.problem.redundantSpecificationOfTypeArguments=ignore
-org.eclipse.jdt.core.compiler.problem.redundantSuperinterface=ignore
-org.eclipse.jdt.core.compiler.problem.reportMethodCanBePotentiallyStatic=ignore
-org.eclipse.jdt.core.compiler.problem.reportMethodCanBeStatic=ignore
-org.eclipse.jdt.core.compiler.problem.specialParameterHidingField=disabled
-org.eclipse.jdt.core.compiler.problem.staticAccessReceiver=warning
-org.eclipse.jdt.core.compiler.problem.suppressOptionalErrors=disabled
-org.eclipse.jdt.core.compiler.problem.suppressWarnings=enabled
-org.eclipse.jdt.core.compiler.problem.syntacticNullAnalysisForFields=disabled
-org.eclipse.jdt.core.compiler.problem.syntheticAccessEmulation=ignore
-org.eclipse.jdt.core.compiler.problem.typeParameterHiding=warning
-org.eclipse.jdt.core.compiler.problem.unavoidableGenericTypeProblems=enabled
-org.eclipse.jdt.core.compiler.problem.uncheckedTypeOperation=warning
-org.eclipse.jdt.core.compiler.problem.unclosedCloseable=warning
-org.eclipse.jdt.core.compiler.problem.undocumentedEmptyBlock=ignore
-org.eclipse.jdt.core.compiler.problem.unhandledWarningToken=warning
-org.eclipse.jdt.core.compiler.problem.unnecessaryElse=ignore
-org.eclipse.jdt.core.compiler.problem.unnecessaryTypeCheck=ignore
-org.eclipse.jdt.core.compiler.problem.unqualifiedFieldAccess=ignore
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownException=ignore
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionExemptExceptionAndThrowable=enabled
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionIncludeDocCommentReference=enabled
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionWhenOverriding=disabled
-org.eclipse.jdt.core.compiler.problem.unusedImport=warning
-org.eclipse.jdt.core.compiler.problem.unusedLabel=warning
-org.eclipse.jdt.core.compiler.problem.unusedLocal=warning
-org.eclipse.jdt.core.compiler.problem.unusedObjectAllocation=ignore
-org.eclipse.jdt.core.compiler.problem.unusedParameter=ignore
-org.eclipse.jdt.core.compiler.problem.unusedParameterIncludeDocCommentReference=enabled
-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenImplementingAbstract=disabled
-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenOverridingConcrete=disabled
-org.eclipse.jdt.core.compiler.problem.unusedPrivateMember=warning
-org.eclipse.jdt.core.compiler.problem.unusedTypeParameter=ignore
-org.eclipse.jdt.core.compiler.problem.unusedWarningToken=warning
-org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=warning
-org.eclipse.jdt.core.compiler.source=1.7
-org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation=0
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_assignment=0
-org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_compact_if=16
-org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80
-org.eclipse.jdt.core.formatter.alignment_for_enum_constants=0
-org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16
-org.eclipse.jdt.core.formatter.alignment_for_method_declaration=0
-org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16
-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_resources_in_try=80
-org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16
-org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch=16
-org.eclipse.jdt.core.formatter.blank_lines_after_imports=1
-org.eclipse.jdt.core.formatter.blank_lines_after_package=1
-org.eclipse.jdt.core.formatter.blank_lines_before_field=0
-org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0
-org.eclipse.jdt.core.formatter.blank_lines_before_imports=1
-org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1
-org.eclipse.jdt.core.formatter.blank_lines_before_method=1
-org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1
-org.eclipse.jdt.core.formatter.blank_lines_before_package=0
-org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1
-org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1
-org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false
-org.eclipse.jdt.core.formatter.comment.format_block_comments=false
-org.eclipse.jdt.core.formatter.comment.format_header=false
-org.eclipse.jdt.core.formatter.comment.format_html=true
-org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=false
-org.eclipse.jdt.core.formatter.comment.format_line_comments=false
-org.eclipse.jdt.core.formatter.comment.format_source_code=true
-org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true
-org.eclipse.jdt.core.formatter.comment.indent_root_tags=true
-org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert
-org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=insert
-org.eclipse.jdt.core.formatter.comment.line_length=80
-org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries=true
-org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries=true
-org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments=false
-org.eclipse.jdt.core.formatter.compact_else_if=true
-org.eclipse.jdt.core.formatter.continuation_indentation=2
-org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2
-org.eclipse.jdt.core.formatter.disabling_tag=@formatter\:off
-org.eclipse.jdt.core.formatter.enabling_tag=@formatter\:on
-org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false
-org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
-org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
-org.eclipse.jdt.core.formatter.indent_empty_lines=false
-org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
-org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=false
-org.eclipse.jdt.core.formatter.indentation.size=4
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_label=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert
-org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources=insert
-org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert
-org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert
-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert
-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert
-org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.join_lines_in_comments=true
-org.eclipse.jdt.core.formatter.join_wrapped_lines=true
-org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false
-org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
-org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false
-org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false
-org.eclipse.jdt.core.formatter.lineSplit=999
-org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false
-org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false
-org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0
-org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1
-org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true
-org.eclipse.jdt.core.formatter.tabulation.char=space
-org.eclipse.jdt.core.formatter.tabulation.size=4
-org.eclipse.jdt.core.formatter.use_on_off_tags=false
-org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false
-org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true
-org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch=true
-org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested=true

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a4fd4268/job/.settings/org.eclipse.jdt.ui.prefs
----------------------------------------------------------------------
diff --git a/job/.settings/org.eclipse.jdt.ui.prefs b/job/.settings/org.eclipse.jdt.ui.prefs
deleted file mode 100644
index dece0e6..0000000
--- a/job/.settings/org.eclipse.jdt.ui.prefs
+++ /dev/null
@@ -1,7 +0,0 @@
-eclipse.preferences.version=1
-formatter_profile=_Space Indent & Long Lines
-formatter_settings_version=12
-org.eclipse.jdt.ui.ignorelowercasenames=true
-org.eclipse.jdt.ui.importorder=java;javax;org;com;
-org.eclipse.jdt.ui.ondemandthreshold=99
-org.eclipse.jdt.ui.staticondemandthreshold=1

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a4fd4268/job/pom.xml
----------------------------------------------------------------------
diff --git a/job/pom.xml b/job/pom.xml
deleted file mode 100644
index b039957..0000000
--- a/job/pom.xml
+++ /dev/null
@@ -1,196 +0,0 @@
-<?xml version="1.0"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>com.kylinolap</groupId>
-        <artifactId>kylin</artifactId>
-        <version>0.6.3-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>kylin-job</artifactId>
-    <name>Kylin:Job</name>
-    <url>http://maven.apache.org</url>
-    <properties>
-        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    </properties>
-
-    <dependencies>
-        <!--Kylin Jar -->
-        <dependency>
-            <groupId>com.kylinolap</groupId>
-            <artifactId>kylin-cube</artifactId>
-            <version>${project.parent.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>commons-cli</groupId>
-            <artifactId>commons-cli</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>commons-lang</groupId>
-            <artifactId>commons-lang</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>commons-io</groupId>
-            <artifactId>commons-io</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>commons-configuration</groupId>
-            <artifactId>commons-configuration</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>log4j</groupId>
-            <artifactId>log4j</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-databind</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>commons-httpclient</groupId>
-            <artifactId>commons-httpclient</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>com.google.guava</groupId>
-            <artifactId>guava</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.quartz-scheduler</groupId>
-            <artifactId>quartz</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.quartz-scheduler</groupId>
-            <artifactId>quartz-jobs</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>commons-daemon</groupId>
-            <artifactId>commons-daemon</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.curator</groupId>
-            <artifactId>curator-framework</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.apache.zookeeper</groupId>
-                    <artifactId>zookeeper</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.curator</groupId>
-            <artifactId>curator-recipes</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.apache.zookeeper</groupId>
-                    <artifactId>zookeeper</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-email</artifactId>
-            <version>1.1</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-common</artifactId>
-            <!-- This is default scope to be included in the job jar, we depends on
-                hbase utils like Bytes, ImmutableBytesWritable etc. -->
-        </dependency>
-
-        <!-- Env & Test -->
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-annotations</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-core</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-minicluster</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.mrunit</groupId>
-            <artifactId>mrunit</artifactId>
-            <classifier>hadoop2</classifier>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-hadoop2-compat</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-client</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-server</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.maven</groupId>
-            <artifactId>maven-model</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-yarn-api</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-hdfs</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <artifactId>maven-assembly-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <id>create-job-jar</id>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>single</goal>
-                        </goals>
-                        <configuration>
-                            <descriptors>
-                                <descriptor>src/main/assembly/job.xml</descriptor>
-                            </descriptors>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-    </build>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a4fd4268/job/src/main/assembly/job.xml
----------------------------------------------------------------------
diff --git a/job/src/main/assembly/job.xml b/job/src/main/assembly/job.xml
deleted file mode 100644
index d0a56ee..0000000
--- a/job/src/main/assembly/job.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<assembly>
-	<id>job</id>
-	<formats>
-		<format>jar</format>
-	</formats>
-	<includeBaseDirectory>false</includeBaseDirectory>
-	<dependencySets>
-		<dependencySet>
-			<unpack>true</unpack>
-			<scope>runtime</scope>
-			<excludes>
-				<exclude>${groupId}:${artifactId}</exclude>
-			</excludes>
-		</dependencySet>
-		<dependencySet>
-			<unpack>true</unpack>
-			<includes>
-				<include>${groupId}:${artifactId}</include>
-			</includes>
-		</dependencySet>
-	</dependencySets>
-</assembly>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a4fd4268/job/src/main/java/com/kylinolap/job/JobDAO.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/JobDAO.java b/job/src/main/java/com/kylinolap/job/JobDAO.java
deleted file mode 100644
index b9fb688..0000000
--- a/job/src/main/java/com/kylinolap/job/JobDAO.java
+++ /dev/null
@@ -1,240 +0,0 @@
-/*
- * Copyright 2013-2014 eBay Software Foundation
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.kylinolap.job;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.kylinolap.common.KylinConfig;
-import com.kylinolap.common.persistence.JsonSerializer;
-import com.kylinolap.common.persistence.ResourceStore;
-import com.kylinolap.common.persistence.Serializer;
-import com.kylinolap.job.JobInstance.JobStep;
-import com.kylinolap.job.constant.JobStatusEnum;
-import com.kylinolap.job.constant.JobStepStatusEnum;
-import com.kylinolap.metadata.MetadataManager;
-
-/**
- * @author ysong1
- */
-public class JobDAO {
-    private static Logger log = LoggerFactory.getLogger(JobDAO.class);
-
-    private static final Serializer<JobInstance> JOB_SERIALIZER = new JsonSerializer<JobInstance>(JobInstance.class);
-    private static final Serializer<JobStepOutput> JOB_OUTPUT_SERIALIZER = new JsonSerializer<JobStepOutput>(JobStepOutput.class);
-
-    private ResourceStore store;
-
-    private static final Logger logger = LoggerFactory.getLogger(JobDAO.class);
-
-    private static final ConcurrentHashMap<KylinConfig, JobDAO> CACHE = new ConcurrentHashMap<KylinConfig, JobDAO>();
-
-    public static JobDAO getInstance(KylinConfig config) {
-        JobDAO r = CACHE.get(config);
-        if (r == null) {
-            r = new JobDAO(config);
-            CACHE.put(config, r);
-            if (CACHE.size() > 1) {
-                logger.warn("More than one singleton exist");
-            }
-
-        }
-        return r;
-    }
-
-    private JobDAO(KylinConfig config) {
-        log.info("Using metadata url: " + config);
-        this.store = MetadataManager.getInstance(config).getStore();
-    }
-
-    public List<JobInstance> listAllJobs() throws IOException {
-        ArrayList<String> jobResources = store.listResources(ResourceStore.JOB_PATH_ROOT);
-        if (jobResources == null)
-            return Collections.emptyList();
-
-        ArrayList<JobInstance> result = new ArrayList<JobInstance>(jobResources.size());
-        for (String path : jobResources) {
-            JobInstance job = readJobResource(path);
-            if (job != null) {
-                result.add(job);
-            }
-        }
-
-        return result;
-    }
-
-    public List<JobInstance> listAllJobs(String cubeName) throws IOException {
-
-        List<JobInstance> allJobs = listAllJobs();
-        if (allJobs.size() == 0) {
-            return Collections.emptyList();
-        }
-
-        if (null == cubeName || cubeName.trim().length() == 0) {
-            return allJobs;
-        }
-
-        ArrayList<JobInstance> result = new ArrayList<JobInstance>();
-        for (JobInstance job : allJobs) {
-            if (job != null) {
-                if (job.getRelatedCube().toLowerCase().contains(cubeName.toLowerCase())) {
-                    result.add(job);
-                }
-            }
-        }
-
-        return result;
-    }
-
-    public List<JobInstance> listAllJobs(JobStatusEnum status) throws IOException {
-
-        List<JobInstance> allJobs = listAllJobs();
-        if (allJobs.size() == 0) {
-            return Collections.emptyList();
-        }
-
-        ArrayList<JobInstance> result = new ArrayList<JobInstance>();
-        for (JobInstance job : allJobs) {
-            if (job != null) {
-                if (job.getStatus().equals(status)) {
-                    result.add(job);
-                }
-            }
-        }
-
-        return result;
-    }
-
-    public JobStepOutput getJobOutput(String jobUuid, int stepSequenceId) throws IOException {
-        return readJobOutputResource(ResourceStore.JOB_OUTPUT_PATH_ROOT + "/" + JobStepOutput.nameOfOutput(jobUuid, stepSequenceId));
-    }
-
-    public JobStepOutput getJobOutput(JobStep jobStep) throws IOException {
-        return getJobOutput(jobStep.getJobInstance().getUuid(), jobStep.getSequenceID());
-    }
-
-    public void saveJobOutput(String jobUuid, int stepSequenceId, String outputString) throws IOException {
-        JobStepOutput output = this.getJobOutput(jobUuid, stepSequenceId);
-
-        if (output == null) {
-            output = new JobStepOutput();
-            output.setName(JobStepOutput.nameOfOutput(jobUuid, stepSequenceId));
-        }
-
-        output.setOutput(outputString);
-        writeJobOutputResource(pathOfJobOutput(output), output);
-    }
-
-    public void saveJobOutput(JobStep jobStep, String outputString) throws IOException {
-        saveJobOutput(jobStep.getJobInstance().getUuid(), jobStep.getSequenceID(), outputString);
-    }
-
-    private void saveJob(JobInstance job) throws IOException {
-        writeJobResource(pathOfJob(job), job);
-    }
-
-    public JobInstance getJob(String uuid) throws IOException {
-        return readJobResource(ResourceStore.JOB_PATH_ROOT + "/" + uuid);
-    }
-
-    public void deleteJob(JobInstance job) throws IOException {
-        store.deleteResource(pathOfJob(job));
-    }
-
-    public void deleteJob(String uuid) throws IOException {
-        store.deleteResource(ResourceStore.JOB_PATH_ROOT + "/" + uuid);
-    }
-
-    public void updateJobInstance(JobInstance jobInstance) throws IOException {
-        try {
-            JobInstance updatedJob = getJob(jobInstance.getUuid());
-            if (updatedJob == null) {
-                saveJob(jobInstance);
-                return;
-            }
-
-            updatedJob.setExecEndTime(jobInstance.getExecEndTime());
-            updatedJob.setExecStartTime(jobInstance.getExecStartTime());
-            updatedJob.setDuration(jobInstance.getDuration());
-            updatedJob.setMrWaiting(jobInstance.getMrWaiting());
-            updatedJob.setRelatedCube(jobInstance.getRelatedCube());
-            updatedJob.setRelatedSegment(jobInstance.getRelatedSegment());
-            updatedJob.setType(jobInstance.getType());
-
-            updatedJob.clearSteps();
-            updatedJob.addSteps(jobInstance.getSteps());
-
-            saveJob(updatedJob);
-        } catch (IOException e) {
-            log.error(e.getLocalizedMessage(), e);
-            throw e;
-        }
-    }
-
-    public void updateRunningJobToError() throws IOException {
-        List<JobInstance> runningJobs = listAllJobs(JobStatusEnum.RUNNING);
-        for (JobInstance job : runningJobs) {
-            // job.setStatus(JobStatusEnum.ERROR);
-
-            // set the last running step to ERROR
-            int lastRunningStepIndex = 0;
-            for (int i = job.getSteps().size() - 1; i >= 0; i--) {
-                JobStep currentStep = job.getSteps().get(i);
-                if (currentStep.getStatus() != JobStepStatusEnum.RUNNING && currentStep.getStatus() != JobStepStatusEnum.WAITING) {
-                    continue;
-                } else {
-                    lastRunningStepIndex = i;
-                    break;
-                }
-            }
-
-            job.getSteps().get(lastRunningStepIndex).setStatus(JobStepStatusEnum.ERROR);
-            this.updateJobInstance(job);
-
-            this.saveJobOutput(job.getUuid(), lastRunningStepIndex, "ERROR state set by job engine");
-        }
-    }
-
-    private String pathOfJob(JobInstance job) {
-        return ResourceStore.JOB_PATH_ROOT + "/" + job.getUuid();
-    }
-
-    private JobInstance readJobResource(String path) throws IOException {
-        return store.getResource(path, JobInstance.class, JOB_SERIALIZER);
-    }
-
-    private void writeJobResource(String path, JobInstance job) throws IOException {
-        store.putResource(path, job, JOB_SERIALIZER);
-    }
-
-    private String pathOfJobOutput(JobStepOutput output) {
-        return ResourceStore.JOB_OUTPUT_PATH_ROOT + "/" + output.getName();
-    }
-
-    private JobStepOutput readJobOutputResource(String path) throws IOException {
-        return store.getResource(path, JobStepOutput.class, JOB_OUTPUT_SERIALIZER);
-    }
-
-    private void writeJobOutputResource(String path, JobStepOutput output) throws IOException {
-        store.putResource(path, output, JOB_OUTPUT_SERIALIZER);
-    }
-}
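
For readers browsing this archived commit, here is a minimal usage sketch of the JobDAO class removed above, based only on the method signatures visible in this diff. It is not part of the commit; the KylinConfig.getInstanceFromEnv() lookup and the cube name are assumptions for illustration.

    // Hypothetical sketch, not part of this commit: exercises the JobDAO API shown in the deleted file.
    import java.io.IOException;
    import java.util.List;

    import com.kylinolap.common.KylinConfig;
    import com.kylinolap.job.JobDAO;
    import com.kylinolap.job.JobInstance;
    import com.kylinolap.job.JobStepOutput;
    import com.kylinolap.job.constant.JobStatusEnum;

    public class JobDaoUsageSketch {
        public static void main(String[] args) throws IOException {
            // Assumption: getInstanceFromEnv() is the usual way to resolve the active Kylin configuration.
            KylinConfig config = KylinConfig.getInstanceFromEnv();
            JobDAO dao = JobDAO.getInstance(config); // per-config singleton, cached inside JobDAO

            List<JobInstance> all = dao.listAllJobs();                          // every job in the store
            List<JobInstance> forCube = dao.listAllJobs("sample_cube");         // filter by (hypothetical) cube name
            List<JobInstance> running = dao.listAllJobs(JobStatusEnum.RUNNING); // filter by status

            if (!all.isEmpty()) {
                JobInstance job = dao.getJob(all.get(0).getUuid());
                JobStepOutput firstStepOutput = dao.getJobOutput(job.getUuid(), 0); // output of step with sequence id 0
                System.out.println(job.getName() + ": first step output " + (firstStepOutput == null ? "missing" : "present"));
            }
            System.out.println(all.size() + " total, " + forCube.size() + " for cube, " + running.size() + " running");
        }
    }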

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a4fd4268/job/src/main/java/com/kylinolap/job/JobInstance.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/JobInstance.java b/job/src/main/java/com/kylinolap/job/JobInstance.java
deleted file mode 100644
index 0a7308a..0000000
--- a/job/src/main/java/com/kylinolap/job/JobInstance.java
+++ /dev/null
@@ -1,479 +0,0 @@
-/*
- * Copyright 2013-2014 eBay Software Foundation
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.kylinolap.job;
-
-import java.util.Collection;
-import java.util.List;
-import java.util.concurrent.ConcurrentHashMap;
-
-import com.fasterxml.jackson.annotation.JsonAutoDetect;
-import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
-import com.fasterxml.jackson.annotation.JsonBackReference;
-import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-import com.fasterxml.jackson.annotation.JsonManagedReference;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.collect.Lists;
-import com.kylinolap.common.persistence.RootPersistentEntity;
-import com.kylinolap.cube.CubeBuildTypeEnum;
-import com.kylinolap.job.constant.JobStatusEnum;
-import com.kylinolap.job.constant.JobStepCmdTypeEnum;
-import com.kylinolap.job.constant.JobStepStatusEnum;
-import com.kylinolap.job.engine.JobEngineConfig;
-
-@JsonAutoDetect(fieldVisibility = Visibility.NONE, getterVisibility = Visibility.NONE, isGetterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE)
-public class JobInstance extends RootPersistentEntity implements Comparable<JobInstance>{
-
-    public static final String JOB_WORKING_DIR_PREFIX = "kylin-";
-
-    public static final String YARN_APP_ID = "yarn_application_id";
-    public static final String YARN_APP_URL = "yarn_application_tracking_url";
-    public static final String MR_JOB_ID = "mr_job_id";
-    public static final String HDFS_BYTES_WRITTEN = "hdfs_bytes_written";
-    public static final String SOURCE_RECORDS_COUNT = "source_records_count";
-    public static final String SOURCE_RECORDS_SIZE = "source_records_size";
-
-    public static String getStepIdentity(JobInstance jobInstance, JobStep jobStep) {
-        return jobInstance.getRelatedCube() + "." + jobInstance.getUuid() + "." + jobStep.getSequenceID();
-    }
-
-    public static String getJobIdentity(JobInstance jobInstance) {
-        return jobInstance.getRelatedCube() + "." + jobInstance.getUuid();
-    }
-
-    public static String getJobWorkingDir(JobInstance jobInstance, JobEngineConfig engineConfig) {
-        return getJobWorkingDir(jobInstance.getUuid(), engineConfig.getHdfsWorkingDirectory());
-    }
-
-    public static String getJobWorkingDir(String jobUuid, String hdfsWorkdingDir) {
-        if (jobUuid == null || jobUuid.equals("")) {
-            throw new IllegalArgumentException("jobUuid can't be null or empty");
-        }
-        return hdfsWorkdingDir + "/" + JOB_WORKING_DIR_PREFIX + jobUuid;
-    }
-
-    @JsonProperty("name")
-    private String name;
-    @JsonProperty("type")
-    private CubeBuildTypeEnum type; // java implementation
-    @JsonProperty("duration")
-    private long duration;
-    @JsonProperty("related_cube")
-    private String relatedCube;
-    @JsonProperty("related_segment")
-    private String relatedSegment;
-    @JsonProperty("exec_start_time")
-    private long execStartTime;
-    @JsonProperty("exec_end_time")
-    private long execEndTime;
-    @JsonProperty("mr_waiting")
-    private long mrWaiting = 0;
-    @JsonManagedReference
-    @JsonProperty("steps")
-    private List<JobStep> steps;
-    @JsonProperty("submitter")
-    private String submitter;
-    
-    public JobStep getRunningStep() {
-        for (JobStep step : this.getSteps()) {
-            if (step.getStatus().equals(JobStepStatusEnum.RUNNING) || step.getStatus().equals(JobStepStatusEnum.WAITING)) {
-                return step;
-            }
-        }
-
-        return null;
-    }
-
-    @JsonProperty("progress")
-    public double getProgress() {
-        int completedStepCount = 0;
-        for (JobStep step : this.getSteps()) {
-            if (step.getStatus().equals(JobStepStatusEnum.FINISHED)) {
-                completedStepCount++;
-            }
-        }
-
-        return 100.0 * completedStepCount / steps.size();
-    }
-
-    @JsonProperty("job_status")
-    public JobStatusEnum getStatus() {
-        // JobStatusEnum finalJobStatus;
-        int compositResult = 0;
-
-        // if steps status are all NEW, then job status is NEW
-        // if steps status are all FINISHED, then job status is FINISHED
-        // if steps status are all PENDING, then job status is PENDING
-        // if steps status are FINISHED and PENDING, the job status is PENDING
-        // if one of steps status is RUNNING, then job status is RUNNING
-        // if one of steps status is ERROR, then job status is ERROR
-        // if one of steps status is KILLED, then job status is KILLED
-        // default status is RUNNING
-
-        for (JobStep step : this.getSteps()) {
-            compositResult = compositResult | step.getStatus().getCode();
-        }
-
-        if (compositResult == JobStatusEnum.FINISHED.getCode()) {
-            return JobStatusEnum.FINISHED;
-        } else if (compositResult == JobStatusEnum.NEW.getCode()) {
-            return JobStatusEnum.NEW;
-        } else if (compositResult == JobStatusEnum.PENDING.getCode()) {
-            return JobStatusEnum.PENDING;
-        } else if (compositResult == (JobStatusEnum.FINISHED.getCode() | JobStatusEnum.PENDING.getCode())) {
-            return JobStatusEnum.PENDING;
-        } else if ((compositResult & JobStatusEnum.ERROR.getCode()) == JobStatusEnum.ERROR.getCode()) {
-            return JobStatusEnum.ERROR;
-        } else if ((compositResult & JobStatusEnum.DISCARDED.getCode()) == JobStatusEnum.DISCARDED.getCode()) {
-            return JobStatusEnum.DISCARDED;
-        } else if ((compositResult & JobStatusEnum.RUNNING.getCode()) == JobStatusEnum.RUNNING.getCode()) {
-            return JobStatusEnum.RUNNING;
-        }
-
-        return JobStatusEnum.RUNNING;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public void setName(String name) {
-        this.name = name;
-    }
-
-    public CubeBuildTypeEnum getType() {
-        return type;
-    }
-
-    public void setType(CubeBuildTypeEnum type) {
-        this.type = type;
-    }
-
-    public long getDuration() {
-        return duration;
-    }
-
-    public void setDuration(long duration) {
-        this.duration = duration;
-    }
-
-    public String getRelatedCube() {
-        return relatedCube;
-    }
-
-    public void setRelatedCube(String relatedCube) {
-        this.relatedCube = relatedCube;
-    }
-
-    public String getRelatedSegment() {
-        return relatedSegment;
-    }
-
-    public void setRelatedSegment(String relatedSegment) {
-        this.relatedSegment = relatedSegment;
-    }
-
-    /**
-     * @return the execStartTime
-     */
-    public long getExecStartTime() {
-        return execStartTime;
-    }
-
-    /**
-     * @param execStartTime
-     *            the execStartTime to set
-     */
-    public void setExecStartTime(long execStartTime) {
-        this.execStartTime = execStartTime;
-    }
-
-    /**
-     * @return the execEndTime
-     */
-    public long getExecEndTime() {
-        return execEndTime;
-    }
-
-    /**
-     * @param execEndTime
-     *            the execEndTime to set
-     */
-    public void setExecEndTime(long execEndTime) {
-        this.execEndTime = execEndTime;
-    }
-
-    public long getMrWaiting() {
-        return this.mrWaiting;
-    }
-
-    public void setMrWaiting(long mrWaiting) {
-        this.mrWaiting = mrWaiting;
-    }
-
-    public List<JobStep> getSteps() {
-        if (steps == null) {
-            steps = Lists.newArrayList();
-        }
-        return steps;
-    }
-
-    public void clearSteps() {
-        getSteps().clear();
-    }
-
-    public void addSteps(Collection<JobStep> steps) {
-        this.getSteps().addAll(steps);
-    }
-
-    public void addStep(JobStep step) {
-        getSteps().add(step);
-    }
-
-    public void addStep(int index, JobStep step) {
-        getSteps().add(index, step);
-    }
-
-    public JobStep findStep(String stepName) {
-        for (JobStep step : getSteps()) {
-            if (stepName.equals(step.getName())) {
-                return step;
-            }
-        }
-        return null;
-    }
-
-    public String getSubmitter() {
-        return submitter;
-    }
-
-    public void setSubmitter(String submitter) {
-        this.submitter = submitter;
-    }
-
-    @JsonIgnoreProperties(ignoreUnknown = true)
-    public static class JobStep implements Comparable<JobStep> {
-
-        @JsonBackReference
-        private JobInstance jobInstance;
-
-        @JsonProperty("name")
-        private String name;
-
-        @JsonProperty("sequence_id")
-        private int sequenceID;
-
-        @JsonProperty("exec_cmd")
-        private String execCmd;
-
-        @JsonProperty("interrupt_cmd")
-        private String interruptCmd;
-
-        @JsonProperty("exec_start_time")
-        private long execStartTime;
-        @JsonProperty("exec_end_time")
-        private long execEndTime;
-        @JsonProperty("exec_wait_time")
-        private long execWaitTime;
-
-        @JsonProperty("step_status")
-        private JobStepStatusEnum status;
-
-        @JsonProperty("cmd_type")
-        private JobStepCmdTypeEnum cmdType = JobStepCmdTypeEnum.SHELL_CMD_HADOOP;
-
-        @JsonProperty("info")
-        private ConcurrentHashMap<String, String> info;
-
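-        // whether this step runs asynchronously (JobInstanceBuilder marks the MapReduce steps async; command-style steps stay synchronous)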
-        @JsonProperty("run_async")
-        private boolean runAsync = false;
-
-        private ConcurrentHashMap<String, String> getInfo() {
-            if (info == null) {
-                info = new ConcurrentHashMap<String, String>();
-            }
-            return info;
-        }
-
-        public void putInfo(String key, String value) {
-            getInfo().put(key, value);
-        }
-
-        public String getInfo(String key) {
-            return getInfo().get(key);
-        }
-
-        public void clearInfo() {
-            getInfo().clear();
-        }
-
-        public String getName() {
-            return name;
-        }
-
-        public void setName(String name) {
-            this.name = name;
-        }
-
-        public int getSequenceID() {
-            return sequenceID;
-        }
-
-        public void setSequenceID(int sequenceID) {
-            this.sequenceID = sequenceID;
-        }
-
-        public String getExecCmd() {
-            return execCmd;
-        }
-
-        public void setExecCmd(String execCmd) {
-            this.execCmd = execCmd;
-        }
-
-        public JobStepStatusEnum getStatus() {
-            return status;
-        }
-
-        public void setStatus(JobStepStatusEnum status) {
-            this.status = status;
-        }
-
-        /**
-         * @return the execStartTime
-         */
-        public long getExecStartTime() {
-            return execStartTime;
-        }
-
-        /**
-         * @param execStartTime
-         *            the execStartTime to set
-         */
-        public void setExecStartTime(long execStartTime) {
-            this.execStartTime = execStartTime;
-        }
-
-        /**
-         * @return the execEndTime
-         */
-        public long getExecEndTime() {
-            return execEndTime;
-        }
-
-        /**
-         * @param execEndTime
-         *            the execEndTime to set
-         */
-        public void setExecEndTime(long execEndTime) {
-            this.execEndTime = execEndTime;
-        }
-
-        public long getExecWaitTime() {
-            return execWaitTime;
-        }
-
-        public void setExecWaitTime(long execWaitTime) {
-            this.execWaitTime = execWaitTime;
-        }
-
-        public String getInterruptCmd() {
-            return interruptCmd;
-        }
-
-        public void setInterruptCmd(String interruptCmd) {
-            this.interruptCmd = interruptCmd;
-        }
-
-        public JobStepCmdTypeEnum getCmdType() {
-            return cmdType;
-        }
-
-        public void setCmdType(JobStepCmdTypeEnum cmdType) {
-            this.cmdType = cmdType;
-        }
-
-        /**
-         * @return the runAsync
-         */
-        public boolean isRunAsync() {
-            return runAsync;
-        }
-
-        /**
-         * @param runAsync
-         *            the runAsync to set
-         */
-        public void setRunAsync(boolean runAsync) {
-            this.runAsync = runAsync;
-        }
-
-        /**
-         * @return the jobInstance
-         */
-        public JobInstance getJobInstance() {
-            return jobInstance;
-        }
-
-        @Override
-        public int hashCode() {
-            final int prime = 31;
-            int result = 1;
-            result = prime * result + ((name == null) ? 0 : name.hashCode());
-            result = prime * result + sequenceID;
-            return result;
-        }
-
-        @Override
-        public boolean equals(Object obj) {
-            if (this == obj)
-                return true;
-            if (obj == null)
-                return false;
-            if (getClass() != obj.getClass())
-                return false;
-            JobStep other = (JobStep) obj;
-            if (name == null) {
-                if (other.name != null)
-                    return false;
-            } else if (!name.equals(other.name))
-                return false;
-            if (sequenceID != other.sequenceID)
-                return false;
-            return true;
-        }
-
-        @Override
-        public int compareTo(JobStep o) {
-            if (this.sequenceID < o.sequenceID) {
-                return -1;
-            } else if (this.sequenceID > o.sequenceID) {
-                return 1;
-            } else {
-                return 0;
-            }
-        }
-    }
-
-    @Override
-    public int compareTo(JobInstance o) {
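-        // newest first: order by last-modified time, descending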
-        return Long.compare(o.lastModified, this.lastModified);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/a4fd4268/job/src/main/java/com/kylinolap/job/JobInstanceBuilder.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/com/kylinolap/job/JobInstanceBuilder.java b/job/src/main/java/com/kylinolap/job/JobInstanceBuilder.java
deleted file mode 100644
index a17c0ff..0000000
--- a/job/src/main/java/com/kylinolap/job/JobInstanceBuilder.java
+++ /dev/null
@@ -1,490 +0,0 @@
-/*
- * Copyright 2013-2014 eBay Software Foundation
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.kylinolap.job;
-
-import java.io.IOException;
-import java.util.List;
-
-import com.google.common.collect.Lists;
-import org.apache.commons.lang3.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.kylinolap.common.util.JsonUtil;
-import com.kylinolap.cube.CubeInstance;
-import com.kylinolap.cube.CubeManager;
-import com.kylinolap.cube.CubeSegment;
-import com.kylinolap.cube.CubeSegmentStatusEnum;
-import com.kylinolap.dict.lookup.HiveTable;
-import com.kylinolap.job.JobInstance.JobStep;
-import com.kylinolap.job.constant.JobConstants;
-import com.kylinolap.job.constant.JobStepCmdTypeEnum;
-import com.kylinolap.job.constant.JobStepStatusEnum;
-import com.kylinolap.job.engine.JobEngineConfig;
-import com.kylinolap.job.hadoop.hive.JoinedFlatTableDesc;
-import com.kylinolap.metadata.MetadataManager;
-
-/**
- * @author George Song (ysong1)
- */
-public class JobInstanceBuilder {
-
-    private static final Logger log = LoggerFactory.getLogger(JobInstanceBuilder.class);
-
-    private CubeInstance cube;
-    private String htablename;
-    private String cubeName;
-    private String segmentName;
-    private CubeSegment cubeSegment;
-    private String jobUUID;
-    private final JobEngineConfig engineConfig;
-
-    private String jobWorkingDir;
-
-    public JobInstanceBuilder(JobEngineConfig engineCfg) {
-        this.engineConfig = engineCfg;
-    }
-
-    public List<JobStep> buildSteps(JobInstance jobInstance) throws IOException {
-        init(jobInstance);
-        switch (jobInstance.getType()) {
-        case BUILD:
-            return createBuildCubeSegmentSteps(jobInstance);
-        case MERGE:
-            return createMergeCubeSegmentsSteps(jobInstance);
-        default:
-            throw new IllegalArgumentException("job type:" + jobInstance.getType() + " not supported");
-        }
-    }
-
-    private void init(JobInstance jobInstance) {
-        cubeName = jobInstance.getRelatedCube();
-        if (cubeName == null) {
-            throw new IllegalArgumentException("Cube name is null or empty!");
-        }
-        cube = CubeManager.getInstance(this.engineConfig.getConfig()).getCube(cubeName);
-        jobUUID = jobInstance.getUuid();
-        if (jobUUID == null || jobUUID.equals("")) {
-            throw new IllegalArgumentException("Job UUID is null or empty!");
-        }
-
-        segmentName = jobInstance.getRelatedSegment();
-        if (segmentName == null || segmentName.equals("")) {
-            throw new IllegalArgumentException("Cube segment name is null or empty!");
-        }
-
-        // only a NEW segment can be built
-        cubeSegment = cube.getSegment(segmentName, CubeSegmentStatusEnum.NEW);
-        htablename = cubeSegment.getStorageLocationIdentifier();
-
-        this.jobWorkingDir = JobInstance.getJobWorkingDir(jobInstance, engineConfig);
-    }
-
-    private String appendMapReduceParameters(String cmd, JobInstance jobInstance) throws IOException {
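-        // append the capacity-specific Hadoop job conf file (if configured) and any extra MR arguments,
-        // substituting the ${CUBE}, ${TYPE} and ${UUID} placeholders from the job instance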
-        StringBuilder buf = new StringBuilder(cmd);
-        String jobConf = engineConfig.getHadoopJobConfFilePath(cube.getDescriptor().getCapacity());
-        if (StringUtils.isNotBlank(jobConf)) {
-            buf.append(" -conf ").append(jobConf);
-        }
-
-        String extraArgs = engineConfig.getMapReduceCmdExtraArgs();
-        if (StringUtils.isNotBlank(extraArgs)) {
-            extraArgs = extraArgs.replace("${CUBE}", jobInstance.getRelatedCube());
-            extraArgs = extraArgs.replace("${TYPE}", jobInstance.getType().toString());
-            extraArgs = extraArgs.replace("${UUID}", jobInstance.getUuid());
-            buf.append(" ").append(extraArgs);
-        }
-
-        return buf.toString();
-    }
-
-    private String appendExecCmdParameters(String cmd, String paraName, String paraValue) {
-        StringBuilder buf = new StringBuilder(cmd);
-        buf.append(" -").append(paraName).append(" ").append(paraValue);
-        return buf.toString();
-    }
-
-    private String getIntermediateHiveTablePath() {
-        JoinedFlatTableDesc intermediateTableDesc = new JoinedFlatTableDesc(cube.getDescriptor(), this.cubeSegment);
-        return JoinedFlatTable.getTableDir(intermediateTableDesc, jobWorkingDir, jobUUID);
-    }
-
-    private String[] getCuboidOutputPaths(String cuboidRootPath, int totalRowkeyColumnCount, int groupRowkeyColumnsCount) {
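-        // paths[0] holds the base cuboid output; paths[i] (i > 0) holds the output of the (totalRowkeyColumnCount - i)-dimension cuboids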
-        String[] paths = new String[groupRowkeyColumnsCount + 1];
-        for (int i = 0; i <= groupRowkeyColumnsCount; i++) {
-            int dimNum = totalRowkeyColumnCount - i;
-            if (dimNum == totalRowkeyColumnCount) {
-                paths[i] = cuboidRootPath + "base_cuboid";
-            } else {
-                paths[i] = cuboidRootPath + dimNum + "d_cuboid";
-            }
-        }
-        return paths;
-    }
-
-    private String getFactDistinctColumnsPath() {
-        return jobWorkingDir + "/" + cubeName + "/fact_distinct_columns";
-    }
-
-    private String getRowkeyDistributionOutputPath() {
-        return jobWorkingDir + "/" + cubeName + "/rowkey_stats";
-    }
-
-    private List<JobStep> createMergeCubeSegmentsSteps(JobInstance jobInstance) throws IOException {
-
-        List<CubeSegment> mergingSegments = cube.getMergingSegments();
-        if (mergingSegments == null || mergingSegments.size() < 2) {
-            throw new IllegalArgumentException("Merging segments count should be more than 2");
-        }
-
-        String[] cuboidPaths = new String[mergingSegments.size()];
-        for (int i = 0; i < mergingSegments.size(); i++) {
-            cuboidPaths[i] = getPathToMerge(jobInstance, mergingSegments.get(i));
-        }
-        String formattedPath = formatPaths(cuboidPaths);
-
-        // clear existing steps
-//        jobInstance.clearSteps();
-        int stepSeqNum = 0;
-        List<JobStep> result = Lists.newArrayList();
-        final String mergedCuboidPath = jobWorkingDir + "/" + cubeName + "/cuboid";
-
-        // merge cuboid data of ancestor segments
-        result.add(createMergeCuboidDataStep(jobInstance, stepSeqNum++, formattedPath, mergedCuboidPath));
-
-        // get output distribution step
-        result.add(createRangeRowkeyDistributionStep(jobInstance, stepSeqNum++, mergedCuboidPath));
-
-        // create htable step
-        result.add(createCreateHTableStep(jobInstance, stepSeqNum++));
-
-        // generate hfiles step
-        result.add(createConvertCuboidToHfileStep(jobInstance, stepSeqNum++, mergedCuboidPath));
-
-        // bulk load step
-        result.add(createBulkLoadStep(jobInstance, stepSeqNum++));
-
-        try {
-            log.debug(JsonUtil.writeValueAsIndentString(jobInstance));
-        } catch (Exception e) {
-            log.error(e.getMessage(), e);
-        }
-        return result;
-    }
-
-    private List<JobStep> createBuildCubeSegmentSteps(JobInstance jobInstance) throws IOException {
-
-        // clear existing steps
-//        jobInstance.clearSteps();
-
-        int groupRowkeyColumnsCount = cube.getDescriptor().getRowkey().getNCuboidBuildLevels();
-        int totalRowkeyColumnsCount = cube.getDescriptor().getRowkey().getRowKeyColumns().length;
-
-        int stepSeqNum = 0;
-        List<JobStep> result = Lists.newArrayList();
-        if (this.engineConfig.isFlatTableByHive()) {
-            // by default in here
-
-            // flat hive table step
-            result.add(createIntermediateHiveTableStep(jobInstance, stepSeqNum++));
-        }
-
-        // fact distinct columns step
-        result.add(createFactDistinctColumnsStep(jobInstance, stepSeqNum++));
-
-        // build dictionary step
-        result.add(createBuildDictionaryStep(jobInstance, stepSeqNum++));
-
-        final String cuboidRootPath = jobWorkingDir + "/" + cubeName + "/cuboid/";
-        final String cuboidTmpRootPath = jobWorkingDir + "/" + cubeName + "/tmp_cuboid/";
-        final boolean incBuildMerge = cube.needMergeImmediatelyAfterBuild(cubeSegment);
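-        // if the new segment must be merged immediately after build, write its cuboids to a temporary
-        // directory first, then merge them with the cuboids of all READY segments into the final cuboid path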
-
-        String[] cuboidOutputTempPath = getCuboidOutputPaths(incBuildMerge ? cuboidTmpRootPath : cuboidRootPath, totalRowkeyColumnsCount, groupRowkeyColumnsCount);
-        // base cuboid step
-        result.add(createBaseCuboidStep(jobInstance, stepSeqNum++, cuboidOutputTempPath));
-
-        // n dim cuboid steps
-        for (int i = 1; i <= groupRowkeyColumnsCount; i++) {
-            int dimNum = totalRowkeyColumnsCount - i;
-            result.add(createNDimensionCuboidStep(jobInstance, stepSeqNum++, cuboidOutputTempPath, dimNum, totalRowkeyColumnsCount));
-        }
-
-        if (incBuildMerge) {
-            List<String> pathToMerge = Lists.newArrayList();
-            for (CubeSegment segment: cube.getSegments(CubeSegmentStatusEnum.READY)) {
-                pathToMerge.add(getPathToMerge(jobInstance, segment));
-            }
-            pathToMerge.add(cuboidTmpRootPath + "*");
-            result.add(createMergeCuboidDataStep(jobInstance, stepSeqNum++, formatPaths(pathToMerge), cuboidRootPath));
-        }
-        String cuboidPath = incBuildMerge ? cuboidRootPath : cuboidRootPath + "*";
-
-        // get output distribution step
-        result.add(createRangeRowkeyDistributionStep(jobInstance, stepSeqNum++, cuboidPath));
-
-        // create htable step
-        result.add(createCreateHTableStep(jobInstance, stepSeqNum++));
-        // generate hfiles step
-        result.add(createConvertCuboidToHfileStep(jobInstance, stepSeqNum++, cuboidPath));
-        // bulk load step
-        result.add(createBulkLoadStep(jobInstance, stepSeqNum++));
-
-        try {
-            log.debug(JsonUtil.writeValueAsIndentString(jobInstance));
-        } catch (Exception e) {
-            log.error(e.getMessage(), e);
-        }
-        return result;
-    }
-
-    private String getPathToMerge(JobInstance jobInstance, CubeSegment segment) {
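-        // locate the segment's cuboid output by its uuid; fall back to the last build job ID when the uuid is missing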
-        String uuid = segment.getUuid();
-        if (uuid == null) {
-            uuid = segment.getLastBuildJobID();
-        }
-        return JobInstance.getJobWorkingDir(uuid, engineConfig.getHdfsWorkingDirectory()) + "/" + jobInstance.getRelatedCube() + "/cuboid/*";
-    }
-
-    private String formatPaths(String[] paths) {
-        return StringUtils.join(paths, ",");
-    }
-
-    private String formatPaths(List<String> paths) {
-        return StringUtils.join(paths, ",");
-    }
-
-    private JobStep createBuildDictionaryStep(JobInstance jobInstance, int stepSeqNum) {
-        // build dictionary step
-        JobStep buildDictionaryStep = new JobStep();
-        buildDictionaryStep.setName(JobConstants.STEP_NAME_BUILD_DICTIONARY);
-        String cmd = "";
-        cmd = appendExecCmdParameters(cmd, "cubename", cubeName);
-        cmd = appendExecCmdParameters(cmd, "segmentname", segmentName);
-        cmd = appendExecCmdParameters(cmd, "input", getFactDistinctColumnsPath());
-
-        buildDictionaryStep.setExecCmd(cmd);
-        buildDictionaryStep.setSequenceID(stepSeqNum);
-        buildDictionaryStep.setStatus(JobStepStatusEnum.PENDING);
-        buildDictionaryStep.setRunAsync(false);
-        buildDictionaryStep.setCmdType(JobStepCmdTypeEnum.JAVA_CMD_HADOOP_NO_MR_DICTIONARY);
-        return buildDictionaryStep;
-    }
-
-    private JobStep createIntermediateHiveTableStep(JobInstance jobInstance, int stepSeqNum) throws IOException {
-        JoinedFlatTableDesc intermediateTableDesc = new JoinedFlatTableDesc(cube.getDescriptor(), this.cubeSegment);
-        String dropTableHql = JoinedFlatTable.generateDropTableStatement(intermediateTableDesc, jobUUID);
-        String createTableHql = JoinedFlatTable.generateCreateTableStatement(intermediateTableDesc, jobWorkingDir, jobUUID);
-        String insertDataHql = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobUUID, this.engineConfig);
-
-        JobStep intermediateHiveTableStep = new JobStep();
-        intermediateHiveTableStep.setName(JobConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE);
-
-        StringBuilder buf = new StringBuilder();
-        buf.append("hive -e \"");
-        buf.append(dropTableHql).append("\n");
-        buf.append(createTableHql).append("\n");
-        buf.append(insertDataHql).append("\n");
-        buf.append("\"");
-
-        intermediateHiveTableStep.setSequenceID(stepSeqNum);
-        intermediateHiveTableStep.setExecCmd(buf.toString());
-        intermediateHiveTableStep.setStatus(JobStepStatusEnum.PENDING);
-        intermediateHiveTableStep.setRunAsync(false);
-        intermediateHiveTableStep.setCmdType(JobStepCmdTypeEnum.SHELL_CMD_HADOOP);
-        return intermediateHiveTableStep;
-    }
-
-    private JobStep createFactDistinctColumnsStep(JobInstance jobInstance, int stepSeqNum) throws IOException {
-        // fact distinct columns step
-        JobStep factDistinctColumnsStep = new JobStep();
-
-        String inputLocation;
-        String cmd = "";
-
-        inputLocation = getIntermediateHiveTablePath();
-        cmd = appendMapReduceParameters(cmd, jobInstance);
-
-        factDistinctColumnsStep.setName(JobConstants.STEP_NAME_FACT_DISTINCT_COLUMNS);
-
-        cmd = appendExecCmdParameters(cmd, "cubename", cubeName);
-        cmd = appendExecCmdParameters(cmd, "input", inputLocation);
-        cmd = appendExecCmdParameters(cmd, "output", getFactDistinctColumnsPath());
-        cmd = appendExecCmdParameters(cmd, "jobname", "Kylin_Fact_Distinct_Columns_" + jobInstance.getRelatedCube() + "_Step_" + stepSeqNum);
-
-        factDistinctColumnsStep.setExecCmd(cmd);
-        factDistinctColumnsStep.setSequenceID(stepSeqNum);
-        factDistinctColumnsStep.setStatus(JobStepStatusEnum.PENDING);
-        factDistinctColumnsStep.setRunAsync(true);
-        factDistinctColumnsStep.setCmdType(JobStepCmdTypeEnum.JAVA_CMD_HADOOP_FACTDISTINCT);
-        return factDistinctColumnsStep;
-    }
-
-    private JobStep createBaseCuboidStep(JobInstance jobInstance, int stepSeqNum, String[] cuboidOutputTempPath) throws IOException {
-        // base cuboid job
-        JobStep baseCuboidStep = new JobStep();
-
-        String inputLocation;
-        String cmd = "";
-
-        if (this.engineConfig.isFlatTableByHive()) {
-            inputLocation = getIntermediateHiveTablePath();
-            cmd = appendMapReduceParameters(cmd, jobInstance);
-        } else {
-            HiveTable factTableInHive = new HiveTable(MetadataManager.getInstance(this.engineConfig.getConfig()), cube.getDescriptor().getFactTable());
-            inputLocation = factTableInHive.getHDFSLocation(false);
-            cmd = appendMapReduceParameters(cmd, jobInstance);
-            cmd = appendExecCmdParameters(cmd, "inputformat", "TextInputFormat");
-        }
-
-        baseCuboidStep.setName(JobConstants.STEP_NAME_BUILD_BASE_CUBOID);
-
-        cmd = appendExecCmdParameters(cmd, "cubename", cubeName);
-        cmd = appendExecCmdParameters(cmd, "segmentname", segmentName);
-        cmd = appendExecCmdParameters(cmd, "input", inputLocation);
-        cmd = appendExecCmdParameters(cmd, "output", cuboidOutputTempPath[0]);
-        cmd = appendExecCmdParameters(cmd, "jobname", "Kylin_Base_Cuboid_Builder_" + jobInstance.getRelatedCube() + "_Step_" + stepSeqNum);
-        cmd = appendExecCmdParameters(cmd, "level", "0");
-
-        baseCuboidStep.setExecCmd(cmd);
-        baseCuboidStep.setSequenceID(stepSeqNum);
-        baseCuboidStep.setStatus(JobStepStatusEnum.PENDING);
-        baseCuboidStep.setRunAsync(true);
-        baseCuboidStep.setCmdType(JobStepCmdTypeEnum.JAVA_CMD_HADOOP_BASECUBOID);
-        return baseCuboidStep;
-    }
-
-    private JobStep createNDimensionCuboidStep(JobInstance jobInstance, int stepSeqNum, String[] cuboidOutputTempPath, int dimNum, int totalRowkeyColumnCount) throws IOException {
-        // ND cuboid job
-        JobStep ndCuboidStep = new JobStep();
-
-        ndCuboidStep.setName(JobConstants.STEP_NAME_BUILD_N_D_CUBOID + " : " + dimNum + "-Dimension");
-        String cmd = "";
-
-        cmd = appendMapReduceParameters(cmd, jobInstance);
-        cmd = appendExecCmdParameters(cmd, "cubename", cubeName);
-        cmd = appendExecCmdParameters(cmd, "segmentname", segmentName);
-        cmd = appendExecCmdParameters(cmd, "input", cuboidOutputTempPath[totalRowkeyColumnCount - dimNum - 1]);
-        cmd = appendExecCmdParameters(cmd, "output", cuboidOutputTempPath[totalRowkeyColumnCount - dimNum]);
-        cmd = appendExecCmdParameters(cmd, "jobname", "Kylin_ND-Cuboid_Builder_" + jobInstance.getRelatedCube() + "_Step_" + stepSeqNum);
-        cmd = appendExecCmdParameters(cmd, "level", "" + (totalRowkeyColumnCount - dimNum));
-
-        ndCuboidStep.setExecCmd(cmd);
-        ndCuboidStep.setSequenceID(stepSeqNum);
-        ndCuboidStep.setStatus(JobStepStatusEnum.PENDING);
-        ndCuboidStep.setRunAsync(true);
-        ndCuboidStep.setCmdType(JobStepCmdTypeEnum.JAVA_CMD_HADOOP_NDCUBOID);
-        return ndCuboidStep;
-    }
-
-    private JobStep createRangeRowkeyDistributionStep(JobInstance jobInstance, int stepSeqNum, String inputPath) throws IOException {
-        JobStep rowkeyDistributionStep = new JobStep();
-        rowkeyDistributionStep.setName(JobConstants.STEP_NAME_GET_CUBOID_KEY_DISTRIBUTION);
-        String cmd = "";
-
-        cmd = appendMapReduceParameters(cmd, jobInstance);
-        cmd = appendExecCmdParameters(cmd, "input", inputPath);
-        cmd = appendExecCmdParameters(cmd, "output", getRowkeyDistributionOutputPath());
-        cmd = appendExecCmdParameters(cmd, "jobname", "Kylin_Region_Splits_Calculator_" + jobInstance.getRelatedCube() + "_Step_" + stepSeqNum);
-        cmd = appendExecCmdParameters(cmd, "cubename", cubeName);
-
-        rowkeyDistributionStep.setExecCmd(cmd);
-        rowkeyDistributionStep.setSequenceID(stepSeqNum);
-        rowkeyDistributionStep.setStatus(JobStepStatusEnum.PENDING);
-        rowkeyDistributionStep.setRunAsync(true);
-        rowkeyDistributionStep.setCmdType(JobStepCmdTypeEnum.JAVA_CMD_HADOOP_RANGEKEYDISTRIBUTION);
-        return rowkeyDistributionStep;
-    }
-
-    private JobStep createMergeCuboidDataStep(JobInstance jobInstance, int stepSeqNum, String inputPath, String outputPath) throws IOException {
-        JobStep mergeCuboidDataStep = new JobStep();
-        mergeCuboidDataStep.setName(JobConstants.STEP_NAME_MERGE_CUBOID);
-        String cmd = "";
-
-        cmd = appendMapReduceParameters(cmd, jobInstance);
-        cmd = appendExecCmdParameters(cmd, "cubename", cubeName);
-        cmd = appendExecCmdParameters(cmd, "segmentname", segmentName);
-        cmd = appendExecCmdParameters(cmd, "input", inputPath);
-        cmd = appendExecCmdParameters(cmd, "output", outputPath);
-        cmd = appendExecCmdParameters(cmd, "jobname", "Kylin_Merge_Cuboid_" + jobInstance.getRelatedCube() + "_Step_" + stepSeqNum);
-
-        mergeCuboidDataStep.setExecCmd(cmd);
-        mergeCuboidDataStep.setSequenceID(stepSeqNum);
-        mergeCuboidDataStep.setStatus(JobStepStatusEnum.PENDING);
-        mergeCuboidDataStep.setRunAsync(true);
-        mergeCuboidDataStep.setCmdType(JobStepCmdTypeEnum.JAVA_CMD_HADOOP_MERGECUBOID);
-        return mergeCuboidDataStep;
-    }
-
-    private JobStep createCreateHTableStep(JobInstance jobInstance, int stepSeqNum) {
-        JobStep createHtableStep = new JobStep();
-        createHtableStep.setName(JobConstants.STEP_NAME_CREATE_HBASE_TABLE);
-        String cmd = "";
-        cmd = appendExecCmdParameters(cmd, "cubename", cubeName);
-        cmd = appendExecCmdParameters(cmd, "input", getRowkeyDistributionOutputPath() + "/part-r-00000");
-        cmd = appendExecCmdParameters(cmd, "htablename", htablename);
-
-        createHtableStep.setExecCmd(cmd);
-        createHtableStep.setSequenceID(stepSeqNum);
-        createHtableStep.setStatus(JobStepStatusEnum.PENDING);
-        createHtableStep.setRunAsync(false);
-        createHtableStep.setCmdType(JobStepCmdTypeEnum.JAVA_CMD_HADDOP_NO_MR_CREATEHTABLE);
-
-        return createHtableStep;
-    }
-
-    private JobStep createConvertCuboidToHfileStep(JobInstance jobInstance, int stepSeqNum, String inputPath) throws IOException {
-        JobStep createHFilesStep = new JobStep();
-        createHFilesStep.setName(JobConstants.STEP_NAME_CONVERT_CUBOID_TO_HFILE);
-        String cmd = "";
-
-        cmd = appendMapReduceParameters(cmd, jobInstance);
-        cmd = appendExecCmdParameters(cmd, "cubename", cubeName);
-        cmd = appendExecCmdParameters(cmd, "input", inputPath);
-        cmd = appendExecCmdParameters(cmd, "output", jobWorkingDir + "/" + cubeName + "/hfile");
-        cmd = appendExecCmdParameters(cmd, "htablename", htablename);
-        cmd = appendExecCmdParameters(cmd, "jobname", "Kylin_HFile_Generator_" + jobInstance.getRelatedCube() + "_Step_" + stepSeqNum);
-
-        createHFilesStep.setExecCmd(cmd);
-        createHFilesStep.setSequenceID(stepSeqNum);
-        createHFilesStep.setStatus(JobStepStatusEnum.PENDING);
-        createHFilesStep.setRunAsync(true);
-        createHFilesStep.setCmdType(JobStepCmdTypeEnum.JAVA_CMD_HADOOP_CONVERTHFILE);
-
-        return createHFilesStep;
-
-    }
-
-    private JobStep createBulkLoadStep(JobInstance jobInstance, int stepSeqNum) {
-        JobStep bulkLoadStep = new JobStep();
-        bulkLoadStep.setName(JobConstants.STEP_NAME_BULK_LOAD_HFILE);
-
-        String cmd = "";
-        cmd = appendExecCmdParameters(cmd, "input", jobWorkingDir + "/" + cubeName + "/hfile/");
-        cmd = appendExecCmdParameters(cmd, "htablename", htablename);
-        cmd = appendExecCmdParameters(cmd, "cubename", cubeName);
-
-        bulkLoadStep.setSequenceID(stepSeqNum);
-        bulkLoadStep.setExecCmd(cmd);
-        bulkLoadStep.setStatus(JobStepStatusEnum.PENDING);
-        bulkLoadStep.setRunAsync(false);
-        bulkLoadStep.setCmdType(JobStepCmdTypeEnum.JAVA_CMD_HADOOP_NO_MR_BULKLOAD);
-
-        return bulkLoadStep;
-
-    }
-}