You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hawq.apache.org by rl...@apache.org on 2015/11/05 03:55:00 UTC
[14/17] incubator-hawq git commit: HAWQ-100. Code Cleanup:
gpmapreduce.
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/4e392375/src/bin/gpmapreduce/src/yaml_parse.y
----------------------------------------------------------------------
diff --git a/src/bin/gpmapreduce/src/yaml_parse.y b/src/bin/gpmapreduce/src/yaml_parse.y
deleted file mode 100644
index c1ceb27..0000000
--- a/src/bin/gpmapreduce/src/yaml_parse.y
+++ /dev/null
@@ -1,1066 +0,0 @@
-%output="yaml_parse.c"
-%name-prefix="yaml_yy"
-%pure-parser /* Because global variables are bad */
-%error-verbose /* A little extra debugging info */
-
-%{
- #include <yaml_parse.h>
- #include <yaml.h>
- #include <parser.h>
-
-/*
- * Ancient flex versions, like the ones on our build machines don't support
- * flex .h file generation.
- */
-#if USE_FLEX_HFILE
-# include <yaml_scan.h>
-#endif
-
- #include <stdio.h>
-
- int yaml_yylex(YYSTYPE *lvalp, mapred_parser_t *parser);
- void yaml_yyerror(mapred_parser_t *parser, char const *);
-%}
-%parse-param {mapred_parser_t *parser} /* So we can pass it to the lexer */
-%lex-param {mapred_parser_t *parser}
-
-%union {
- int keyword;
- char *token;
-}
-
-/* Keyword tokens - be sure to keep in sync with keyword rules below */
-%token <keyword>
- _COLUMNS_ _CONSOLIDATE_
- _DATABASE_ _DEFINE_ _DELIMITER_
- _ENCODING_ _ERROR_LIMIT_ _ESCAPE_ _EXEC_ _EXECUTE_
- _FILE_ _FINALIZE_ _FORMAT_ _FUNCTION_
- _GPFDIST_
- _HOST_
- _INITIALIZE_ _INPUT_
- _KEYS_
- _LANGUAGE_ _LIBRARY_
- _MAP_ _MODE_
- _NAME_ _NULL_
- _OPTIMIZE_ _ORDERING_ _OUTPUT_
- _PARAMETERS_ _PORT_
- _QUERY_ _QUOTE_
- _REDUCE_ _RETURNS_ _RUN_
- _SOURCE_
- _TABLE_ _TARGET_ _TASK_ _TRANSITION_
- _USER_
- _VERSION_
-
-
- /* Non-keyword scalar tokens */
-%token <token> _INTEGER_
-%token <token> _VERSION_STRING_
-%token <token> _ID_
-%token <token> _STRING_
-%type <token> scalar
-
-/* YAML State tokens */
-%token START_STREAM END_STREAM
-%token START_DOCUMENT END_DOCUMENT
-%token START_LIST END_LIST
-%token START_MAP END_MAP
-
-/* and the special "ERROR" token */
-%token ERROR
-
-%% /* Grammar rules and actions follow */
-
-
-stream:
- START_STREAM
- document_list
- END_STREAM
- {
- if (parser->current_doc &&
- parser->current_doc->u.document.flags & mapred_document_error)
- {
- YYABORT; /* If we found an error, return error */
- }
- }
- ;
-
-/*
- * For error recovery we often need all the keywords except 'foo', to help
- * facilitate this I break the keywords into logical groupings.
- */
-doc_keywords:
- _DATABASE_|_DEFINE_|_EXECUTE_|_HOST_|_PORT_|_USER_|_VERSION_
- ;
-
-obj_keywords:
- def_keywords|exec_keywords
- ;
-
-def_keywords:
- _CONSOLIDATE_|_FINALIZE_|_INPUT_|_MAP_|_OUTPUT_|_REDUCE_|
- _TASK_|_TRANSITION_
- ;
-
-exec_keywords:
- _RUN_
- ;
-
-func_keywords:
- _FUNCTION_|_LANGUAGE_|_OPTIMIZE_|_PARAMETERS_|_RETURNS_|_MODE_|_LIBRARY_
- ;
-
-io_keywords:
- _COLUMNS_|_DELIMITER_|_ENCODING_|_ERROR_LIMIT_|_ESCAPE_|_EXEC_|
- _FILE_|_FORMAT_|_GPFDIST_|_NULL_|_QUERY_|_QUOTE_|_TABLE_
- ;
-
-misc_keywords:
- _INITIALIZE_|_KEYS_|_ORDERING_|_SOURCE_|_TARGET_
- ;
-
-keyword:
- _NAME_|doc_keywords|obj_keywords|func_keywords|io_keywords|misc_keywords ;
-
-document_list:
- | document_list document
- ;
-
-document:
- START_DOCUMENT { parser_begin_document(parser); }
- document_contents
- END_DOCUMENT
- | error END_DOCUMENT
- ;
-
-document_contents:
- START_MAP doc_item_list END_MAP
-
- | valid_yaml_list
- { yaml_yyerror(parser, "Greenplum MapReduce document must begin with a YAML MAPPING"); }
- | scalar
- { yaml_yyerror(parser, "Greenplum MapReduce document must begin with a YAML MAPPING"); }
- ;
-
-doc_item_list:
- doc_item
- | doc_item_list doc_item
- ;
-
-doc_item:
- _VERSION_ doc_version
- | _DATABASE_ doc_database
- | _USER_ doc_user
- | _HOST_ doc_host
- | _PORT_ doc_port
- | _DEFINE_ doc_define
- | _EXECUTE_ doc_execute
-
- | _NAME_ { yaml_yyerror(parser, "Invalid Document Attribute"); } valid_yaml
- | obj_keywords { yaml_yyerror(parser, "Invalid Document Attribute"); } valid_yaml
- | func_keywords { yaml_yyerror(parser, "Invalid Document Attribute"); } valid_yaml
- | io_keywords { yaml_yyerror(parser, "Invalid Document Attribute"); } valid_yaml
- | misc_keywords { yaml_yyerror(parser, "Invalid Document Attribute"); } valid_yaml
- | scalar { yaml_yyerror(parser, "Invalid Document Attribute"); } valid_yaml
- | valid_yaml_list { yaml_yyerror(parser, "Invalid Document Attribute"); } valid_yaml
- | valid_yaml_map { yaml_yyerror(parser, "Invalid Document Attribute"); } valid_yaml
- ;
-
-doc_version:
- _VERSION_STRING_ { parser_set_version(parser, $1); }
-
- | valid_yaml_list { yaml_yyerror(parser, "VERSION must be a scalar value"); }
- | valid_yaml_map { yaml_yyerror(parser, "VERSION must be a scalar value"); }
- | _STRING_ { yaml_yyerror(parser, "Invalid VERSION format"); }
- | _INTEGER_ { yaml_yyerror(parser, "Invalid VERSION format"); }
- | _ID_ { yaml_yyerror(parser, "Invalid VERSION format"); }
- | error { yaml_yyerror(parser, "Invalid VERSION format"); }
- ;
-
-doc_database:
- _ID_ { parser_set_database(parser, $1); }
-
- | valid_yaml_list { yaml_yyerror(parser, "DATABASE must be a scalar value"); }
- | valid_yaml_map { yaml_yyerror(parser, "DATABASE must be a scalar value"); }
- | _STRING_ { yaml_yyerror(parser, "Invalid DATABASE format"); }
- | _INTEGER_ { yaml_yyerror(parser, "Invalid DATABASE format"); }
- | _VERSION_STRING_ { yaml_yyerror(parser, "Invalid DATABASE format"); }
- | error { yaml_yyerror(parser, "Invalid DATABASE format"); }
- ;
-
-doc_user:
- _ID_ { parser_set_user(parser, $1); }
-
- | valid_yaml_list { yaml_yyerror(parser, "USER must be a scalar value"); }
- | valid_yaml_map { yaml_yyerror(parser, "USER must be a scalar value"); }
- | _STRING_ { yaml_yyerror(parser, "Invalid USER format"); }
- | _INTEGER_ { yaml_yyerror(parser, "Invalid USER format"); }
- | _VERSION_STRING_ { yaml_yyerror(parser, "Invalid USER format"); }
- | error { yaml_yyerror(parser, "Invalid USER format"); }
- ;
-
-doc_host:
- scalar { parser_set_host(parser, $1); }
-
- | valid_yaml_list { yaml_yyerror(parser, "HOST must be a scalar value"); }
- | valid_yaml_map { yaml_yyerror(parser, "HOST must be a scalar value"); }
- | error { yaml_yyerror(parser, "Invalid HOST format"); }
- ;
-
-doc_port:
- _INTEGER_ { parser_set_port(parser, $1); }
-
- | valid_yaml_list { yaml_yyerror(parser, "PORT must be an integer value"); }
- | valid_yaml_map { yaml_yyerror(parser, "PORT must be an integer value"); }
- | _ID_ { yaml_yyerror(parser, "PORT must be an integer value"); }
- | _STRING_ { yaml_yyerror(parser, "PORT must be an integer value"); }
- | _VERSION_STRING_ { yaml_yyerror(parser, "PORT must be an integer value"); }
- | error { yaml_yyerror(parser, "PORT must be an integer value"); }
- ;
-
-doc_define:
- START_LIST { parser_begin_define(parser); }
- define_list
- END_LIST
-
- | scalar { yaml_yyerror(parser, "DEFINE must be a YAML LIST"); }
- | keyword { yaml_yyerror(parser, "DEFINE must be a YAML LIST"); }
- | valid_yaml_map { yaml_yyerror(parser, "DEFINE must be a YAML LIST"); }
-
-doc_execute:
- START_LIST { parser_begin_execute(parser); }
- execute_list
- END_LIST
-
- | scalar { yaml_yyerror(parser, "EXECUTE must be a YAML LIST"); }
- | keyword { yaml_yyerror(parser, "EXECUTE must be a YAML LIST"); }
- | valid_yaml_map { yaml_yyerror(parser, "EXECUTE must be a YAML LIST"); }
-
-
-execute_list:
- /* empty */
- | execute_list
- START_MAP
- execute_item
- END_MAP
-
- | valid_yaml_list { yaml_yyerror(parser, "List element found in EXECUTE"); }
- | scalar
- {
- char buffer[128];
- snprintf(buffer, sizeof(buffer), "Scalar value '%s' found in EXECUTE", $1);
- yaml_yyerror(parser, buffer);
- }
- ;
-
-execute_item:
- _RUN_ { parser_add_run(parser); }
- run_map
-
- | _NAME_ { yaml_yyerror(parser, "Invalid EXECUTE Attribute"); } valid_yaml
- | def_keywords { yaml_yyerror(parser, "Invalid EXECUTE Attribute"); } valid_yaml
- | func_keywords { yaml_yyerror(parser, "Invalid EXECUTE Attribute"); } valid_yaml
- | io_keywords { yaml_yyerror(parser, "Invalid EXECUTE Attribute"); } valid_yaml
- | misc_keywords { yaml_yyerror(parser, "Invalid EXECUTE Attribute"); } valid_yaml
- | valid_yaml_list { yaml_yyerror(parser, "YAML LIST element found in EXECUTE"); } valid_yaml
- | valid_yaml_map { yaml_yyerror(parser, "YAML MAPPING element found in EXECUTE"); } valid_yaml
- | scalar
- {
- char buffer[128];
- snprintf(buffer, sizeof(buffer), "'%s' is not a valid EXECUTE element", $1);
- yaml_yyerror(parser, buffer);
- }
- valid_yaml
- ;
-
-define_list:
- /* empty */
- | define_list
- START_MAP
- define_item
- more_define_items /* errors if there actually is something */
- END_MAP
-
- | valid_yaml_list { yaml_yyerror(parser, "List element found in DEFINE"); }
- | scalar
- {
- char buffer[128];
- snprintf(buffer, sizeof(buffer), "Scalar value '%s' found in DEFINE", $1);
- yaml_yyerror(parser, buffer);
- }
- ;
-
-more_define_items:
- /* only good second item is one that doesn't exist */
- | keyword { yaml_yyerror(parser, "Multiple objects in one list element"); } valid_yaml
- more_define_items
- | valid_yaml { yaml_yyerror(parser, "Multiple objects in one list element"); } valid_yaml
- more_define_items
-
-
-define_item:
- _INPUT_ { parser_add_object(parser, MAPRED_INPUT); } input_map
- | _OUTPUT_ { parser_add_object(parser, MAPRED_OUTPUT); } output_map
- | _REDUCE_ { parser_add_object(parser, MAPRED_REDUCER); } reduce_map
- | _MAP_ { parser_add_object(parser, MAPRED_MAPPER); } function_map
- | _TRANSITION_ { parser_add_object(parser, MAPRED_TRANSITION); } function_map
- | _CONSOLIDATE_ { parser_add_object(parser, MAPRED_COMBINER); } function_map
- | _FINALIZE_ { parser_add_object(parser, MAPRED_FINALIZER); } function_map
- | _TASK_ { parser_add_object(parser, MAPRED_TASK); } task_map
-
-
- | _NAME_ { yaml_yyerror(parser, "Invalid DEFINE Attribute"); } valid_yaml
- | exec_keywords { yaml_yyerror(parser, "Invalid DEFINE Attribute"); } valid_yaml
- | func_keywords { yaml_yyerror(parser, "Invalid DEFINE Attribute"); } valid_yaml
- | io_keywords { yaml_yyerror(parser, "Invalid DEFINE Attribute"); } valid_yaml
- | misc_keywords { yaml_yyerror(parser, "Invalid DEFINE Attribute"); } valid_yaml
- | valid_yaml_list { yaml_yyerror(parser, "YAML LIST element found in DEFINE"); } valid_yaml
- | valid_yaml_map { yaml_yyerror(parser, "YAML MAPPING element found in DEFINE"); } valid_yaml
- | scalar
- {
- char buffer[128];
- snprintf(buffer, sizeof(buffer), "'%s' is not a valid DEFINE element", $1);
- yaml_yyerror(parser, buffer);
- } valid_yaml
- ;
-
-input_map:
- START_MAP input_item_map END_MAP
-
- | valid_yaml_list { yaml_yyerror(parser, "INPUT must contain a YAML MAPPING"); }
- | scalar { yaml_yyerror(parser, "INPUT must contain a YAML MAPPING"); }
- ;
-
-output_map:
- START_MAP output_item_map END_MAP
-
- | valid_yaml_list { yaml_yyerror(parser, "OUTPUT must contain a YAML MAPPING"); }
- | scalar { yaml_yyerror(parser, "OUTPUT must contain a YAML MAPPING"); }
- ;
-
-reduce_map:
- START_MAP reduce_item_map END_MAP
-
- | valid_yaml_list { yaml_yyerror(parser, "REDUCE must contain a YAML MAPPING"); }
- | scalar { yaml_yyerror(parser, "REDUCE must contain a YAML MAPPING"); }
- ;
-
-function_map:
- START_MAP function_item_map END_MAP
-
- /* FIXME: error should refer to MAP/TRANSITION/... not FUNCTION */
- | valid_yaml_list { yaml_yyerror(parser, "FUNCTION must contain a YAML MAPPING"); }
- | scalar { yaml_yyerror(parser, "FUNCTION must contain a YAML MAPPING"); }
- ;
-
-task_map:
- START_MAP task_item_map END_MAP
-
- | valid_yaml_list { yaml_yyerror(parser, "TASK must contain a YAML MAPPING"); }
- | scalar { yaml_yyerror(parser, "TASK must contain a YAML MAPPING"); }
- ;
-
-run_map:
- START_MAP run_item_map END_MAP
-
- | valid_yaml_list { yaml_yyerror(parser, "RUN must contain a YAML MAPPING"); }
- | scalar { yaml_yyerror(parser, "RUN must contain a YAML MAPPING"); }
- ;
-
-
-input_item_map: input_item | input_item_map input_item ;
-output_item_map: output_item | output_item_map output_item ;
-reduce_item_map: reduce_item | reduce_item_map reduce_item ;
-function_item_map: function_item | function_item_map function_item ;
-task_item_map: task_item | task_item_map task_item ;
-run_item_map: run_item | run_item_map run_item ;
-
-input_item:
- _NAME_ obj_name
- | _COLUMNS_ { parser_begin_columns(parser); } column_list
- | _FILE_ { parser_begin_files(parser); } file_list
- | _GPFDIST_ { parser_begin_gpfdist(parser); } file_list
- | _TABLE_ io_table
- | _QUERY_ input_query
- | _EXEC_ input_exec
- | _FORMAT_ io_format
- | _DELIMITER_ scalar { parser_set_delimiter(parser, $2); }
- | _NULL_ scalar { parser_set_null(parser, $2); }
- | _QUOTE_ scalar { parser_set_quote(parser, $2); }
- | _ESCAPE_ scalar { parser_set_escape(parser, $2); }
- | _ENCODING_ scalar { parser_set_encoding(parser, $2); }
- | _ERROR_LIMIT_ _INTEGER_ { parser_set_error_limit(parser, $2); }
-
- /* Error recovery */
- | _DELIMITER_ valid_yaml_list { parser_set_delimiter(parser, 0); }
- | _DELIMITER_ valid_yaml_map { parser_set_delimiter(parser, 0); }
- | _NULL_ valid_yaml_list { parser_set_null(parser, 0); }
- | _NULL_ valid_yaml_map { parser_set_null(parser, 0); }
- | _QUOTE_ valid_yaml_list { parser_set_quote(parser, 0); }
- | _QUOTE_ valid_yaml_map { parser_set_quote(parser, 0); }
- | _ESCAPE_ valid_yaml_list { parser_set_escape(parser, 0); }
- | _ESCAPE_ valid_yaml_map { parser_set_escape(parser, 0); }
- | _ENCODING_ valid_yaml_list { parser_set_encoding(parser, 0); }
- | _ENCODING_ valid_yaml_map { parser_set_encoding(parser, 0); }
- | _ERROR_LIMIT_ valid_yaml_list { parser_set_error_limit(parser, 0); }
- | _ERROR_LIMIT_ valid_yaml_map { parser_set_error_limit(parser, 0); }
- | _ERROR_LIMIT_ _STRING_ { parser_set_error_limit(parser, 0); }
- | _ERROR_LIMIT_ _ID_ { parser_set_error_limit(parser, 0); }
- | _ERROR_LIMIT_ _VERSION_STRING_ { parser_set_error_limit(parser, 0); }
- | doc_keywords { yaml_yyerror(parser, "Invalid INPUT Attribute"); } valid_yaml
- | obj_keywords { yaml_yyerror(parser, "Invalid INPUT Attribute"); } valid_yaml
- | func_keywords { yaml_yyerror(parser, "Invalid INPUT Attribute"); } valid_yaml
- | misc_keywords { yaml_yyerror(parser, "Invalid INPUT Attribute"); } valid_yaml
- | valid_yaml_list { yaml_yyerror(parser, "YAML LIST element found in INPUT"); } valid_yaml
- | valid_yaml_map { yaml_yyerror(parser, "YAML MAPPING element found in INPUT"); } valid_yaml
- | scalar
- {
- char buffer[128];
- snprintf(buffer, 128, "%s is not a valid INPUT attribute", $1);
- yaml_yyerror(parser, buffer);
- } valid_yaml
- ;
-
-
-output_item:
- _NAME_ obj_name
- | _TABLE_ io_table
- | _FILE_ scalar { parser_set_file(parser, $2); }
- | _FORMAT_ io_format
- | _DELIMITER_ scalar { parser_set_delimiter(parser, $2); }
- | _MODE_ scalar { parser_set_mode(parser, $2); }
-
- /* Error recovery */
- | _FILE_ valid_yaml_list { parser_set_file(parser, 0); }
- | _FILE_ valid_yaml_map { parser_set_file(parser, 0); }
- | _DELIMITER_ valid_yaml_list { parser_set_delimiter(parser, 0); }
- | _DELIMITER_ valid_yaml_map { parser_set_delimiter(parser, 0); }
- | _MODE_ valid_yaml_list { parser_set_mode(parser, 0); }
- | _MODE_ valid_yaml_map { parser_set_mode(parser, 0); }
- | doc_keywords { yaml_yyerror(parser, "Invalid OUTPUT Attribute"); } valid_yaml
- | obj_keywords { yaml_yyerror(parser, "Invalid OUTPUT Attribute"); } valid_yaml
- | _FUNCTION_ { yaml_yyerror(parser, "Invalid OUTPUT Attribute"); } valid_yaml
- | _LIBRARY_ { yaml_yyerror(parser, "Invalid OUTPUT Attribute"); } valid_yaml
- | _LANGUAGE_ { yaml_yyerror(parser, "Invalid OUTPUT Attribute"); } valid_yaml
- | _OPTIMIZE_ { yaml_yyerror(parser, "Invalid OUTPUT Attribute"); } valid_yaml
- | _PARAMETERS_ { yaml_yyerror(parser, "Invalid OUTPUT Attribute"); } valid_yaml
- | _RETURNS_ { yaml_yyerror(parser, "Invalid OUTPUT Attribute"); } valid_yaml
- | misc_keywords { yaml_yyerror(parser, "Invalid OUTPUT Attribute"); } valid_yaml
- | valid_yaml_list { yaml_yyerror(parser, "YAML LIST element found in OUTPUT"); } valid_yaml
- | valid_yaml_map { yaml_yyerror(parser, "YAML MAPPING element found in OUTPUT"); } valid_yaml
-
- | _STRING_
- {
- char buffer[128];
- snprintf(buffer, 128, "%s is not a valid OUTPUT attribute", $1);
- yaml_yyerror(parser, buffer);
- } valid_yaml
- ;
-
-reduce_item:
- _NAME_ obj_name
- | _TRANSITION_ _ID_ { parser_set_transition(parser, $2); }
- | _CONSOLIDATE_ _ID_ { parser_set_combiner(parser, $2); }
- | _FINALIZE_ _ID_ { parser_set_finalizer(parser, $2); }
- | _INITIALIZE_ scalar { parser_set_initialize(parser, $2); }
- | _KEYS_ { parser_begin_keys(parser); }
- key_list
- | _ORDERING_ { parser_begin_ordering(parser); }
- ordering_list
-
- /* Error recovery */
- | _INPUT_ { yaml_yyerror(parser, "Invalid REDUCE Attribute"); } valid_yaml
- | _OUTPUT_ { yaml_yyerror(parser, "Invalid REDUCE Attribute"); } valid_yaml
- | _MAP_ { yaml_yyerror(parser, "Invalid REDUCE Attribute"); } valid_yaml
- | _REDUCE_ { yaml_yyerror(parser, "Invalid REDUCE Attribute"); } valid_yaml
- | _TASK_ { yaml_yyerror(parser, "Invalid REDUCE Attribute"); } valid_yaml
- | _SOURCE_ { yaml_yyerror(parser, "Invalid REDUCE Attribute"); } valid_yaml
- | _TARGET_ { yaml_yyerror(parser, "Invalid REDUCE Attribute"); } valid_yaml
- | doc_keywords { yaml_yyerror(parser, "Invalid REDUCE Attribute"); } valid_yaml
- | exec_keywords { yaml_yyerror(parser, "Invalid REDUCE Attribute"); } valid_yaml
- | func_keywords { yaml_yyerror(parser, "Invalid REDUCE Attribute"); } valid_yaml
- | io_keywords { yaml_yyerror(parser, "Invalid REDUCE Attribute"); } valid_yaml
- | valid_yaml_list { yaml_yyerror(parser, "YAML LIST element found in REDUCE"); } valid_yaml
- | valid_yaml_map { yaml_yyerror(parser, "YAML MAPPING element found in REDUCE"); } valid_yaml
- | _STRING_
- {
- char buffer[128];
- snprintf(buffer, 128, "%s is not a valid REDUCER attribute", $1);
- yaml_yyerror(parser, buffer);
- } valid_yaml
- ;
-
-function_item:
- _NAME_ obj_name
- | _LANGUAGE_ scalar { parser_set_language(parser, $2); }
- | _FUNCTION_ scalar { parser_set_function(parser, $2); }
- | _LIBRARY_ scalar { parser_set_library(parser, $2); }
- | _MODE_ scalar { parser_set_mode(parser, $2); }
- | _OPTIMIZE_ scalar { parser_set_optimize(parser, $2); }
- | _PARAMETERS_ { parser_begin_parameters(parser); }
- parameter_list
- | _RETURNS_ { parser_begin_returns(parser); }
- return_list
-
- /* Error recovery */
- | _LANGUAGE_ valid_yaml_map { parser_set_language(parser, 0); }
- | _LANGUAGE_ valid_yaml_list { parser_set_language(parser, 0); }
- | _FUNCTION_ valid_yaml_map { parser_set_function(parser, 0); }
- | _FUNCTION_ valid_yaml_list { parser_set_function(parser, 0); }
- | _LIBRARY_ valid_yaml_map { parser_set_library(parser, 0); }
- | _LIBRARY_ valid_yaml_list { parser_set_library(parser, 0); }
- | _MODE_ valid_yaml_map { parser_set_mode(parser, 0); }
- | _MODE_ valid_yaml_list { parser_set_mode(parser, 0); }
- | _OPTIMIZE_ valid_yaml_map { parser_set_optimize(parser, 0); }
- | _OPTIMIZE_ valid_yaml_list { parser_set_optimize(parser, 0); }
- | doc_keywords { yaml_yyerror(parser, "Invalid FUNCTION Attribute"); } valid_yaml
- | obj_keywords { yaml_yyerror(parser, "Invalid FUNCTION Attribute"); } valid_yaml
- | io_keywords { yaml_yyerror(parser, "Invalid FUNCTION Attribute"); } valid_yaml
- | misc_keywords { yaml_yyerror(parser, "Invalid FUNCTION Attribute"); } valid_yaml
- | valid_yaml_list { yaml_yyerror(parser, "YAML LIST element found in FUNCTION"); } valid_yaml
- | valid_yaml_map { yaml_yyerror(parser, "YAML MAPPING element found in FUNCTION"); } valid_yaml
- | _STRING_
- {
- char buffer[128];
- snprintf(buffer, 128, "%s is not a valid FUNCTION attribute", $1);
- yaml_yyerror(parser, buffer);
- } valid_yaml
- ;
-
-task_item:
- _NAME_ obj_name
- | _SOURCE_ _ID_ { parser_set_source(parser, $2); }
- | _MAP_ _ID_ { parser_set_mapper(parser, $2); }
- | _REDUCE_ _ID_ { parser_set_reducer(parser, $2); }
-
- /* Error recovery */
- | doc_keywords { yaml_yyerror(parser, "Invalid TASK Attribute"); } valid_yaml
- | exec_keywords { yaml_yyerror(parser, "Invalid TASK Attribute"); } valid_yaml
- | _TRANSITION_ { yaml_yyerror(parser, "Invalid TASK Attribute"); } valid_yaml
- | _CONSOLIDATE_ { yaml_yyerror(parser, "Invalid TASK Attribute"); } valid_yaml
- | _FINALIZE_ { yaml_yyerror(parser, "Invalid TASK Attribute"); } valid_yaml
- | _INPUT_ { yaml_yyerror(parser, "Invalid TASK Attribute"); } valid_yaml
- | _OUTPUT_ { yaml_yyerror(parser, "Invalid TASK Attribute"); } valid_yaml
- | io_keywords { yaml_yyerror(parser, "Invalid TASK Attribute"); } valid_yaml
- | _INITIALIZE_ { yaml_yyerror(parser, "Invalid TASK Attribute"); } valid_yaml
- | _TARGET_ { yaml_yyerror(parser, "Invalid TASK Attribute"); } valid_yaml
- | valid_yaml_list { yaml_yyerror(parser, "YAML LIST element found in TASK"); } valid_yaml
- | valid_yaml_map { yaml_yyerror(parser, "YAML MAPPING element found in TASK"); } valid_yaml
- | _STRING_
- {
- char buffer[128];
- snprintf(buffer, 128, "%s is not a valid TASK attribute", $1);
- yaml_yyerror(parser, buffer);
- } valid_yaml
- ;
-
-run_item:
- _NAME_ obj_name
- | _SOURCE_ _ID_ { parser_set_source(parser, $2); }
- | _TARGET_ _ID_ { parser_set_target(parser, $2); }
- | _MAP_ _ID_ { parser_set_mapper(parser, $2); }
- | _REDUCE_ _ID_ { parser_set_reducer(parser, $2); }
-
- /* Error recovery */
- | _INITIALIZE_ { yaml_yyerror(parser, "Invalid RUN Attribute"); } valid_yaml
- | doc_keywords { yaml_yyerror(parser, "Invalid RUN Attribute"); } valid_yaml
- | exec_keywords { yaml_yyerror(parser, "Invalid RUN Attribute"); } valid_yaml
- | _TRANSITION_ { yaml_yyerror(parser, "Invalid RUN Attribute"); } valid_yaml
- | _CONSOLIDATE_ { yaml_yyerror(parser, "Invalid RUN Attribute"); } valid_yaml
- | _FINALIZE_ { yaml_yyerror(parser, "Invalid RUN Attribute"); } valid_yaml
- | _INPUT_ { yaml_yyerror(parser, "Invalid RUN Attribute"); } valid_yaml
- | _OUTPUT_ { yaml_yyerror(parser, "Invalid RUN Attribute"); } valid_yaml
- | io_keywords { yaml_yyerror(parser, "Invalid RUN Attribute"); } valid_yaml
- | valid_yaml_list { yaml_yyerror(parser, "YAML LIST element found in RUN"); } valid_yaml
- | valid_yaml_map { yaml_yyerror(parser, "YAML MAPPING element found in RUN"); } valid_yaml
- | _STRING_
- {
- char buffer[128];
- snprintf(buffer, 128, "%s is not a valid RUN attribute", $1);
- yaml_yyerror(parser, buffer);
- } valid_yaml
- ;
-
-
-/* rules primarily created for error handling */
-obj_name:
- _ID_ { parser_set_name(parser, $1); }
-
- /* Error recovery */
- | { parser_set_name(parser, 0); } _STRING_
- | { parser_set_name(parser, 0); } _INTEGER_
- | { parser_set_name(parser, 0); } _VERSION_STRING_
- | { parser_set_name(parser, 0); } valid_yaml_list
- | { parser_set_name(parser, 0); } valid_yaml_map
- | {
- parser_set_name(parser, 0);
- YYERROR; /* Can't recover from unknown error */
- } error
- ;
-
-io_table:
- _ID_ { parser_set_table(parser, $1); }
-
- /* Error recovery */
- | { parser_set_table(parser, 0); } valid_yaml_list
- | { parser_set_table(parser, 0); } valid_yaml_map
- | { parser_set_table(parser, 0); } _STRING_
- | { parser_set_table(parser, 0); } _INTEGER_
- | { parser_set_table(parser, 0); } _VERSION_STRING_
- | {
- parser_set_table(parser, 0);
- YYERROR; /* Can't recover from unknown error */
- } error
- ;
-
-input_query:
- scalar { parser_set_query(parser, $1); }
-
- /* Error recovery */
- | valid_yaml_list { parser_set_query(parser, 0); }
- | valid_yaml_map { parser_set_query(parser, 0); }
- | error
- {
- parser_set_query(parser, 0);
- YYERROR; /* Can't recover from unknown error */
- }
- ;
-
-input_exec:
- scalar { parser_set_exec(parser, $1); }
-
- /* Error recovery */
- | valid_yaml_list { parser_set_exec(parser, 0); }
- | valid_yaml_map { parser_set_exec(parser, 0); }
- | error
- {
- parser_set_exec(parser, 0);
- YYERROR; /* Can't recover from unknown error */
- }
- ;
-
-io_format:
- scalar { parser_set_format(parser, $1); }
-
- /* Error recovery */
- | valid_yaml_list { parser_set_format(parser, 0); }
- | valid_yaml_map { parser_set_format(parser, 0); }
- | error
- {
- parser_set_format(parser, 0);
- YYERROR; /* Can't recover from unknown error */
- }
- ;
-
-
-
-/* Could probably make these more generic */
-file_list:
- scalar { parser_add_file(parser, $1); }
- | START_LIST file_list_2 END_LIST
-
- /* Error recovery */
- | valid_yaml_map { parser_add_file(parser, 0); }
- ;
-
-file_list_2:
- /* empty */
- | file_list_2 scalar { parser_add_file(parser, $2); }
-
- /* Error recovery */
- | valid_yaml_map { parser_add_file(parser, 0); }
- | valid_yaml_list { parser_add_file(parser, 0); }
- ;
-
-column_list:
- scalar { parser_add_column(parser, $1); }
- | START_LIST column_list_2 END_LIST
-
- /* Error recovery */
- | valid_yaml_map { parser_add_column(parser, 0); }
- ;
-
-column_list_2:
- /* empty */
- | column_list_2 scalar { parser_add_column(parser, $2); }
-
- /* Error recovery */
- | valid_yaml_map { parser_add_column(parser, 0); }
- | valid_yaml_list { parser_add_column(parser, 0); }
- ;
-
-parameter_list:
- scalar { parser_add_parameter(parser, $1); }
- | START_LIST parameter_list_2 END_LIST
-
- /* Error recovery */
- | valid_yaml_map { parser_add_parameter(parser, 0); }
- ;
-
-
-parameter_list_2:
- /* empty */
- | parameter_list_2 scalar { parser_add_parameter(parser, $2); }
-
- /* Error recovery */
- | valid_yaml_map { parser_add_parameter(parser, 0); }
- | valid_yaml_list { parser_add_parameter(parser, 0); }
- ;
-
-
-return_list:
- scalar { parser_add_return(parser, $1); }
- | START_LIST return_list_2 END_LIST
-
- | valid_yaml_map { parser_add_return(parser, 0); }
- ;
-
-return_list_2:
- /* empty */
- | return_list_2 scalar { parser_add_return(parser, $2); }
-
- /* Error recovery */
- | valid_yaml_map { parser_add_return(parser, 0); }
- | valid_yaml_list { parser_add_return(parser, 0); }
- ;
-
-key_list:
- scalar { parser_add_key(parser, $1); }
- | START_LIST key_list_2 END_LIST
-
- /* Error recovery */
- | valid_yaml_map { parser_add_key(parser, 0); }
- ;
-
-key_list_2:
- /* empty */
- | key_list_2 scalar { parser_add_key(parser, $2); }
-
- /* Error recovery */
- | valid_yaml_map { parser_add_key(parser, 0); }
- | valid_yaml_list { parser_add_key(parser, 0); }
- ;
-
-
-ordering_list:
- scalar { parser_add_ordering(parser, $1); }
- | START_LIST ordering_list_2 END_LIST
-
- /* Error recovery */
- | valid_yaml_map { parser_add_ordering(parser, 0); }
- ;
-
-ordering_list_2:
- /* empty */
- | ordering_list_2 scalar { parser_add_ordering(parser, $2); }
-
- /* Error recovery */
- | valid_yaml_map { parser_add_ordering(parser, 0); }
- | valid_yaml_list { parser_add_ordering(parser, 0); }
- ;
-
-
-
-
-scalar:
- _STRING_
- | _INTEGER_
- | _ID_
- | _VERSION_STRING_
- ;
-
-
-/* These are used to handle error recovery */
-valid_yaml:
- scalar
- | valid_yaml_list
- | valid_yaml_map
- ;
-valid_yaml_list:
- START_LIST valid_yaml_list_item END_LIST
- ;
-valid_yaml_map:
- START_MAP valid_yaml_map_item END_MAP
- ;
-valid_yaml_list_item:
- | valid_yaml_list_item valid_yaml
- ;
-
-valid_yaml_map_item:
- | valid_yaml_map_item valid_yaml valid_yaml
- | valid_yaml_map_item keyword valid_yaml
- ;
-
-
-%%
-
-static const char EMPTY_STRING[1] = "";
-
-
-/* Called by yyparse on error. */
-void yaml_yyerror (mapred_parser_t *parser, char const *s)
-{
- if (parser->current_doc)
- {
- if (global_verbose_flag)
- fprintf(stderr, " - ");
- parser->current_doc->u.document.flags |=
- mapred_document_error;
- }
- else
- {
- if (global_verbose_flag)
- fprintf(stderr, " - ");
- }
-
- if (parser->yparser->error != YAML_NO_ERROR)
- {
- fprintf(stderr, "Error: YAML syntax error - %s %s, at line %d\n",
- NULL != parser->yparser->problem? parser->yparser->problem: EMPTY_STRING,
- NULL != parser->yparser->context? parser->yparser->context: EMPTY_STRING,
- (int) parser->yparser->problem_mark.line+1);
- }
- else
- {
- fprintf(stderr, "Error: %s, at line %d\n", s,
- (int) parser->event.start_mark.line+1);
- }
-}
-
-#if 0
-#define DEBUG_TOKEN(x) printf("%s\n", x)
-#else
-#define DEBUG_TOKEN(x) do { } while (0)
-#endif
-#if 0
-#define DEBUG_YYTOKEN(x) printf(" YYTOKEN=%d\n", x)
-#else
-#define DEBUG_YYTOKEN(x) do { } while (0)
-#endif
-
-
-/*
- * int yamllex(lval, parser)
- *
- * We use a crazy mix of the yaml parse library and flex/bison to build
- * our parser.
- *
- * The YAML library handles all of the whitespace and flow parsing very
- * cleanly, but it has no domain knowledge of the Greenplum Mapreduce
- * YAML Schema.
- *
- * Coding up the YAML flow stuff in flex/bison is a pain
- *
- * So... we put the parse first through the YAML library parser and use
- * that as our first pass tokenizer to handle all of the YAML document flow
- * proccessing.
- *
- * If it was just that simple then we could feed these tokens into bison and
- * be done, but we also want additional lexical analysis on the scalar values
- * so we feed them back into a flex tokenizer.
- *
- */
-int
-yaml_yylex (YYSTYPE *lvalp, mapred_parser_t *parser)
-{
- if (parser->state == STATE_DONE)
- return 0;
-
- if (parser->state == STATE_SCALAR_LEX)
- {
- int token;
-#if USE_FLEX_REENTRANT
- token = yaml_scalar_yylex(parser->yscanner, parser);
-#else
- token = yaml_scalar_yylex(parser);
-#endif
- if (token)
- {
- DEBUG_YYTOKEN(token);
- return token;
- }
- else
- {
-#if USE_FLEX_REENTRANT
- yaml_scalar_yy_delete_buffer(parser->yscan_buffer, parser->yscanner);
- parser->yscan_buffer = NULL;
-#endif
- parser->state = STATE_YAML_PARSE;
- }
- }
-
- if (parser->state != STATE_YAML_PARSE)
- {
- parser->state = STATE_DONE;
- return ERROR;
- }
-
- if (!yaml_parser_parse(parser->yparser, &parser->event))
- {
- parser->state = STATE_DONE;
- return ERROR;
- }
-
-#if 0
- if (parser->frame < 0)
- printf("no frame\n");
- else if (parser->frame >= MAX_CONTEXT_DEPTH)
- printf("bad frame\n");
- else
- {
- switch (parser->context[parser->frame])
- {
- case CONTEXT_NONE:
- printf("FRAME [NONE]\n");
- break;
-
- case CONTEXT_HASH_KEY:
- printf("FRAME [HASH_KEY]\n");
- break;
-
- case CONTEXT_HASH_VALUE:
- printf("FRAME [HASH_VALUE]\n");
- break;
-
- case CONTEXT_LIST:
- printf("FRAME [LIST]\n");
- break;
-
- default:
- printf("FRAME [BAD FRAME]\n");
- break;
- }
- }
-#endif
-
- while (1) {
- switch (parser->event.type) {
-
- case YAML_NO_EVENT:
- parser->state = STATE_DONE;
- return 0;
-
- case YAML_STREAM_START_EVENT:
- DEBUG_TOKEN("YAML_STREAM_START");
- return START_STREAM;
-
- case YAML_DOCUMENT_START_EVENT:
- DEBUG_TOKEN("YAML_DOCUMENT_START");
- return START_DOCUMENT;
-
- case YAML_MAPPING_START_EVENT:
- if (++parser->frame >= MAX_CONTEXT_DEPTH) {
- fprintf(stderr, "Maximum context depth exceded");
- parser->state = STATE_DONE;
- return END_STREAM;
- }
- parser->context[parser->frame] = CONTEXT_HASH_KEY;
-
- DEBUG_TOKEN("YAML_MAPPING_START");
- return START_MAP;
-
- case YAML_SEQUENCE_START_EVENT:
- if (++parser->frame >= MAX_CONTEXT_DEPTH) {
- printf("Maximum context depth exceded");
- parser->state = STATE_DONE;
- return END_STREAM;
- }
- parser->context[parser->frame] = CONTEXT_LIST;
-
- DEBUG_TOKEN("YAML_SEQUENCE_START");
- return START_LIST;
-
- case YAML_SCALAR_EVENT:
- {
- int token;
- char *value = (char*) parser->event.data.scalar.value;
-
- DEBUG_TOKEN("SCALAR:");
- lvalp->token = value;
-
- /* Switch to the scalar scanner and continue */
- if (value[0] == '\0')
- token = _STRING_;
- else
- {
- parser->state = STATE_SCALAR_LEX;
-
-#if USE_FLEX_REENTRANT
- parser->yscan_buffer =
- yaml_scalar_yy_scan_string(value, parser->yscanner);
-#else
- yaml_scalar_yy_scan_string(value);
-#endif
- token = yaml_yylex(lvalp, parser);
- }
-
- /*
- * If we are in a hash context then we switch between the
- * key states and the value states.
- */
- if (parser->frame >= 0 && parser->frame < MAX_CONTEXT_DEPTH)
- {
- switch (parser->context[parser->frame])
- {
- case CONTEXT_HASH_KEY:
- parser->context[parser->frame] = CONTEXT_HASH_VALUE;
- break;
-
- case CONTEXT_HASH_VALUE:
- parser->context[parser->frame] = CONTEXT_HASH_KEY;
- break;
-
- default:
- break;
- }
- }
- return token;
- }
-
- case YAML_SEQUENCE_END_EVENT:
- if (--parser->frame < -1)
- parser->frame = -1;
-
- /*
- * If the sequence was the value pair of a hash then the next
- * scalar will be a key
- */
- if (parser->frame >= 0 && parser->frame < MAX_CONTEXT_DEPTH &&
- parser->context[parser->frame] == CONTEXT_HASH_VALUE)
- parser->context[parser->frame] = CONTEXT_HASH_KEY;
-
- DEBUG_TOKEN("YAML_SEQUENCE_END");
- return END_LIST;
-
- case YAML_MAPPING_END_EVENT:
- {
- /* Pop the parser frame stack */
- if (--parser->frame < -1)
- parser->frame = -1;
-
- /*
- * If the mapping was the value pair of a hash then the next
- * scalar will be a key
- */
- if (parser->frame >= 0 && parser->frame < MAX_CONTEXT_DEPTH &&
- parser->context[parser->frame] == CONTEXT_HASH_VALUE)
- parser->context[parser->frame] = CONTEXT_HASH_KEY;
-
- DEBUG_TOKEN("YAML_MAPPING_END");
- return END_MAP;
- }
-
- case YAML_DOCUMENT_END_EVENT:
- DEBUG_TOKEN("YAML_DOCUMENT_END");
- return END_DOCUMENT;
-
- case YAML_STREAM_END_EVENT:
- DEBUG_TOKEN("YAML_STREAM_END");
- return END_STREAM;
-
- default:
- printf("WARNING: Unknown event %d\n", parser->event.type);
- }
-
- yaml_event_delete(&parser->event);
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/4e392375/src/bin/gpmapreduce/src/yaml_private.h
----------------------------------------------------------------------
diff --git a/src/bin/gpmapreduce/src/yaml_private.h b/src/bin/gpmapreduce/src/yaml_private.h
deleted file mode 100644
index 6320caf..0000000
--- a/src/bin/gpmapreduce/src/yaml_private.h
+++ /dev/null
@@ -1,635 +0,0 @@
-
-#if HAVE_CONFIG_H
-#include <config.h>
-#endif
-
-#include <yaml.h>
-
-#include <assert.h>
-#include <limits.h>
-
-/*
- * Memory management.
- */
-
-YAML_DECLARE(void *)
-yaml_malloc(size_t size);
-
-YAML_DECLARE(void *)
-yaml_realloc(void *ptr, size_t size);
-
-YAML_DECLARE(void)
-yaml_free(void *ptr);
-
-YAML_DECLARE(yaml_char_t *)
-yaml_strdup(const yaml_char_t *);
-
-/*
- * Reader: Ensure that the buffer contains at least `length` characters.
- */
-
-YAML_DECLARE(int)
-yaml_parser_update_buffer(yaml_parser_t *parser, size_t length);
-
-/*
- * Scanner: Ensure that the token stack contains at least one token ready.
- */
-
-YAML_DECLARE(int)
-yaml_parser_fetch_more_tokens(yaml_parser_t *parser);
-
-/*
- * The size of the input raw buffer.
- */
-
-#define INPUT_RAW_BUFFER_SIZE 16384
-
-/*
- * The size of the input buffer.
- *
- * It should be possible to decode the whole raw buffer.
- */
-
-#define INPUT_BUFFER_SIZE (INPUT_RAW_BUFFER_SIZE*3)
-
-/*
- * The size of the output buffer.
- */
-
-#define OUTPUT_BUFFER_SIZE 16384
-
-/*
- * The size of the output raw buffer.
- *
- * It should be possible to encode the whole output buffer.
- */
-
-#define OUTPUT_RAW_BUFFER_SIZE (OUTPUT_BUFFER_SIZE*2+2)
-
-/*
- * The size of other stacks and queues.
- */
-
-#define INITIAL_STACK_SIZE 16
-#define INITIAL_QUEUE_SIZE 16
-#define INITIAL_STRING_SIZE 16
-
-/*
- * Buffer management.
- */
-
-#define BUFFER_INIT(context,buffer,size) \
- (((buffer).start = yaml_malloc(size)) ? \
- ((buffer).last = (buffer).pointer = (buffer).start, \
- (buffer).end = (buffer).start+(size), \
- 1) : \
- ((context)->error = YAML_MEMORY_ERROR, \
- 0))
-
-#define BUFFER_DEL(context,buffer) \
- (yaml_free((buffer).start), \
- (buffer).start = (buffer).pointer = (buffer).end = 0)
-
-/*
- * String management.
- */
-
-typedef struct {
- yaml_char_t *start;
- yaml_char_t *end;
- yaml_char_t *pointer;
-} yaml_string_t;
-
-YAML_DECLARE(int)
-yaml_string_extend(yaml_char_t **start,
- yaml_char_t **pointer, yaml_char_t **end);
-
-YAML_DECLARE(int)
-yaml_string_join(
- yaml_char_t **a_start, yaml_char_t **a_pointer, yaml_char_t **a_end,
- yaml_char_t **b_start, yaml_char_t **b_pointer, yaml_char_t **b_end);
-
-#define NULL_STRING { NULL, NULL, NULL }
-
-#define STRING(string,length) { (string), (string)+(length), (string) }
-
-#define STRING_INIT(context,string,size) \
- (((string).start = yaml_malloc(size)) ? \
- ((string).pointer = (string).start, \
- (string).end = (string).start+(size), \
- memset((string).start, 0, (size)), \
- 1) : \
- ((context)->error = YAML_MEMORY_ERROR, \
- 0))
-
-#define STRING_DEL(context,string) \
- (yaml_free((string).start), \
- (string).start = (string).pointer = (string).end = 0)
-
-#define STRING_EXTEND(context,string) \
- (((string).pointer+5 < (string).end) \
- || yaml_string_extend(&(string).start, \
- &(string).pointer, &(string).end))
-
-#define CLEAR(context,string) \
- ((string).pointer = (string).start, \
- memset((string).start, 0, (string).end-(string).start))
-
-#define JOIN(context,string_a,string_b) \
- ((yaml_string_join(&(string_a).start, &(string_a).pointer, \
- &(string_a).end, &(string_b).start, \
- &(string_b).pointer, &(string_b).end)) ? \
- ((string_b).pointer = (string_b).start, \
- 1) : \
- ((context)->error = YAML_MEMORY_ERROR, \
- 0))
-
-/*
- * String check operations.
- */
-
-/*
- * Check the octet at the specified position.
- */
-
-#define CHECK_AT(string,octet,offset) \
- ((string).pointer[offset] == (yaml_char_t)(octet))
-
-/*
- * Check the current octet in the buffer.
- */
-
-#define CHECK(string,octet) CHECK_AT((string),(octet),0)
-
-/*
- * Check if the character at the specified position is an alphabetical
- * character, a digit, '_', or '-'.
- */
-
-#define IS_ALPHA_AT(string,offset) \
- (((string).pointer[offset] >= (yaml_char_t) '0' && \
- (string).pointer[offset] <= (yaml_char_t) '9') || \
- ((string).pointer[offset] >= (yaml_char_t) 'A' && \
- (string).pointer[offset] <= (yaml_char_t) 'Z') || \
- ((string).pointer[offset] >= (yaml_char_t) 'a' && \
- (string).pointer[offset] <= (yaml_char_t) 'z') || \
- (string).pointer[offset] == '_' || \
- (string).pointer[offset] == '-')
-
-#define IS_ALPHA(string) IS_ALPHA_AT((string),0)
-
-/*
- * Check if the character at the specified position is a digit.
- */
-
-#define IS_DIGIT_AT(string,offset) \
- (((string).pointer[offset] >= (yaml_char_t) '0' && \
- (string).pointer[offset] <= (yaml_char_t) '9'))
-
-#define IS_DIGIT(string) IS_DIGIT_AT((string),0)
-
-/*
- * Get the value of a digit.
- */
-
-#define AS_DIGIT_AT(string,offset) \
- ((string).pointer[offset] - (yaml_char_t) '0')
-
-#define AS_DIGIT(string) AS_DIGIT_AT((string),0)
-
-/*
- * Check if the character at the specified position is a hex-digit.
- */
-
-#define IS_HEX_AT(string,offset) \
- (((string).pointer[offset] >= (yaml_char_t) '0' && \
- (string).pointer[offset] <= (yaml_char_t) '9') || \
- ((string).pointer[offset] >= (yaml_char_t) 'A' && \
- (string).pointer[offset] <= (yaml_char_t) 'F') || \
- ((string).pointer[offset] >= (yaml_char_t) 'a' && \
- (string).pointer[offset] <= (yaml_char_t) 'f'))
-
-#define IS_HEX(string) IS_HEX_AT((string),0)
-
-/*
- * Get the value of a hex-digit.
- */
-
-#define AS_HEX_AT(string,offset) \
- (((string).pointer[offset] >= (yaml_char_t) 'A' && \
- (string).pointer[offset] <= (yaml_char_t) 'F') ? \
- ((string).pointer[offset] - (yaml_char_t) 'A' + 10) : \
- ((string).pointer[offset] >= (yaml_char_t) 'a' && \
- (string).pointer[offset] <= (yaml_char_t) 'f') ? \
- ((string).pointer[offset] - (yaml_char_t) 'a' + 10) : \
- ((string).pointer[offset] - (yaml_char_t) '0'))
-
-#define AS_HEX(string) AS_HEX_AT((string),0)
-
-/*
- * Check if the character is ASCII.
- */
-
-#define IS_ASCII_AT(string,offset) \
- ((string).pointer[offset] <= (yaml_char_t) '\x7F')
-
-#define IS_ASCII(string) IS_ASCII_AT((string),0)
-
-/*
- * Check if the character can be printed unescaped.
- */
-
-#define IS_PRINTABLE_AT(string,offset) \
- (((string).pointer[offset] == 0x0A) /* . == #x0A */ \
- || ((string).pointer[offset] >= 0x20 /* #x20 <= . <= #x7E */ \
- && (string).pointer[offset] <= 0x7E) \
- || ((string).pointer[offset] == 0xC2 /* #0xA0 <= . <= #xD7FF */ \
- && (string).pointer[offset+1] >= 0xA0) \
- || ((string).pointer[offset] > 0xC2 \
- && (string).pointer[offset] < 0xED) \
- || ((string).pointer[offset] == 0xED \
- && (string).pointer[offset+1] < 0xA0) \
- || ((string).pointer[offset] == 0xEE) \
- || ((string).pointer[offset] == 0xEF /* #xE000 <= . <= #xFFFD */ \
- && !((string).pointer[offset+1] == 0xBB /* && . != #xFEFF */ \
- && (string).pointer[offset+2] == 0xBF) \
- && !((string).pointer[offset+1] == 0xBF \
- && ((string).pointer[offset+2] == 0xBE \
- || (string).pointer[offset+2] == 0xBF))))
-
-#define IS_PRINTABLE(string) IS_PRINTABLE_AT((string),0)
-
-/*
- * Check if the character at the specified position is NUL.
- */
-
-#define IS_Z_AT(string,offset) CHECK_AT((string),'\0',(offset))
-
-#define IS_Z(string) IS_Z_AT((string),0)
-
-/*
- * Check if the character at the specified position is BOM.
- */
-
-#define IS_BOM_AT(string,offset) \
- (CHECK_AT((string),'\xEF',(offset)) \
- && CHECK_AT((string),'\xBB',(offset)+1) \
- && CHECK_AT((string),'\xBF',(offset)+2)) /* BOM (#xFEFF) */
-
-#define IS_BOM(string) IS_BOM_AT(string,0)
-
-/*
- * Check if the character at the specified position is space.
- */
-
-#define IS_SPACE_AT(string,offset) CHECK_AT((string),' ',(offset))
-
-#define IS_SPACE(string) IS_SPACE_AT((string),0)
-
-/*
- * Check if the character at the specified position is tab.
- */
-
-#define IS_TAB_AT(string,offset) CHECK_AT((string),'\t',(offset))
-
-#define IS_TAB(string) IS_TAB_AT((string),0)
-
-/*
- * Check if the character at the specified position is blank (space or tab).
- */
-
-#define IS_BLANK_AT(string,offset) \
- (IS_SPACE_AT((string),(offset)) || IS_TAB_AT((string),(offset)))
-
-#define IS_BLANK(string) IS_BLANK_AT((string),0)
-
-/*
- * Check if the character at the specified position is a line break.
- */
-
-#define IS_BREAK_AT(string,offset) \
- (CHECK_AT((string),'\r',(offset)) /* CR (#xD)*/ \
- || CHECK_AT((string),'\n',(offset)) /* LF (#xA) */ \
- || (CHECK_AT((string),'\xC2',(offset)) \
- && CHECK_AT((string),'\x85',(offset)+1)) /* NEL (#x85) */ \
- || (CHECK_AT((string),'\xE2',(offset)) \
- && CHECK_AT((string),'\x80',(offset)+1) \
- && CHECK_AT((string),'\xA8',(offset)+2)) /* LS (#x2028) */ \
- || (CHECK_AT((string),'\xE2',(offset)) \
- && CHECK_AT((string),'\x80',(offset)+1) \
- && CHECK_AT((string),'\xA9',(offset)+2))) /* PS (#x2029) */
-
-#define IS_BREAK(string) IS_BREAK_AT((string),0)
-
-#define IS_CRLF_AT(string,offset) \
- (CHECK_AT((string),'\r',(offset)) && CHECK_AT((string),'\n',(offset)+1))
-
-#define IS_CRLF(string) IS_CRLF_AT((string),0)
-
-/*
- * Check if the character is a line break or NUL.
- */
-
-#define IS_BREAKZ_AT(string,offset) \
- (IS_BREAK_AT((string),(offset)) || IS_Z_AT((string),(offset)))
-
-#define IS_BREAKZ(string) IS_BREAKZ_AT((string),0)
-
-/*
- * Check if the character is a line break, space, or NUL.
- */
-
-#define IS_SPACEZ_AT(string,offset) \
- (IS_SPACE_AT((string),(offset)) || IS_BREAKZ_AT((string),(offset)))
-
-#define IS_SPACEZ(string) IS_SPACEZ_AT((string),0)
-
-/*
- * Check if the character is a line break, space, tab, or NUL.
- */
-
-#define IS_BLANKZ_AT(string,offset) \
- (IS_BLANK_AT((string),(offset)) || IS_BREAKZ_AT((string),(offset)))
-
-#define IS_BLANKZ(string) IS_BLANKZ_AT((string),0)
-
-/*
- * Determine the width of the character.
- */
-
-#define WIDTH_AT(string,offset) \
- (((string).pointer[offset] & 0x80) == 0x00 ? 1 : \
- ((string).pointer[offset] & 0xE0) == 0xC0 ? 2 : \
- ((string).pointer[offset] & 0xF0) == 0xE0 ? 3 : \
- ((string).pointer[offset] & 0xF8) == 0xF0 ? 4 : 0)
-
-#define WIDTH(string) WIDTH_AT((string),0)
-
-/*
- * Move the string pointer to the next character.
- */
-
-#define MOVE(string) ((string).pointer += WIDTH((string)))
-
-/*
- * Copy a character and move the pointers of both strings.
- */
-
-#define COPY(string_a,string_b) \
- ((*(string_b).pointer & 0x80) == 0x00 ? \
- (*((string_a).pointer++) = *((string_b).pointer++)) : \
- (*(string_b).pointer & 0xE0) == 0xC0 ? \
- (*((string_a).pointer++) = *((string_b).pointer++), \
- *((string_a).pointer++) = *((string_b).pointer++)) : \
- (*(string_b).pointer & 0xF0) == 0xE0 ? \
- (*((string_a).pointer++) = *((string_b).pointer++), \
- *((string_a).pointer++) = *((string_b).pointer++), \
- *((string_a).pointer++) = *((string_b).pointer++)) : \
- (*(string_b).pointer & 0xF8) == 0xF0 ? \
- (*((string_a).pointer++) = *((string_b).pointer++), \
- *((string_a).pointer++) = *((string_b).pointer++), \
- *((string_a).pointer++) = *((string_b).pointer++), \
- *((string_a).pointer++) = *((string_b).pointer++)) : 0)
-
-/*
- * Stack and queue management.
- */
-
-YAML_DECLARE(int)
-yaml_stack_extend(void **start, void **top, void **end);
-
-YAML_DECLARE(int)
-yaml_queue_extend(void **start, void **head, void **tail, void **end);
-
-#define STACK_INIT(context,stack,size) \
- (((stack).start = yaml_malloc((size)*sizeof(*(stack).start))) ? \
- ((stack).top = (stack).start, \
- (stack).end = (stack).start+(size), \
- 1) : \
- ((context)->error = YAML_MEMORY_ERROR, \
- 0))
-
-#define STACK_DEL(context,stack) \
- (yaml_free((stack).start), \
- (stack).start = (stack).top = (stack).end = 0)
-
-#define STACK_EMPTY(context,stack) \
- ((stack).start == (stack).top)
-
-#define PUSH(context,stack,value) \
- (((stack).top != (stack).end \
- || yaml_stack_extend((void **)&(stack).start, \
- (void **)&(stack).top, (void **)&(stack).end)) ? \
- (*((stack).top++) = value, \
- 1) : \
- ((context)->error = YAML_MEMORY_ERROR, \
- 0))
-
-#define POP(context,stack) \
- (*(--(stack).top))
-
-#define QUEUE_INIT(context,queue,size) \
- (((queue).start = yaml_malloc((size)*sizeof(*(queue).start))) ? \
- ((queue).head = (queue).tail = (queue).start, \
- (queue).end = (queue).start+(size), \
- 1) : \
- ((context)->error = YAML_MEMORY_ERROR, \
- 0))
-
-#define QUEUE_DEL(context,queue) \
- (yaml_free((queue).start), \
- (queue).start = (queue).head = (queue).tail = (queue).end = 0)
-
-#define QUEUE_EMPTY(context,queue) \
- ((queue).head == (queue).tail)
-
-#define ENQUEUE(context,queue,value) \
- (((queue).tail != (queue).end \
- || yaml_queue_extend((void **)&(queue).start, (void **)&(queue).head, \
- (void **)&(queue).tail, (void **)&(queue).end)) ? \
- (*((queue).tail++) = value, \
- 1) : \
- ((context)->error = YAML_MEMORY_ERROR, \
- 0))
-
-#define DEQUEUE(context,queue) \
- (*((queue).head++))
-
-#define QUEUE_INSERT(context,queue,index,value) \
- (((queue).tail != (queue).end \
- || yaml_queue_extend((void **)&(queue).start, (void **)&(queue).head, \
- (void **)&(queue).tail, (void **)&(queue).end)) ? \
- (memmove((queue).head+(index)+1,(queue).head+(index), \
- ((queue).tail-(queue).head-(index))*sizeof(*(queue).start)), \
- *((queue).head+(index)) = value, \
- (queue).tail++, \
- 1) : \
- ((context)->error = YAML_MEMORY_ERROR, \
- 0))
-
-/*
- * Token initializers.
- */
-
-#define TOKEN_INIT(token,token_type,token_start_mark,token_end_mark) \
- (memset(&(token), 0, sizeof(yaml_token_t)), \
- (token).type = (token_type), \
- (token).start_mark = (token_start_mark), \
- (token).end_mark = (token_end_mark))
-
-#define STREAM_START_TOKEN_INIT(token,token_encoding,start_mark,end_mark) \
- (TOKEN_INIT((token),YAML_STREAM_START_TOKEN,(start_mark),(end_mark)), \
- (token).data.stream_start.encoding = (token_encoding))
-
-#define STREAM_END_TOKEN_INIT(token,start_mark,end_mark) \
- (TOKEN_INIT((token),YAML_STREAM_END_TOKEN,(start_mark),(end_mark)))
-
-#define ALIAS_TOKEN_INIT(token,token_value,start_mark,end_mark) \
- (TOKEN_INIT((token),YAML_ALIAS_TOKEN,(start_mark),(end_mark)), \
- (token).data.alias.value = (token_value))
-
-#define ANCHOR_TOKEN_INIT(token,token_value,start_mark,end_mark) \
- (TOKEN_INIT((token),YAML_ANCHOR_TOKEN,(start_mark),(end_mark)), \
- (token).data.anchor.value = (token_value))
-
-#define TAG_TOKEN_INIT(token,token_handle,token_suffix,start_mark,end_mark) \
- (TOKEN_INIT((token),YAML_TAG_TOKEN,(start_mark),(end_mark)), \
- (token).data.tag.handle = (token_handle), \
- (token).data.tag.suffix = (token_suffix))
-
-#define SCALAR_TOKEN_INIT(token,token_value,token_length,token_style,start_mark,end_mark) \
- (TOKEN_INIT((token),YAML_SCALAR_TOKEN,(start_mark),(end_mark)), \
- (token).data.scalar.value = (token_value), \
- (token).data.scalar.length = (token_length), \
- (token).data.scalar.style = (token_style))
-
-#define VERSION_DIRECTIVE_TOKEN_INIT(token,token_major,token_minor,start_mark,end_mark) \
- (TOKEN_INIT((token),YAML_VERSION_DIRECTIVE_TOKEN,(start_mark),(end_mark)), \
- (token).data.version_directive.major = (token_major), \
- (token).data.version_directive.minor = (token_minor))
-
-#define TAG_DIRECTIVE_TOKEN_INIT(token,token_handle,token_prefix,start_mark,end_mark) \
- (TOKEN_INIT((token),YAML_TAG_DIRECTIVE_TOKEN,(start_mark),(end_mark)), \
- (token).data.tag_directive.handle = (token_handle), \
- (token).data.tag_directive.prefix = (token_prefix))
-
-/*
- * Event initializers.
- */
-
-#define EVENT_INIT(event,event_type,event_start_mark,event_end_mark) \
- (memset(&(event), 0, sizeof(yaml_event_t)), \
- (event).type = (event_type), \
- (event).start_mark = (event_start_mark), \
- (event).end_mark = (event_end_mark))
-
-#define STREAM_START_EVENT_INIT(event,event_encoding,start_mark,end_mark) \
- (EVENT_INIT((event),YAML_STREAM_START_EVENT,(start_mark),(end_mark)), \
- (event).data.stream_start.encoding = (event_encoding))
-
-#define STREAM_END_EVENT_INIT(event,start_mark,end_mark) \
- (EVENT_INIT((event),YAML_STREAM_END_EVENT,(start_mark),(end_mark)))
-
-#define DOCUMENT_START_EVENT_INIT(event,event_version_directive, \
- event_tag_directives_start,event_tag_directives_end,event_implicit,start_mark,end_mark) \
- (EVENT_INIT((event),YAML_DOCUMENT_START_EVENT,(start_mark),(end_mark)), \
- (event).data.document_start.version_directive = (event_version_directive), \
- (event).data.document_start.tag_directives.start = (event_tag_directives_start), \
- (event).data.document_start.tag_directives.end = (event_tag_directives_end), \
- (event).data.document_start.implicit = (event_implicit))
-
-#define DOCUMENT_END_EVENT_INIT(event,event_implicit,start_mark,end_mark) \
- (EVENT_INIT((event),YAML_DOCUMENT_END_EVENT,(start_mark),(end_mark)), \
- (event).data.document_end.implicit = (event_implicit))
-
-#define ALIAS_EVENT_INIT(event,event_anchor,start_mark,end_mark) \
- (EVENT_INIT((event),YAML_ALIAS_EVENT,(start_mark),(end_mark)), \
- (event).data.alias.anchor = (event_anchor))
-
-#define SCALAR_EVENT_INIT(event,event_anchor,event_tag,event_value,event_length, \
- event_plain_implicit, event_quoted_implicit,event_style,start_mark,end_mark) \
- (EVENT_INIT((event),YAML_SCALAR_EVENT,(start_mark),(end_mark)), \
- (event).data.scalar.anchor = (event_anchor), \
- (event).data.scalar.tag = (event_tag), \
- (event).data.scalar.value = (event_value), \
- (event).data.scalar.length = (event_length), \
- (event).data.scalar.plain_implicit = (event_plain_implicit), \
- (event).data.scalar.quoted_implicit = (event_quoted_implicit), \
- (event).data.scalar.style = (event_style))
-
-#define SEQUENCE_START_EVENT_INIT(event,event_anchor,event_tag, \
- event_implicit,event_style,start_mark,end_mark) \
- (EVENT_INIT((event),YAML_SEQUENCE_START_EVENT,(start_mark),(end_mark)), \
- (event).data.sequence_start.anchor = (event_anchor), \
- (event).data.sequence_start.tag = (event_tag), \
- (event).data.sequence_start.implicit = (event_implicit), \
- (event).data.sequence_start.style = (event_style))
-
-#define SEQUENCE_END_EVENT_INIT(event,start_mark,end_mark) \
- (EVENT_INIT((event),YAML_SEQUENCE_END_EVENT,(start_mark),(end_mark)))
-
-#define MAPPING_START_EVENT_INIT(event,event_anchor,event_tag, \
- event_implicit,event_style,start_mark,end_mark) \
- (EVENT_INIT((event),YAML_MAPPING_START_EVENT,(start_mark),(end_mark)), \
- (event).data.mapping_start.anchor = (event_anchor), \
- (event).data.mapping_start.tag = (event_tag), \
- (event).data.mapping_start.implicit = (event_implicit), \
- (event).data.mapping_start.style = (event_style))
-
-#define MAPPING_END_EVENT_INIT(event,start_mark,end_mark) \
- (EVENT_INIT((event),YAML_MAPPING_END_EVENT,(start_mark),(end_mark)))
-
-/*
- * Document initializer.
- */
-
-#define DOCUMENT_INIT(document,document_nodes_start,document_nodes_end, \
- document_version_directive,document_tag_directives_start, \
- document_tag_directives_end,document_start_implicit, \
- document_end_implicit,document_start_mark,document_end_mark) \
- (memset(&(document), 0, sizeof(yaml_document_t)), \
- (document).nodes.start = (document_nodes_start), \
- (document).nodes.end = (document_nodes_end), \
- (document).nodes.top = (document_nodes_start), \
- (document).version_directive = (document_version_directive), \
- (document).tag_directives.start = (document_tag_directives_start), \
- (document).tag_directives.end = (document_tag_directives_end), \
- (document).start_implicit = (document_start_implicit), \
- (document).end_implicit = (document_end_implicit), \
- (document).start_mark = (document_start_mark), \
- (document).end_mark = (document_end_mark))
-
-/*
- * Node initializers.
- */
-
-#define NODE_INIT(node,node_type,node_tag,node_start_mark,node_end_mark) \
- (memset(&(node), 0, sizeof(yaml_node_t)), \
- (node).type = (node_type), \
- (node).tag = (node_tag), \
- (node).start_mark = (node_start_mark), \
- (node).end_mark = (node_end_mark))
-
-#define SCALAR_NODE_INIT(node,node_tag,node_value,node_length, \
- node_style,start_mark,end_mark) \
- (NODE_INIT((node),YAML_SCALAR_NODE,(node_tag),(start_mark),(end_mark)), \
- (node).data.scalar.value = (node_value), \
- (node).data.scalar.length = (node_length), \
- (node).data.scalar.style = (node_style))
-
-#define SEQUENCE_NODE_INIT(node,node_tag,node_items_start,node_items_end, \
- node_style,start_mark,end_mark) \
- (NODE_INIT((node),YAML_SEQUENCE_NODE,(node_tag),(start_mark),(end_mark)), \
- (node).data.sequence.items.start = (node_items_start), \
- (node).data.sequence.items.end = (node_items_end), \
- (node).data.sequence.items.top = (node_items_start), \
- (node).data.sequence.style = (node_style))
-
-#define MAPPING_NODE_INIT(node,node_tag,node_pairs_start,node_pairs_end, \
- node_style,start_mark,end_mark) \
- (NODE_INIT((node),YAML_MAPPING_NODE,(node_tag),(start_mark),(end_mark)), \
- (node).data.mapping.pairs.start = (node_pairs_start), \
- (node).data.mapping.pairs.end = (node_pairs_end), \
- (node).data.mapping.pairs.top = (node_pairs_start), \
- (node).data.mapping.style = (node_style))
-
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/4e392375/src/bin/gpmapreduce/src/yaml_scan.l
----------------------------------------------------------------------
diff --git a/src/bin/gpmapreduce/src/yaml_scan.l b/src/bin/gpmapreduce/src/yaml_scan.l
deleted file mode 100644
index e91d011..0000000
--- a/src/bin/gpmapreduce/src/yaml_scan.l
+++ /dev/null
@@ -1,138 +0,0 @@
-%option outfile="yaml_scan.c"
-%option 8bit never-interactive nodefault nounput noyywrap noinput
-%option prefix="yaml_scalar_yy"
-
-
-/*
- * This would be nice, but it isn't necessary and isn't supported by
- * the old version of flex we have on the build machines.
- *
- * If this ever changes make sure to edit parser.h to #define the
- * relevant items.
-*/
-/* %option header-file="yaml_scan.h" */
-/* %option reentrant */
-
-%{
-#include <yaml_parse.h> /* Get the token types */
-#include <parser.h>
-
-/*
- * Setup prototypes since we build with -Wmissing-prototypes and flex isn't
- * kind enough to generate them.
- */
-#define unify_version(a,b,c) ((a<<16)+(b<<8)+c)
-#if unify_version(YY_FLEX_MAJOR_VERSION,YY_FLEX_MINOR_VERSION,YY_FLEX_SUBMINOR_VERSION) < unify_version(2,5,35)
-int yaml_scalar_yyget_lineno(void);
-FILE* yaml_scalar_yyget_in(void);
-FILE* yaml_scalar_yyget_out(void);
-int yaml_scalar_yyget_leng(void);
-char* yaml_scalar_yyget_text(void);
-
-void yaml_scalar_yyset_lineno(int);
-void yaml_scalar_yyset_in(FILE *);
-void yaml_scalar_yyset_out(FILE *);
-
-int yaml_scalar_yyget_debug(void);
-void yaml_scalar_yyset_debug(int);
-
-int yaml_scalar_yylex_destroy(void);
-#endif
-
-%}
- /*
- * States:
- * INITIAL = default state
- * <kw> = keyword
- */
-%x kw
-%%
-
- /* Look at our parser context and set the current state accordingly */
- if (parser->frame < 0 || parser->frame >= MAX_CONTEXT_DEPTH)
- BEGIN(INITIAL);
- else
- {
- switch (parser->context[parser->frame])
- {
- case CONTEXT_NONE:
- case CONTEXT_HASH_VALUE:
- case CONTEXT_LIST:
- BEGIN(INITIAL);
- break;
-
- case CONTEXT_HASH_KEY:
- BEGIN(kw); /* Switch to keyword context */
- break;
-
- default:
- printf("WARNING: bad parse context state\n");
- BEGIN(INITIAL);
- }
- }
-
-
- /* Keyword tokens */
-<kw>columns return _COLUMNS_;
-<kw>consolidate return _CONSOLIDATE_;
-<kw>database return _DATABASE_;
-<kw>define return _DEFINE_;
-<kw>delimiter return _DELIMITER_;
-<kw>encoding return _ENCODING_;
-<kw>error_limit return _ERROR_LIMIT_;
-<kw>escape return _ESCAPE_;
-<kw>exec return _EXEC_;
-<kw>execute return _EXECUTE_;
-<kw>file return _FILE_;
-<kw>finalize return _FINALIZE_;
-<kw>format return _FORMAT_;
-<kw>function return _FUNCTION_;
-<kw>gpfdist return _GPFDIST_;
-<kw>host return _HOST_;
-<kw>initialize return _INITIALIZE_;
-<kw>input return _INPUT_;
-<kw>keys return _KEYS_;
-<kw>language return _LANGUAGE_;
-<kw>library return _LIBRARY_;
-<kw>map return _MAP_;
-<kw>mode return _MODE_;
-<kw>name return _NAME_;
-<kw>null return _NULL_;
-<kw>optimize return _OPTIMIZE_;
-<kw>output return _OUTPUT_;
-<kw>parameters return _PARAMETERS_;
-<kw>port return _PORT_;
-<kw>query return _QUERY_;
-<kw>quote return _QUOTE_;
-<kw>reduce return _REDUCE_;
-<kw>returns return _RETURNS_;
-<kw>run return _RUN_;
-<kw>source return _SOURCE_;
-<kw>table return _TABLE_;
-<kw>target return _TARGET_;
-<kw>task return _TASK_;
-<kw>transition return _TRANSITION_;
-<kw>user return _USER_;
-<kw>version return _VERSION_;
-<kw>ordering return _ORDERING_;
-
- /* Matching specific fields */
-[a-z_]([a-z0-9_])* return _ID_;
-
- /* This is a bit of a hack to support schema qualification, the correct
- * way to do this would require changing the lexer to be reentrant within
- * yaml scalar values which would be a lot more work.
- */
-[a-z_]([a-z0-9_])*\.[a-z_]([a-z0-9_])* return _ID_;
-
-[0-9]+ return _INTEGER_;
-[1-9][0-9]*(\.[0-9]){3} return _VERSION_STRING_;
-
- /*
- * We will be calling this on the SCALAR tokens in a yaml file, if the
- * token doesn't exactly match one of the predefined tokens above then
- * just return it as a 'STRING'
- */
-<*>.*(\n.*)* return _STRING_;
-<<EOF>> return 0;
-%%
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/4e392375/src/bin/gpmapreduce/test/Makefile
----------------------------------------------------------------------
diff --git a/src/bin/gpmapreduce/test/Makefile b/src/bin/gpmapreduce/test/Makefile
deleted file mode 100644
index 16e8994..0000000
--- a/src/bin/gpmapreduce/test/Makefile
+++ /dev/null
@@ -1,100 +0,0 @@
-
-MAPRED = ../gpmapreduce
-PSQL = psql
-FIXUP = bin/fixup.pl
-MRDIFF = bin/mrdiff.pl
-
-DB = mapreduce_test
-MFLAGS = $(DB) --key="clar" --file
-PFLAGS = -X $(DB) --file
-
-INDIR = input
-DATADIR = data
-MASTERDIR = expected
-
-OUTDIR = output
-YMLDIR = yml
-SQLDIR = sql
-
-YMLIN = $(shell ls $(INDIR)/*.yml.in)
-YML = $(notdir $(basename $(YMLIN)))
-YMLOUT = $(addprefix $(YMLDIR)/, $(YML))
-
-TESTS = $(addprefix $(OUTDIR)/,$(addsuffix .suc, $(basename $(YML))))
-MAILDATA = $(addprefix $(shell pwd)/,$(DATADIR)/email/mailfiles)
-
-all: libs setup $(YMLDIR) $(OUTDIR) $(YMLOUT) $(MAILDATA) $(TESTS)
- @printf "\n=======================\n"
- @if [ -s $(OUTDIR)/FAIL ] ; then \
- export nfailed=`cat $(OUTDIR)/FAIL | wc -l`; \
- export ntotal=`ls -1 $(OUTDIR)/*.suc $(OUTDIR)/*.fail | wc -l`; \
- printf " %d of %d tests failed. " $$nfailed $$ntotal; \
- else \
- export ntotal=`ls -1 $(OUTDIR)/*.suc | wc -l`; \
- printf " All %d tests passed. " $$ntotal; \
- fi
- @printf "\n=======================\n"
-
-libs:
- $(MAKE) -C lib
- cp -f lib/*.so $(GPHOME)/lib/postgresql/.
-
-setup:
- @printf "============== dropping database \"$(DB)\" ==============\n"
- @psql -X -c "drop database $(DB)" template1 2>/dev/null || echo "DROP DATABASE"
- @printf "============== creating database \"$(DB)\" ==============\n"
- @printf "============== installing plpython ==============\n"
- @psql -X -c "create database $(DB)" template1
- @psql -X -c "create language plpythonu" -d $(DB)
- @printf "============== running mapreduce tests ==============\n"
- @rm -rf $(OUTDIR)
-
-clean:
- rm -rf $(YMLDIR)
- rm -rf $(OUTDIR)
- rm -rf $(MAILDATA)
- @dropdb $(DB) 2>/dev/null || echo > /dev/null
-
-
-%.test: $(OUTDIR)/%.suc
-
-$(OUTDIR):
- @mkdir $(OUTDIR)
-
-$(MAILDATA):
- @ls -1 $(dir $(MAILDATA))*.txt > $(MAILDATA)
- @chmod u+w $(MAILDATA)
-
-$(YMLDIR):
- @mkdir $(YMLDIR)
-
-$(YMLDIR)/%.yml: $(INDIR)/%.yml.in
- @$(FIXUP) $(INDIR)/$*.yml.in
- @mv $(INDIR)/$*.yml $(YMLDIR)/.
-
-$(OUTDIR)/%.suc: $(MAPRED) $(YMLDIR)/%.yml $(MASTERDIR)/%.out $(MASTERDIR)/%.err
- @printf 'test %-20s ... ' $*
- @rm -f $(OUTDIR)/$*.suc $(OUTDIR)/$*.fail
- @touch $(OUTDIR)/$*.out $(OUTDIR)/$*.err
- @cp -f $(MASTERDIR)/DEFAULT.out $(OUTDIR)/$*.out
- @cp -f $(MASTERDIR)/DEFAULT.err $(OUTDIR)/$*.err
- @if [ -e $(SQLDIR)/$*_init.sql ] ; \
- then $(PSQL) $(PFLAGS) $(SQLDIR)/$*_init.sql > $(OUTDIR)/$*.init.out; fi
- @$(MAPRED) $(MFLAGS) $(YMLDIR)/$*.yml >>$(OUTDIR)/$*.out 2>>$(OUTDIR)/$*.err; \
- if [ $$? -ne 0 ]; then echo "gpmapreduce retcode=$$?" >> $(OUTDIR)/$*.err; fi
- @if [ -e $(SQLDIR)/$*_done.sql ] ; \
- then $(PSQL) $(PFLAGS) $(SQLDIR)/$*_done.sql > $(OUTDIR)/$*.done.out 2>&1; fi
- @$(MRDIFF) $(OUTDIR) $(MASTERDIR) $* > $(OUTDIR)/$*.suc
- @psql -c "\d" $(DB) | \
- perl -e "while (<>) {s/No relations found.\\n$$//; print}" >> $(OUTDIR)/$*.suc
- @cd $(OUTDIR); if [ -s $*.suc ] ; then \
- mv $*.suc $*.fail; echo FAILED; echo $*.fail >> FAIL ; else echo ok; fi
-
-
-# If the master files don't exist this won't let the test pass, but it will
-# allow the test to run
-$(MASTERDIR)/%.out:
- @echo "FILE MISSING" > $@
-
-$(MASTERDIR)/%.err:
- @echo "FILE MISSING" > $@
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/4e392375/src/bin/gpmapreduce/test/bin/fixup.pl
----------------------------------------------------------------------
diff --git a/src/bin/gpmapreduce/test/bin/fixup.pl b/src/bin/gpmapreduce/test/bin/fixup.pl
deleted file mode 100755
index a8c1811..0000000
--- a/src/bin/gpmapreduce/test/bin/fixup.pl
+++ /dev/null
@@ -1,176 +0,0 @@
-#!/usr/bin/perl
-#
-# fixup.pl - Greenplum MapReduce fixup script
-#
-# copyright (c) 2008
-# Author: Jeffrey I Cohen, Caleb Welton
-#
-
-use strict;
-use warnings;
-use Pod::Usage;
-use Getopt::Long;
-use Cwd;
-
-# let Makefile.pl update the connect string if necessary...
-my $gen_connect = "__CONNECT: postgres __";
-my $glob_id = "";
-
-# find the connect string
-my $glob_connect = $gen_connect;
-$glob_connect =~ s/^\_\_CONNECT\:(.*)\_\_$/$1/;
-
-my $man = 0;
-my $help = 0;
-
-GetOptions('help|?' => \$help,
- man => \$man,
- )
- or pod2usage(2);
-pod2usage(-msg => $glob_id, -exitstatus => 1) if $help;
-pod2usage(-msg => $glob_id, -exitstatus => 0, -verbose => 2) if $man;
-
-# Check that input file exists and is readable
-
-
-
-# convert a postgresql psql formatted table into an array of hashes
-sub tablelizer
-{
- my ($ini, $got_line1) = @_;
-
- # first, split into separate lines, the find all the column headings
-
- my @lines = split(/\n/, $ini);
-
- return undef
- unless (scalar(@lines));
-
- # if the first line is supplied, then it has the column headers,
- # so don't try to find them (or the ---+---- separator) in
- # "lines"
- my $line1 = $got_line1;
- $line1 = shift @lines
- unless (defined($got_line1));
-
- # look for <space>|<space>
- my @colheads = split(/\s+\|\s+/, $line1);
-
- # fixup first, last column head (remove leading,trailing spaces)
-
- $colheads[0] =~ s/^\s+//;
- $colheads[0] =~ s/\s+$//;
- $colheads[-1] =~ s/^\s+//;
- $colheads[-1] =~ s/\s+$//;
-
- return undef
- unless (scalar(@lines));
-
- shift @lines # skip dashed separator (unless it was skipped already)
- unless (defined($got_line1));
-
- my @rows;
-
- for my $lin (@lines)
- {
- my @cols = split(/\|/, $lin, scalar(@colheads));
- last
- unless (scalar(@cols) == scalar(@colheads));
-
- my $rowh = {};
-
- for my $colhdcnt (0..(scalar(@colheads)-1))
- {
- my $rawcol = shift @cols;
-
- $rawcol =~ s/^\s+//;
- $rawcol =~ s/\s+$//;
-
- my $colhd = $colheads[$colhdcnt];
- $rowh->{($colhdcnt+1)} = $rawcol;
- }
- push @rows, $rowh;
- }
-
- return \@rows;
-}
-
-sub gethostname
-{
- my $psql_str = "psql ";
-
- $psql_str .= $glob_connect
- if (defined($glob_connect));
-
- $psql_str .= " -c \'select content, role, status, hostname from gp_segment_configuration\'";
-
- my $tabdef = `$psql_str`;
-
-# print $tabdef;
-
- # do something reasonable on error...
- if (($tabdef =~ m/database.*does not exist/)
- || ($tabdef =~ m/could not connect/) )
- {
- return `hostname`;
- }
-
- my $mpp_config_table = tablelizer($tabdef);
-
-# print Data::Dumper->Dump([$mpp_config_table]);
-
- my $hostname= "localhost";
-
- for my $rowh (@{$mpp_config_table})
- {
- if (($rowh->{1} == 0) && # content (seg 0)
- ($rowh->{2} =~ m/p/) && # role =primary
- ($rowh->{3} =~ m/u/)) # status = up
- {
- $hostname = $rowh->{4};
- last;
- }
-
- }
-
- return $hostname;
-}
-
-if (1)
-{
- my $curdir = getcwd();
- my $db_user = `whoami`;
- my $hostname = gethostname(); # `hostname`
- chomp $db_user;
- chomp $hostname;
-
- # We assume that this script is run by the make file in example directory
- my $abs_srcdir = "$curdir";
-
- for my $file (@ARGV)
- {
- pod2usage(-exitstatus => 1) unless ($file);
-
- (-d $file) and die "ERROR: '$file' is a directory\n";
- (-f $file) or die "ERROR: No such file '$file'\n";
- (-r $file) or die "ERROR: No read permissions for '$file'\n";
-
- my $outfile = $file;
-
- # remove the ".in" (input) suffix for the outfile name
- $outfile =~ s/\.in$//g;
-
- open(INPUT, "<$file");
- open(OUTPUT, ">$outfile");
- while (<INPUT>) {
- s/\@db_user\@/$db_user/gm;
- s/\@abs_srcdir\@/$abs_srcdir/gm;
- s/\@hostname\@/$hostname/gm;
- print OUTPUT
- }
- close(INPUT);
- close(OUTPUT);
- }
-}
-
-exit(0);
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/4e392375/src/bin/gpmapreduce/test/bin/mrdiff.pl
----------------------------------------------------------------------
diff --git a/src/bin/gpmapreduce/test/bin/mrdiff.pl b/src/bin/gpmapreduce/test/bin/mrdiff.pl
deleted file mode 100755
index 20ff2a3..0000000
--- a/src/bin/gpmapreduce/test/bin/mrdiff.pl
+++ /dev/null
@@ -1,146 +0,0 @@
-#!/usr/bin/perl
-
-=head1 NAME
-
-B<mrdiff.pl> - Map/Reduce Diff utility script
-
-=head1 SYNOPSIS
-
-B<mrdiff.pl> [options] <output_dir> <expected_dir> <test_name>
-
-Options:
-
- -help brief help message
- -man full documentation
- -version print version information
-
-=head1 OPTIONS
-
-=over 8
-
-=item B<-help>
-
-    Prints a brief help message and exits.
-
-=item B<-man>
-
- Prints the manual page and exits.
-
-=item B<-version>
-
- Prints version information
-
-=back
-
-=head1 DESCRIPTION
-
-The diff utility looks for all files matching "<test_name>.*" in
-the output directory and compares them against the like named files
-in the expected directory.
-
-It produces either $outdir/$test_name.suc or $outdir/$test_name.fail
-depending on the results of the diff.
-
-To handle the underlying diff the utility calls gpdiff.pl
-
-=head1 AUTHORS
-
-Caleb Welton
-
-Copyright (c) 2008 GreenPlum. All rights reserved
-
-Address bug reports and comments to: cwelton@greenplum.com
-
-=cut
-
# Strictures: catch undeclared variables and other common mistakes.
use warnings;
use strict;
use Pod::Usage;
use Getopt::Long;
use Cwd;

my $man = 0;
my $help = 0;

# Parse command-line switches; any unrecognized option triggers the
# short usage message via pod2usage(2).
# NOTE(review): the POD above documents a -version switch, but it is
# not handled here, so "mrdiff.pl -version" falls through to the usage
# error -- confirm whether -version support was intended.
GetOptions('help|?' => \$help,
           man => \$man,
           )
    or pod2usage(2);
pod2usage(-exitstatus => 1) if $help;
pod2usage(-exitstatus => 0, -verbose => 2) if $man;

# Exactly three positional arguments are required:
# <output_dir> <expected_dir> <test_name>.
pod2usage(1) unless ($#ARGV == 2);
my ($output, $expected, $testname) = @ARGV;
-
# Collect the actual and expected result files for this test, sorted so
# the merge-compare loop below can walk the two lists in parallel.
my @outfiles = sort(glob("$output/$testname.*"));
my @expfiles = sort(glob("$expected/$testname.*"));

# We find gpdiff in the source tree assuming that this is being run
# from the source tree as well.
# The substitution rewrites a cwd of the form .../src/bin/gpmapreduce/...
# into .../src/test/regress/gpdiff.pl; if cwd does not match that
# pattern, $GPDIFF keeps the raw cwd and the -e check below fails.
my $GPDIFF = getcwd();
$GPDIFF =~ s|(.*/src)(/bin/gpmapreduce/.*)|$1/test/regress/gpdiff.pl|;
die "unable to find gpdiff.pl" unless (-e $GPDIFF);
-
# Print a banner to STDOUT identifying the pair of files about to be
# diffed.  The dashed rule is sized to line up with the
# "FILE: <left> <=> <right>" line ("FILE:  <=> " is 12 characters wide).
# NOTE: the old ($$) prototype was dropped -- Perl prototypes alter
# parsing rather than validating arguments, and the locals were renamed
# away from $a/$b, which shadow sort()'s package globals.
sub printheader
{
    my ($left, $right) = @_;
    my $fill = "-" x (length($left) + length($right));
    print "\n";
    print "------------$fill\n";
    print "FILE: $left <=> $right\n";
    print "------------$fill\n";
}
-
# Walk the two sorted file lists in parallel (merge-join style): diff
# files whose bare names match, and report any file present on only one
# side.  $out/$exp/$ofile/$efile stay in scope for the drain loops that
# follow this block.
my ($out, $exp, $ofile, $efile);
while ($#outfiles >= 0 && $#expfiles >= 0)
{
    $out = shift @outfiles;
    $exp = shift @expfiles;

    # we have paths, and want to check if the filename is the same
    while (1)
    {
        # Strip the directory prefixes to get bare filenames to compare.
        $out =~ m/$output\/(.*)/ or die();
        $ofile = $1;
        $exp =~ m/$expected\/(.*)/ or die();
        $efile = $1;

        if ($ofile eq $efile)
        {
            # Names match: run gpdiff and show its output, if any.
            my $diff = `$GPDIFF $out $exp`;
            if (length($diff) > 0)
            {
                printheader($out,$exp);
                print $diff;
            }
            last;
        }
        elsif ($ofile lt $efile)
        {
            # Output file with no expected counterpart; the ".suc"
            # success marker is deliberately not reported as missing.
            printheader($out,"<missing>") unless ($ofile =~ m/^$testname.suc$/);
            last if ($#outfiles < 0);
            $out = shift @outfiles;
        }
        else
        {
            # Expected file with no matching output file.
            printheader("<missing>", $exp);
            last if ($#expfiles < 0);
            $exp = shift @expfiles;
        }

    }
}
-
# Drain whatever survived the merge loop: leftover output files are
# reported as having no expected counterpart (except the ".suc" success
# marker), and every leftover expected file is missing from the output.
while (@outfiles)
{
    $out = shift @outfiles;
    ($ofile) = $out =~ m/$output\/(.*)/
        or die();
    printheader($out, "<missing>") unless $ofile =~ m/^$testname.suc$/;
}
while (@expfiles)
{
    $exp = shift @expfiles;
    printheader("<missing>", $exp);
}