You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by sa...@apache.org on 2015/07/21 04:38:33 UTC
spark git commit: [SPARK-9052] [SPARKR] Fix comments after curly
braces
Repository: spark
Updated Branches:
refs/heads/master 936a96cb3 -> 2bdf9914a
[SPARK-9052] [SPARKR] Fix comments after curly braces
[[SPARK-9052] Fix comments after curly braces - ASF JIRA](https://issues.apache.org/jira/browse/SPARK-9052)
This is the full result of lintr at the revision: 011551620faa87107a787530f074af3d9be7e695.
[[SPARK-9052] the result of lint-r at the revision:011551620faa87107a787530f074af3d9be7e695](https://gist.github.com/yu-iskw/e7246041b173a3f29482)
This is the difference of the result between before and after.
https://gist.github.com/yu-iskw/e7246041b173a3f29482/revisions
Author: Yu ISHIKAWA <yu...@gmail.com>
Closes #7440 from yu-iskw/SPARK-9052 and squashes the following commits:
015d738 [Yu ISHIKAWA] Fix the indentations and move the placement of comma
5cc30fe [Yu ISHIKAWA] Fix the indentation in a condition
4ead0e5 [Yu ISHIKAWA] [SPARK-9052][SparkR] Fix comments after curly braces
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2bdf9914
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2bdf9914
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2bdf9914
Branch: refs/heads/master
Commit: 2bdf9914ab709bf9c1cdd17fc5dd7a69f6d46f29
Parents: 936a96c
Author: Yu ISHIKAWA <yu...@gmail.com>
Authored: Tue Jul 21 11:38:22 2015 +0900
Committer: Kousuke Saruta <sa...@oss.nttdata.co.jp>
Committed: Tue Jul 21 11:38:22 2015 +0900
----------------------------------------------------------------------
R/pkg/R/schema.R | 13 ++++++++-----
R/pkg/R/utils.R | 33 ++++++++++++++++++++++-----------
2 files changed, 30 insertions(+), 16 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/2bdf9914/R/pkg/R/schema.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/schema.R b/R/pkg/R/schema.R
index 06df430..79c744e 100644
--- a/R/pkg/R/schema.R
+++ b/R/pkg/R/schema.R
@@ -69,11 +69,14 @@ structType.structField <- function(x, ...) {
#' @param ... further arguments passed to or from other methods
print.structType <- function(x, ...) {
cat("StructType\n",
- sapply(x$fields(), function(field) { paste("|-", "name = \"", field$name(),
- "\", type = \"", field$dataType.toString(),
- "\", nullable = ", field$nullable(), "\n",
- sep = "") })
- , sep = "")
+ sapply(x$fields(),
+ function(field) {
+ paste("|-", "name = \"", field$name(),
+ "\", type = \"", field$dataType.toString(),
+ "\", nullable = ", field$nullable(), "\n",
+ sep = "")
+ }),
+ sep = "")
}
#' structField
http://git-wip-us.apache.org/repos/asf/spark/blob/2bdf9914/R/pkg/R/utils.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/utils.R b/R/pkg/R/utils.R
index 950ba74..3f45589 100644
--- a/R/pkg/R/utils.R
+++ b/R/pkg/R/utils.R
@@ -390,14 +390,17 @@ processClosure <- function(node, oldEnv, defVars, checkedFuncs, newEnv) {
for (i in 1:nodeLen) {
processClosure(node[[i]], oldEnv, defVars, checkedFuncs, newEnv)
}
- } else { # if node[[1]] is length of 1, check for some R special functions.
+ } else {
+ # if node[[1]] is length of 1, check for some R special functions.
nodeChar <- as.character(node[[1]])
- if (nodeChar == "{" || nodeChar == "(") { # Skip start symbol.
+ if (nodeChar == "{" || nodeChar == "(") {
+ # Skip start symbol.
for (i in 2:nodeLen) {
processClosure(node[[i]], oldEnv, defVars, checkedFuncs, newEnv)
}
} else if (nodeChar == "<-" || nodeChar == "=" ||
- nodeChar == "<<-") { # Assignment Ops.
+ nodeChar == "<<-") {
+ # Assignment Ops.
defVar <- node[[2]]
if (length(defVar) == 1 && typeof(defVar) == "symbol") {
# Add the defined variable name into defVars.
@@ -408,14 +411,16 @@ processClosure <- function(node, oldEnv, defVars, checkedFuncs, newEnv) {
for (i in 3:nodeLen) {
processClosure(node[[i]], oldEnv, defVars, checkedFuncs, newEnv)
}
- } else if (nodeChar == "function") { # Function definition.
+ } else if (nodeChar == "function") {
+ # Function definition.
# Add parameter names.
newArgs <- names(node[[2]])
lapply(newArgs, function(arg) { addItemToAccumulator(defVars, arg) })
for (i in 3:nodeLen) {
processClosure(node[[i]], oldEnv, defVars, checkedFuncs, newEnv)
}
- } else if (nodeChar == "$") { # Skip the field.
+ } else if (nodeChar == "$") {
+ # Skip the field.
processClosure(node[[2]], oldEnv, defVars, checkedFuncs, newEnv)
} else if (nodeChar == "::" || nodeChar == ":::") {
processClosure(node[[3]], oldEnv, defVars, checkedFuncs, newEnv)
@@ -429,7 +434,8 @@ processClosure <- function(node, oldEnv, defVars, checkedFuncs, newEnv) {
(typeof(node) == "symbol" || typeof(node) == "language")) {
# Base case: current AST node is a leaf node and a symbol or a function call.
nodeChar <- as.character(node)
- if (!nodeChar %in% defVars$data) { # Not a function parameter or local variable.
+ if (!nodeChar %in% defVars$data) {
+ # Not a function parameter or local variable.
func.env <- oldEnv
topEnv <- parent.env(.GlobalEnv)
# Search in function environment, and function's enclosing environments
@@ -439,20 +445,24 @@ processClosure <- function(node, oldEnv, defVars, checkedFuncs, newEnv) {
while (!identical(func.env, topEnv)) {
# Namespaces other than "SparkR" will not be searched.
if (!isNamespace(func.env) ||
- (getNamespaceName(func.env) == "SparkR" &&
- !(nodeChar %in% getNamespaceExports("SparkR")))) { # Only include SparkR internals.
+ (getNamespaceName(func.env) == "SparkR" &&
+ !(nodeChar %in% getNamespaceExports("SparkR")))) {
+ # Only include SparkR internals.
+
# Set parameter 'inherits' to FALSE since we do not need to search in
# attached package environments.
if (tryCatch(exists(nodeChar, envir = func.env, inherits = FALSE),
error = function(e) { FALSE })) {
obj <- get(nodeChar, envir = func.env, inherits = FALSE)
- if (is.function(obj)) { # If the node is a function call.
+ if (is.function(obj)) {
+ # If the node is a function call.
funcList <- mget(nodeChar, envir = checkedFuncs, inherits = F,
ifnotfound = list(list(NULL)))[[1]]
found <- sapply(funcList, function(func) {
ifelse(identical(func, obj), TRUE, FALSE)
})
- if (sum(found) > 0) { # If function has been examined, ignore.
+ if (sum(found) > 0) {
+ # If function has been examined, ignore.
break
}
# Function has not been examined, record it and recursively clean its closure.
@@ -495,7 +505,8 @@ cleanClosure <- function(func, checkedFuncs = new.env()) {
# environment. First, function's arguments are added to defVars.
defVars <- initAccumulator()
argNames <- names(as.list(args(func)))
- for (i in 1:(length(argNames) - 1)) { # Remove the ending NULL in pairlist.
+ for (i in 1:(length(argNames) - 1)) {
+ # Remove the ending NULL in pairlist.
addItemToAccumulator(defVars, argNames[i])
}
# Recursively examine variables in the function body.
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org