You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@jspwiki.apache.org by ju...@apache.org on 2020/02/27 15:10:28 UTC

[jspwiki] 03/20: JSPWIKI-120: PageFilters use Engine instead of WikiEngine on initialize method.

This is an automated email from the ASF dual-hosted git repository.

juanpablo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/jspwiki.git

commit f8e3e5ee137c0d98e970c8233bc40a13caff0fe6
Author: juanpablo <ju...@apache.org>
AuthorDate: Wed Feb 26 13:14:44 2020 +0100

    JSPWIKI-120: PageFilters use Engine instead of WikiEngine on initialize method.
    
    Also, moved methods from BasicPageFilter to PageFilter as default methods where appropriate
---
 .../apache/wiki/api/filters/BasicPageFilter.java   |  71 +++---------
 .../org/apache/wiki/api/filters/PageFilter.java    |  92 +++++++---------
 .../java/org/apache/wiki/filters/CreoleFilter.java |  41 +++----
 .../apache/wiki/filters/PingWeblogsComFilter.java  |  22 ++--
 .../java/org/apache/wiki/filters/SpamFilter.java   | 122 ++++++---------------
 5 files changed, 114 insertions(+), 234 deletions(-)

diff --git a/jspwiki-main/src/main/java/org/apache/wiki/api/filters/BasicPageFilter.java b/jspwiki-main/src/main/java/org/apache/wiki/api/filters/BasicPageFilter.java
index 1234000..1462067 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/api/filters/BasicPageFilter.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/api/filters/BasicPageFilter.java
@@ -18,73 +18,28 @@
  */
 package org.apache.wiki.api.filters;
 
+import org.apache.wiki.api.core.Engine;
+import org.apache.wiki.api.exceptions.FilterException;
+
 import java.util.Properties;
 
-import org.apache.wiki.WikiContext;
-import org.apache.wiki.WikiEngine;
-import org.apache.wiki.api.exceptions.FilterException;
 
 /**
- *  Provides a base implementation of a PageFilter.  None of the callbacks
- *  do anything, so it is a good idea for you to extend from this class
- *  and implement only methods that you need.
- *
+ * Provides a base implementation of a PageFilter.  None of the callbacks do anything, so it is a good idea for you to extend from this
+ * class and implement only methods that you need.
  */
-public class BasicPageFilter
-    implements PageFilter
-{
-    protected WikiEngine m_engine;
-  
-    /**
-     *  If you override this, you should call super.initialize() first.
-     *  
-     *  {@inheritDoc}
-     */
-    public void initialize( WikiEngine engine, Properties properties )
-        throws FilterException
-    {
-        m_engine = engine;
-    }
+public class BasicPageFilter implements PageFilter {
 
-    /**
-     *  {@inheritDoc}
-     */
-    public String preTranslate( WikiContext wikiContext, String content )
-        throws FilterException
-    {
-        return content;
-    }
+    protected Engine m_engine;
 
     /**
-     *  {@inheritDoc}
+     * If you override this, you should call super.initialize() first.
+     *
+     * {@inheritDoc}
      */
-    public String postTranslate( WikiContext wikiContext, String htmlContent )
-        throws FilterException
-    {
-        return htmlContent;
-    }
-
-    /**
-     *  {@inheritDoc}
-     */
-    public String preSave( WikiContext wikiContext, String content )
-        throws FilterException
-    {
-        return content;
+    @Override
+    public void initialize( final Engine engine, final Properties properties ) throws FilterException {
+        m_engine = engine;
     }
 
-    /**
-     *  {@inheritDoc}
-     */
-    public void postSave( WikiContext wikiContext, String content )
-        throws FilterException
-    {
-    }
-    
-    /**
-     *  {@inheritDoc}
-     */
-    public void destroy( WikiEngine engine ) 
-    {
-    }
 }
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/api/filters/PageFilter.java b/jspwiki-main/src/main/java/org/apache/wiki/api/filters/PageFilter.java
index 3354240..26d9008 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/api/filters/PageFilter.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/api/filters/PageFilter.java
@@ -18,72 +18,62 @@
  */
 package org.apache.wiki.api.filters;
 
-import java.util.Properties;
-
 import org.apache.wiki.WikiContext;
-import org.apache.wiki.WikiEngine;
+import org.apache.wiki.api.core.Engine;
 import org.apache.wiki.api.exceptions.FilterException;
 
+import java.util.Properties;
+
+
 /**
- *  Provides a definition for a page filter.  A page filter is a class
- *  that can be used to transform the WikiPage content being saved or
+ *  Provides a definition for a page filter.  A page filter is a class that can be used to transform the WikiPage content being saved or
  *  being loaded at any given time.
  *  <p>
- *  Note that the WikiContext.getPage() method always returns the context
- *  in which text is rendered, i.e. the original request.  Thus the content
- *  may actually be different content than what what the wikiContext.getPage()
- *  implies!  This happens often if you are for example including multiple
- *  pages on the same page.
+ *  Note that the WikiContext.getPage() method always returns the context in which text is rendered, i.e. the original request.  Thus the
 + *  content may actually be different content than what the wikiContext.getPage() implies!  This happens often if you are for example
+ *  including multiple pages on the same page.
  *  <p>
- *  PageFilters must be thread-safe!  There is only one instance of each PageFilter 
- *  per each WikiEngine invocation.  If you need to store data persistently, use
- *  VariableManager, or WikiContext.
+ *  PageFilters must be thread-safe!  There is only one instance of each PageFilter per each Engine invocation.  If you need to store data
+ *  persistently, use VariableManager, or WikiContext.
  *  <p>
- *  As of 2.5.30, initialize() gains access to the WikiEngine.
- *
+ *  As of 2.5.30, initialize() gains access to the Engine.
  */
-public interface PageFilter
-{
+public interface PageFilter {
+
     /**
-     *  Is called whenever the a new PageFilter is instantiated and
-     *  reset.
 +     *  Is called whenever a new PageFilter is instantiated and reset.
      *  
      *  @param engine The WikiEngine whic owns this PageFilter
      *  @param properties The properties ripped from filters.xml.
-     *  @throws FilterException If the filter could not be initialized. If this is thrown,
-     *                          the filter is not added to the internal queues.
+     *  @throws FilterException If the filter could not be initialized. If this is thrown, the filter is not added to the internal queues.
      */
-    void initialize( WikiEngine engine, Properties properties )
-        throws FilterException;
+    void initialize( Engine engine, Properties properties ) throws FilterException;
 
     /**
-     *  This method is called whenever a page has been loaded from the provider,
-     *  but not yet been sent through the markup-translation process.  Note that you cannot
-     *  do HTML translation here, because it will be escaped.
+     *  This method is called whenever a page has been loaded from the provider, but not yet been sent through the markup-translation
+     *  process.  Note that you cannot do HTML translation here, because it will be escaped.
      *
      *  @param wikiContext The current wikicontext.
-     *  @param content     WikiMarkup.
+     *  @param content WikiMarkup.
      *  @return The modified wikimarkup content.
-     *  @throws FilterException If something goes wrong.  Throwing this causes the entire page
-     *                          processing to be abandoned.
+     *  @throws FilterException If something goes wrong.  Throwing this causes the entire page processing to be abandoned.
      */
-    String preTranslate( WikiContext wikiContext, String content )
-        throws FilterException;
+    default String preTranslate( final WikiContext wikiContext, final String content ) throws FilterException {
+        return content;
+    }
 
     /**
-     *  This method is called after a page has been fed through the translation process,
-     *  so anything you are seeing here is translated content.  If you want to
-     *  do any of your own WikiMarkup2HTML translation, do it here.
+     *  This method is called after a page has been fed through the translation process, so anything you are seeing here is translated
+     *  content.  If you want to do any of your own WikiMarkup2HTML translation, do it here.
      *  
      *  @param wikiContext The WikiContext.
      *  @param htmlContent The translated HTML
      *  @return The modified HTML
-     *  
-     *  @throws FilterException If something goes wrong.  Throwing this causes the entire page
-     *                          processing to be abandoned.
+     *  @throws FilterException If something goes wrong.  Throwing this causes the entire page processing to be abandoned.
      */
-    String postTranslate( WikiContext wikiContext, String htmlContent )
-        throws FilterException;
+    default String postTranslate( final WikiContext wikiContext, final String htmlContent ) throws FilterException {
+        return htmlContent;
+    }
 
     /**
      *  This method is called before the page has been saved to the PageProvider.
@@ -91,35 +81,31 @@ public interface PageFilter
      *  @param wikiContext The WikiContext
      *  @param content The wikimarkup that the user just wanted to save.
      *  @return The modified wikimarkup
-     *  @throws FilterException If something goes wrong.  Throwing this causes the entire page
-     *                          processing to be abandoned.
+     *  @throws FilterException If something goes wrong.  Throwing this causes the entire page processing to be abandoned.
      */
-    String preSave( WikiContext wikiContext, String content )
-        throws FilterException;
+    default String preSave( final WikiContext wikiContext, final String content ) throws FilterException {
+        return content;
+    }
 
     /**
-     *  This method is called after the page has been successfully saved.
-     *  If the saving fails for any reason, then this method will not
+     *  This method is called after the page has been successfully saved. If the saving fails for any reason, then this method will not
      *  be called.
      *  <p>
-     *  Since the result is discarded from this method, this is only useful
-     *  for things like counters, etc.
+     *  Since the result is discarded from this method, this is only useful for things like counters, etc.
      *  
      *  @param wikiContext The WikiContext
      *  @param content The content which was just stored.
-     *  @throws FilterException If something goes wrong.  As the page is already saved,
-     *                          This is just logged.
 +     *  @throws FilterException If something goes wrong.  As the page is already saved, this is just logged.
      */
-    void postSave( WikiContext wikiContext, String content )
-        throws FilterException;
+    default void postSave( final WikiContext wikiContext, final String content ) throws FilterException {}
 
     /**
      *  Called for every filter, e.g. on wiki engine shutdown. Use this if you have to 
      *  clean up or close global resources you allocated in the initialize() method.
      * 
-     *  @param engine The WikiEngine which owns this filter.
+     *  @param engine The Engine which owns this filter.
      *  @since 2.5.36
      */
-    void destroy( WikiEngine engine );
+    default void destroy( final Engine engine ) {}
 
 }
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/filters/CreoleFilter.java b/jspwiki-main/src/main/java/org/apache/wiki/filters/CreoleFilter.java
index 9e8fab4..43b678f 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/filters/CreoleFilter.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/filters/CreoleFilter.java
@@ -20,7 +20,7 @@ package org.apache.wiki.filters;
 
 import org.apache.log4j.Logger;
 import org.apache.wiki.WikiContext;
-import org.apache.wiki.WikiEngine;
+import org.apache.wiki.api.core.Engine;
 import org.apache.wiki.api.exceptions.FilterException;
 import org.apache.wiki.api.filters.BasicPageFilter;
 import org.apache.wiki.parser.CreoleToJSPWikiTranslator;
@@ -28,26 +28,19 @@ import org.apache.wiki.parser.CreoleToJSPWikiTranslator;
 import java.util.Properties;
 
 /**
- * <p>Provides the Implementation for mixed mode creole: If you activate
- * this filter, it will translate all markup that was saved as creole
- * markup to JSPWiki markup. Therefore the files will be saved 
- * with mixed markup.
+ * <p>Provides the Implementation for mixed mode creole: If you activate this filter, it will translate all markup that was saved as creole
+ * markup to JSPWiki markup. Therefore the files will be saved with mixed markup.
  * <p>
- * <b>WARNING</b>: There's no turning back after installing this
- * filter. Since your wiki pages are saved in Creole markup you can
+ * <b>WARNING</b>: There's no turning back after installing this filter. Since your wiki pages are saved in Creole markup you can
  * not deactivate it afterwards.
  * <p>
- * <b>WARNING</b>: This feature is completely experimental, and is known to be
- * broken.  Use at your own risk.
+ * <b>WARNING</b>: This feature is completely experimental, and is known to be broken.  Use at your own risk.
  * <p>
- * <b>WARNING</b>: The CreoleFilter feature is deprecated.  JSPWiki is likely
- * to implement a non-mixed mode Creole at some point, since turning on
- * Creole will make new pages obsolete.
- * 
- * 
+ * <b>WARNING</b>: The CreoleFilter feature is deprecated.  JSPWiki is likely to implement a non-mixed mode Creole at some point, since
+ * turning on Creole will make new pages obsolete.
+ *
  * @see <a href="http://www.wikicreole.org/wiki/MixedMode">[[WikiCreole:MixedMode]]</a> 
  */
-
 public class CreoleFilter extends BasicPageFilter {
     
     private static final Logger log = Logger.getLogger(CreoleFilter.class);
@@ -55,18 +48,19 @@ public class CreoleFilter extends BasicPageFilter {
     /**
      *  {@inheritDoc}
      */
-    public void initialize(WikiEngine engine, Properties props) throws FilterException {
+    @Override public void initialize( final Engine engine, final Properties props ) throws FilterException {
     }
 
     /**
      *  {@inheritDoc}
      */
-    public String preSave( WikiContext wikiContext, String content ) {
+    @Override
+    public String preSave( final WikiContext wikiContext, final String content ) {
         try {
-            String username=wikiContext.getCurrentUser().getName();
-            Properties prop = wikiContext.getEngine().getWikiProperties();
+            final String username = wikiContext.getCurrentUser().getName();
+            final Properties prop = wikiContext.getEngine().getWikiProperties();
             return new CreoleToJSPWikiTranslator().translateSignature(prop, content,username);
-        } catch(Exception e ) {
+        } catch( final Exception e ) {
             log.error( e.getMessage(), e );
             return e.getMessage();
         }
@@ -75,11 +69,12 @@ public class CreoleFilter extends BasicPageFilter {
     /**
      *  {@inheritDoc}
      */
-    public String preTranslate(WikiContext wikiContext, String content) {
+    @Override
+    public String preTranslate( final WikiContext wikiContext, final String content ) {
         try {
-            Properties prop = wikiContext.getEngine().getWikiProperties();
+            final Properties prop = wikiContext.getEngine().getWikiProperties();
             return new CreoleToJSPWikiTranslator().translate(prop ,content);
-        } catch (Exception e) {
+        } catch( final Exception e ) {
             log.error( e.getMessage(), e );
             return content
                    + "\n \n %%error \n"
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/filters/PingWeblogsComFilter.java b/jspwiki-main/src/main/java/org/apache/wiki/filters/PingWeblogsComFilter.java
index 6e1bfe6..fbfdeb9 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/filters/PingWeblogsComFilter.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/filters/PingWeblogsComFilter.java
@@ -20,7 +20,6 @@ package org.apache.wiki.filters;
 
 import org.apache.log4j.Logger;
 import org.apache.wiki.WikiContext;
-import org.apache.wiki.WikiEngine;
 import org.apache.wiki.api.core.Engine;
 import org.apache.wiki.api.filters.BasicPageFilter;
 import org.apache.xmlrpc.AsyncCallback;
@@ -53,14 +52,16 @@ public class PingWeblogsComFilter extends BasicPageFilter {
     /**
      *  {@inheritDoc}
      */
-    @Override public void initialize( final WikiEngine engine, final Properties props ) {
+    @Override
+    public void initialize( final Engine engine, final Properties props ) {
         m_pingURL = props.getProperty( PROP_PINGURL, "http://rpc.weblogs.com/RPC2" );
     }
 
     /**
      *  {@inheritDoc}
      */
-    @Override public void postSave( final WikiContext context, final String pagecontent ) {
+    @Override
+    public void postSave( final WikiContext context, final String pagecontent ) {
         String blogName = context.getPage().getName();
         final Engine engine   = context.getEngine();
 
@@ -87,22 +88,24 @@ public class PingWeblogsComFilter extends BasicPageFilter {
 
             xmlrpc.executeAsync("weblogUpdates.ping", params, 
                                 new AsyncCallback() {
-                                    @Override public void handleError( final Exception ex, final URL url, final String method ) {
+                                    @Override
+                                    public void handleError( final Exception ex, final URL url, final String method ) {
                                         log.error( "Unable to execute weblogs.com ping to URL: " + url.toString(), ex );
                                     }
 
-                                    @Override public void handleResult( final Object result, final URL url, final String method ) {
+                                    @Override
+                                    public void handleResult( final Object result, final URL url, final String method ) {
                                         @SuppressWarnings("unchecked")
                                         final Hashtable< String, Object > res = (Hashtable < String, Object > ) result;
 
-                                        final Boolean flerror = (Boolean)res.get("flerror");
-                                        final String  msg     = (String)res.get("message");
+                                        final Boolean flerror = ( Boolean )res.get( "flerror" );
+                                        final String  msg     = ( String )res.get( "message" );
 
                                         if( flerror ) {
-                                            log.error("Failed to ping: "+msg);
+                                            log.error( "Failed to ping: " + msg );
                                         }
 
-                                        log.info("Weblogs.com has been pinged.");
+                                        log.info( "Weblogs.com has been pinged." );
                                     }
                                 }
                                 );
@@ -110,4 +113,5 @@ public class PingWeblogsComFilter extends BasicPageFilter {
             log.error("Malformed URL",e);
         }
     }
+
 }
diff --git a/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java b/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java
index 0175d6c..109eb6f 100644
--- a/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java
+++ b/jspwiki-main/src/main/java/org/apache/wiki/filters/SpamFilter.java
@@ -30,7 +30,6 @@ import org.apache.oro.text.regex.Perl5Compiler;
 import org.apache.oro.text.regex.Perl5Matcher;
 import org.apache.wiki.InternalWikiException;
 import org.apache.wiki.WikiContext;
-import org.apache.wiki.WikiEngine;
 import org.apache.wiki.WikiPage;
 import org.apache.wiki.WikiProvider;
 import org.apache.wiki.api.core.Engine;
@@ -250,26 +249,20 @@ public class SpamFilter extends BasicPageFilter {
      *  {@inheritDoc}
      */
     @Override
-    public void initialize( final WikiEngine engine, final Properties properties ) {
+    public void initialize( final Engine engine, final Properties properties ) {
         m_forbiddenWordsPage = properties.getProperty( PROP_WORDLIST, m_forbiddenWordsPage );
         m_forbiddenIPsPage = properties.getProperty( PROP_IPLIST, m_forbiddenIPsPage);
         m_pageNameMaxLength = properties.getProperty( PROP_MAX_PAGENAME_LENGTH, m_pageNameMaxLength);
         m_errorPage = properties.getProperty( PROP_ERRORPAGE, m_errorPage );
-        m_limitSinglePageChanges = TextUtil.getIntegerProperty( properties,
-                                                                PROP_PAGECHANGES,
-                                                                m_limitSinglePageChanges );
+        m_limitSinglePageChanges = TextUtil.getIntegerProperty( properties, PROP_PAGECHANGES, m_limitSinglePageChanges );
         
-        m_limitSimilarChanges = TextUtil.getIntegerProperty( properties,
-                                                             PROP_SIMILARCHANGES,
-                                                             m_limitSimilarChanges );
+        m_limitSimilarChanges = TextUtil.getIntegerProperty( properties, PROP_SIMILARCHANGES, m_limitSimilarChanges );
 
         m_maxUrls = TextUtil.getIntegerProperty( properties, PROP_MAXURLS, m_maxUrls );
         m_banTime = TextUtil.getIntegerProperty( properties, PROP_BANTIME, m_banTime );
         m_blacklist = properties.getProperty( PROP_BLACKLIST, m_blacklist );
 
-        m_ignoreAuthenticated = TextUtil.getBooleanProperty( properties,
-                                                             PROP_IGNORE_AUTHENTICATED,
-                                                             m_ignoreAuthenticated );
+        m_ignoreAuthenticated = TextUtil.getBooleanProperty( properties, PROP_IGNORE_AUTHENTICATED, m_ignoreAuthenticated );
 
         m_useCaptcha = properties.getProperty( PROP_CAPTCHA, "" ).equals("asirra");
 
@@ -280,18 +273,11 @@ public class SpamFilter extends BasicPageFilter {
             throw new InternalWikiException( "Faulty pattern." , e);
         }
 
-        m_akismetAPIKey = TextUtil.getStringProperty( properties,
-                                                      PROP_AKISMET_API_KEY,
-                                                      m_akismetAPIKey );
-
-        m_stopAtFirstMatch = TextUtil.getStringProperty( properties,
-                                                         PROP_FILTERSTRATEGY,
-                                                         STRATEGY_EAGER ).equals( STRATEGY_EAGER );
+        m_akismetAPIKey = TextUtil.getStringProperty( properties, PROP_AKISMET_API_KEY, m_akismetAPIKey );
+        m_stopAtFirstMatch = TextUtil.getStringProperty( properties, PROP_FILTERSTRATEGY, STRATEGY_EAGER ).equals( STRATEGY_EAGER );
 
         log.info( "# Spam filter initialized.  Temporary ban time " + m_banTime +
                   " mins, max page changes/minute: " + m_limitSinglePageChanges );
-
-
     }
 
     private static final int REJECT = 0;
@@ -327,7 +313,8 @@ public class SpamFilter extends BasicPageFilter {
     }
 
     /** {@inheritDoc} */
-    @Override public String preSave( final WikiContext context, final String content ) throws RedirectException {
+    @Override
+    public String preSave( final WikiContext context, final String content ) throws RedirectException {
         cleanBanList();
         refreshBlacklists( context );
         final Change change = getChange( context, content );
@@ -473,35 +460,25 @@ public class SpamFilter extends BasicPageFilter {
             for( final Iterator< Host > i = m_lastModifications.iterator(); i.hasNext(); ) {
                 final Host host = i.next();
 
-                //
                 //  Check if this item is invalid
-                //
                 if( host.getAddedTime() < time ) {
                     log.debug( "Removed host " + host.getAddress() + " from modification queue (expired)" );
                     i.remove();
                     continue;
                 }
 
-                //
                 // Check if this IP address has been seen before
-                //
-
                 if( host.getAddress().equals( addr ) ) {
                     hostCounter++;
                 }
 
-                //
                 //  Check, if this change has been seen before
-                //
-
                 if( host.getChange() != null && host.getChange().equals( change ) ) {
                     changeCounter++;
                 }
             }
 
-            //
             //  Now, let's check against the limits.
-            //
             if( hostCounter >= m_limitSinglePageChanges ) {
                 final Host host = new Host( addr, null );
                 m_temporaryBanList.add( host );
@@ -520,11 +497,9 @@ public class SpamFilter extends BasicPageFilter {
                 checkStrategy( context, REASON_SIMILAR_MODIFICATIONS, "Herb says you look like a spammer, and I trust Herb! (Incident code "+uid+")");
             }
 
-            //
             //  Calculate the number of links in the addition.
-            //
             String tstChange  = change.toString();
-            int    urlCounter = 0;
+            int urlCounter = 0;
             while( m_matcher.contains( tstChange,m_urlPattern ) ) {
                 final MatchResult m = m_matcher.getMatch();
                 tstChange = tstChange.substring( m.endOffset(0) );
@@ -540,20 +515,13 @@ public class SpamFilter extends BasicPageFilter {
                 checkStrategy( context, REASON_TOO_MANY_URLS, "Herb says you look like a spammer, and I trust Herb! (Incident code " + uid + ")" );
             }
 
-            //
             //  Check bot trap
-            //
             checkBotTrap( context, change );
 
-            //
             //  Check UTF-8 mangling
-            //
             checkUTF8( context, change );
 
-            //
-            //  Do Akismet check.  This is good to be the last, because this is the most
-            //  expensive operation.
-            //
+            //  Do Akismet check.  This is good to be the last, because this is the most expensive operation.
             checkAkismet( context, change );
 
             m_lastModifications.add( new Host( addr, change ) );
@@ -583,10 +551,7 @@ public class SpamFilter extends BasicPageFilter {
 
             final HttpServletRequest req = context.getHttpRequest();
 
-            //
-            //  Akismet will mark all empty statements as spam, so we'll just
-            //  ignore them.
-            //
+            //  Akismet will mark all empty statements as spam, so we'll just ignore them.
             if( change.m_adds == 0 && change.m_removals > 0 ) {
                 return;
             }
@@ -607,15 +572,15 @@ public class SpamFilter extends BasicPageFilter {
                 final String commentAuthorURL   = null;
 
                 final boolean isSpam = m_akismet.commentCheck( ipAddress,
-                                                         userAgent,
-                                                         referrer,
-                                                         permalink,
-                                                         commentType,
-                                                         commentAuthor,
-                                                         commentAuthorEmail,
-                                                         commentAuthorURL,
-                                                         change.toString(),
-                                                         null );
+                                                               userAgent,
+                                                               referrer,
+                                                               permalink,
+                                                               commentType,
+                                                               commentAuthor,
+                                                               commentAuthorEmail,
+                                                               commentAuthorURL,
+                                                               change.toString(),
+                                                               null );
 
                 sw.stop();
                 log.debug( "Akismet request done in: " + sw );
@@ -650,7 +615,6 @@ public class SpamFilter extends BasicPageFilter {
      */
     private void checkBotTrap( final WikiContext context, final Change change ) throws RedirectException {
         final HttpServletRequest request = context.getHttpRequest();
-
         if( request != null ) {
             final String unspam = request.getParameter( getBotFieldName() );
             if( unspam != null && unspam.length() > 0 ) {
@@ -664,10 +628,8 @@ public class SpamFilter extends BasicPageFilter {
 
     private void checkUTF8( final WikiContext context, final Change change ) throws RedirectException {
         final HttpServletRequest request = context.getHttpRequest();
-
         if( request != null ) {
             final String utf8field = request.getParameter( "encodingcheck" );
-
             if( utf8field != null && !utf8field.equals( "\u3041" ) ) {
                 final String uid = log( context, REJECT, REASON_UTF8_TRAP, change.toString() );
 
@@ -680,7 +642,6 @@ public class SpamFilter extends BasicPageFilter {
     /** Goes through the ban list and cleans away any host which has expired from it. */
     private synchronized void cleanBanList() {
         final long now = System.currentTimeMillis();
-
         for( final Iterator< Host > i = m_temporaryBanList.iterator(); i.hasNext(); ) {
             final Host host = i.next();
 
@@ -723,12 +684,9 @@ public class SpamFilter extends BasicPageFilter {
      */
     private void refreshBlacklists( final WikiContext context ) {
         try {
-
             boolean rebuild = false;
 
-            //
             //  Rebuild, if the spam words page, the attachment or the IP ban page has changed since.
-            //
             final WikiPage sourceSpam = context.getEngine().getManager( PageManager.class ).getPage( m_forbiddenWordsPage );
             if( sourceSpam != null ) {
                 if( m_spamPatterns == null || m_spamPatterns.isEmpty() || sourceSpam.getLastModified().after( m_lastRebuild ) ) {
@@ -750,10 +708,7 @@ public class SpamFilter extends BasicPageFilter {
                 }
             }
 
-            //
-            //  Do the actual rebuilding.  For simplicity's sake, we always rebuild the complete
-            //  filter list regardless of what changed.
-            //
+            //  Do the actual rebuilding.  For simplicity's sake, we always rebuild the complete filter list regardless of what changed.
             if( rebuild ) {
                 m_lastRebuild = new Date();
                 m_spamPatterns = parseWordList( sourceSpam, ( sourceSpam != null ) ? sourceSpam.getAttribute( LISTVAR ) : null );
@@ -788,10 +743,7 @@ public class SpamFilter extends BasicPageFilter {
      *  @throws RedirectException
      */
     private void checkPatternList( final WikiContext context, final String content, final Change change ) throws RedirectException {
-        //
-        //  If we have no spam patterns defined, or we're trying to save
-        //  the page containing the patterns, just return.
-        //
+        // If we have no spam patterns defined, or we're trying to save the page containing the patterns, just return.
         if( m_spamPatterns == null || context.getPage().getName().equals( m_forbiddenWordsPage ) ) {
             return;
         }
@@ -805,9 +757,7 @@ public class SpamFilter extends BasicPageFilter {
             // log.debug("Attempting to match page contents with "+p.getPattern());
 
             if( m_matcher.contains( ch, p ) ) {
-                //
                 //  Spam filter has a match.
-                //
                 final String uid = log( context, REJECT, REASON_REGEXP + "(" + p.getPattern() + ")", ch );
 
                 log.info( "SPAM:Regexp (" + uid + "). Content matches the spam filter '" + p.getPattern() + "'" );
@@ -824,10 +774,7 @@ public class SpamFilter extends BasicPageFilter {
      *  @throws RedirectException
      */
     private void checkIPList( final WikiContext context ) throws RedirectException {
-        //
-        //  If we have no IP patterns defined, or we're trying to save
-        //  the page containing the IP patterns, just return.
-        //
+        //  If we have no IP patterns defined, or we're trying to save the page containing the IP patterns, just return.
         if( m_IPPatterns == null || context.getPage().getName().equals( m_forbiddenIPsPage ) ) {
             return;
         }
@@ -900,19 +847,14 @@ public class SpamFilter extends BasicPageFilter {
             log.error( "Diff failed", e );
         }
 
-        //
         //  Don't forget to include the change note, too
-        //
         final String changeNote = page.getAttribute( WikiPage.CHANGENOTE );
-
         if( changeNote != null ) {
             change.append( "\r\n" );
             change.append( changeNote );
         }
 
-        //
         //  And author as well
-        //
         if( page.getAuthor() != null ) {
             change.append( "\r\n" + page.getAuthor() );
         }
@@ -951,7 +893,6 @@ public class SpamFilter extends BasicPageFilter {
     private static String getUniqueID() {
         final StringBuilder sb = new StringBuilder();
         final Random rand = new Random();
-
         for( int i = 0; i < 6; i++ ) {
             final char x = ( char )( 'A' + rand.nextInt( 26 ) );
             sb.append( x );
@@ -968,7 +909,7 @@ public class SpamFilter extends BasicPageFilter {
      */
     private String getRedirectPage( final WikiContext ctx ) {
         if( m_useCaptcha ) {
-            return ctx.getURL( WikiContext.NONE, "Captcha.jsp", "page="+ctx.getEngine().encodeName( ctx.getPage().getName() ) );
+            return ctx.getURL( WikiContext.NONE, "Captcha.jsp", "page=" + ctx.getEngine().encodeName( ctx.getPage().getName() ) );
         }
 
         return ctx.getURL( WikiContext.VIEW, m_errorPage );
@@ -1060,7 +1001,6 @@ public class SpamFilter extends BasicPageFilter {
      */
     public static final boolean checkHash( final WikiContext context, final PageContext pageContext ) throws IOException {
         final String hashName = getHashFieldName( (HttpServletRequest)pageContext.getRequest() );
-
         if( pageContext.getRequest().getParameter(hashName) == null ) {
             if( pageContext.getAttribute( hashName ) == null ) {
                 final Change change = getChange( context, EditorManager.getEditedText( pageContext ) );
@@ -1099,9 +1039,9 @@ public class SpamFilter extends BasicPageFilter {
      *  @since
      */
     private class Host {
-    	
-        private long   m_addedTime = System.currentTimeMillis();
-        private long   m_releaseTime;
+
+        private long m_addedTime = System.currentTimeMillis();
+        private long m_releaseTime;
         private String m_address;
         private Change m_change;
 
@@ -1123,7 +1063,7 @@ public class SpamFilter extends BasicPageFilter {
 
         public Host( final String ipaddress, final Change change ) {
             m_address = ipaddress;
-            m_change  = change;
+            m_change = change;
             m_releaseTime = System.currentTimeMillis() + m_banTime * 60 * 1000L;
         }
         
@@ -1134,18 +1074,18 @@ public class SpamFilter extends BasicPageFilter {
         public String m_change;
         public int    m_adds;
         public int    m_removals;
-        
+
         @Override public String toString() {
             return m_change;
         }
-        
+
         @Override public boolean equals( final Object o ) {
             if( o instanceof Change ) {
                 return m_change.equals( ( ( Change )o ).m_change );
             }
             return false;
         }
-        
+
         @Override public int hashCode() {
             return m_change.hashCode() + 17;
         }