You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@jena.apache.org by an...@apache.org on 2021/11/15 16:57:13 UTC

[jena-site] branch main updated (77206b2 -> 98502db)

This is an automated email from the ASF dual-hosted git repository.

andy pushed a change to branch main
in repository https://gitbox.apache.org/repos/asf/jena-site.git.


    from 77206b2  Documentation for xloader
     new e642a05  Remove references to SDB
     new 4292074  Move to archive/
     new 98502db  Updates for revised SPARQL APIs

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .gitignore                                         |   3 +
 source/documentation/{ => archive}/csv/__index.md  |   0
 .../documentation/{ => archive}/csv/csv_index.md   |   0
 source/documentation/{ => archive}/csv/design.md   |   0
 .../documentation/{ => archive}/csv/get_started.md |   0
 .../{ => archive}/csv/implementation.md            |   0
 .../documentation/{ => archive}/hadoop/__index.md  |   0
 .../{ => archive}/hadoop/artifacts.md              |   0
 .../documentation/{ => archive}/hadoop/common.md   |   0
 source/documentation/{ => archive}/hadoop/demo.md  |   0
 .../{ => archive}/hadoop/elephas_index.md          |   0
 source/documentation/{ => archive}/hadoop/io.md    |   0
 .../documentation/{ => archive}/hadoop/mapred.md   |   0
 source/documentation/{ => archive}/sdb/__index.md  |   0
 source/documentation/{ => archive}/sdb/commands.md |   0
 .../{ => archive}/sdb/configuration.md             |   0
 .../{ => archive}/sdb/database_layouts.md          |   0
 .../{ => archive}/sdb/databases_supported.md       |   0
 .../{ => archive}/sdb/dataset_description.md       |   0
 source/documentation/{ => archive}/sdb/db_notes.md |   0
 source/documentation/{ => archive}/sdb/faq.md      |   0
 .../{ => archive}/sdb/fuseki_integration.md        |   0
 .../{ => archive}/sdb/installation.md              |   0
 source/documentation/{ => archive}/sdb/javaapi.md  |   0
 .../{ => archive}/sdb/loading_data.md              |   0
 .../{ => archive}/sdb/loading_performance.md       |   0
 .../{ => archive}/sdb/query_performance.md         |   0
 .../documentation/{ => archive}/sdb/quickstart.md  |   0
 .../documentation/{ => archive}/sdb/sdb_index.md   |   0
 .../{ => archive}/sdb/store_description.md         |   0
 .../versions/http-auth-old.md}                     |   6 +-
 source/documentation/fuseki2/fuseki-embedded.md    |   2 +-
 .../documentation/fuseki2/fuseki-main-security.md  |  13 ++
 source/documentation/geosparql/__index.md          |   2 +-
 source/documentation/geosparql/geosparql-fuseki.md |   2 +-
 source/documentation/notes/model-factory.md        |   4 +-
 source/documentation/ontology/__index.md           |   3 +-
 source/documentation/query/__index.md              | 129 ++++++++-------
 source/documentation/query/architecture.md         |   3 +-
 source/documentation/query/arq-query-eval.md       |  16 +-
 source/documentation/query/construct-quad.md       |   2 +-
 source/documentation/query/explain.md              |  10 +-
 source/documentation/query/logging.md              |   3 +-
 source/documentation/query/sparql-remote.md        |  14 +-
 source/documentation/rdfconnection/__index.md      |  26 ++-
 source/documentation/sparql-apis/__index.md        | 174 +++------------------
 source/documentation/sparql-apis/http-auth.md      | 138 ++++++++++++++++
 source/documentation/tdb/configuration.md          |   8 +-
 source/documentation/tdb/datasets.md               |   8 +-
 source/documentation/tdb/optimizer.md              |  10 +-
 source/documentation/tdb/quadfilter.md             |   6 +-
 source/documentation/tdb/tdb_transactions.md       |  14 +-
 source/documentation/txn/transactions_api.md       |  12 +-
 source/getting_started/__index.md                  |   1 -
 54 files changed, 305 insertions(+), 304 deletions(-)
 rename source/documentation/{ => archive}/csv/__index.md (100%)
 rename source/documentation/{ => archive}/csv/csv_index.md (100%)
 rename source/documentation/{ => archive}/csv/design.md (100%)
 rename source/documentation/{ => archive}/csv/get_started.md (100%)
 rename source/documentation/{ => archive}/csv/implementation.md (100%)
 rename source/documentation/{ => archive}/hadoop/__index.md (100%)
 rename source/documentation/{ => archive}/hadoop/artifacts.md (100%)
 rename source/documentation/{ => archive}/hadoop/common.md (100%)
 rename source/documentation/{ => archive}/hadoop/demo.md (100%)
 rename source/documentation/{ => archive}/hadoop/elephas_index.md (100%)
 rename source/documentation/{ => archive}/hadoop/io.md (100%)
 rename source/documentation/{ => archive}/hadoop/mapred.md (100%)
 rename source/documentation/{ => archive}/sdb/__index.md (100%)
 rename source/documentation/{ => archive}/sdb/commands.md (100%)
 rename source/documentation/{ => archive}/sdb/configuration.md (100%)
 rename source/documentation/{ => archive}/sdb/database_layouts.md (100%)
 rename source/documentation/{ => archive}/sdb/databases_supported.md (100%)
 rename source/documentation/{ => archive}/sdb/dataset_description.md (100%)
 rename source/documentation/{ => archive}/sdb/db_notes.md (100%)
 rename source/documentation/{ => archive}/sdb/faq.md (100%)
 rename source/documentation/{ => archive}/sdb/fuseki_integration.md (100%)
 rename source/documentation/{ => archive}/sdb/installation.md (100%)
 rename source/documentation/{ => archive}/sdb/javaapi.md (100%)
 rename source/documentation/{ => archive}/sdb/loading_data.md (100%)
 rename source/documentation/{ => archive}/sdb/loading_performance.md (100%)
 rename source/documentation/{ => archive}/sdb/query_performance.md (100%)
 rename source/documentation/{ => archive}/sdb/quickstart.md (100%)
 rename source/documentation/{ => archive}/sdb/sdb_index.md (100%)
 rename source/documentation/{ => archive}/sdb/store_description.md (100%)
 rename source/documentation/{query/http-auth.md => archive/versions/http-auth-old.md} (98%)
 create mode 100644 source/documentation/fuseki2/fuseki-main-security.md
 create mode 100644 source/documentation/sparql-apis/http-auth.md

[jena-site] 01/03: Remove references to SDB

Posted by an...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

andy pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/jena-site.git

commit e642a05106d257e9caa89996ff187cf099dd4ec4
Author: Andy Seaborne <an...@apache.org>
AuthorDate: Fri Nov 5 08:11:46 2021 +0000

    Remove references to SDB
---
 source/documentation/notes/model-factory.md  | 4 +---
 source/documentation/ontology/__index.md     | 3 +--
 source/documentation/query/architecture.md   | 3 +--
 source/documentation/query/arq-query-eval.md | 9 ++++-----
 source/documentation/query/explain.md        | 7 +------
 source/getting_started/__index.md            | 1 -
 6 files changed, 8 insertions(+), 19 deletions(-)

diff --git a/source/documentation/notes/model-factory.md b/source/documentation/notes/model-factory.md
index 4248410..48a1b2b 100644
--- a/source/documentation/notes/model-factory.md
+++ b/source/documentation/notes/model-factory.md
@@ -23,9 +23,7 @@ no special ontology interface.
 
 ## Database model creation
 
-_Note:_ this section previously referred to creating database models
-with RDB, a now obsolete Jena features. For methods of creating models
-with [SDB](/documentation/sdb/index.html) and [TDB](/documentation/tdb/index.html)
+For methods of creating models for [TDB](/documentation/tdb/index.html)
 please see the relevant reference sections.
 
 ## Inference model creation
diff --git a/source/documentation/ontology/__index.md b/source/documentation/ontology/__index.md
index 1936801..b4e0735 100644
--- a/source/documentation/ontology/__index.md
+++ b/source/documentation/ontology/__index.md
@@ -1913,8 +1913,7 @@ model. In this section we briefly discuss using the ontology API with
 Jena's persistent database models.
 
 For information on setting-up and accessing the persistent models
-themselves, please see the
-[SDB](/documentation/sdb/index.html) and [TDB](/documentation/tdb/index.html)
+themselves, see the [TDB](/documentation/tdb/index.html)
 reference sections.
 
 There are two somewhat separate requirements for persistently
diff --git a/source/documentation/query/architecture.md b/source/documentation/query/architecture.md
index c1c9281..fa2b578 100644
--- a/source/documentation/query/architecture.md
+++ b/source/documentation/query/architecture.md
@@ -18,8 +18,7 @@ ARQ consists of the following parts:
     -   Reference engine - direct implementation of the algebra
     -   Quad engine - direct implementation of the algebra except
     -   The main engine
-    -   SDB, a SPARQL database for large-sale persistent data (external
-        system)
+    -   TDB, a SPARQL database for large-scale persistent data
 
 -   Result set handling for the SPARQL XML results format, the
     [JSON](http://json.org) and text versions.
diff --git a/source/documentation/query/arq-query-eval.md b/source/documentation/query/arq-query-eval.md
index 27781aa..9bcae56 100644
--- a/source/documentation/query/arq-query-eval.md
+++ b/source/documentation/query/arq-query-eval.md
@@ -7,7 +7,7 @@ modify query execution within ARQ. Through these mechanisms, ARQ
 can be used to query different graph implementations and to provide
 different query evaluation and optimization strategies for
 particular circumstances. These mechanisms are used by
-[TDB](../tdb) and [SDB](../sdb/).
+[TDB](../tdb).
 
 ARQ can be [extended in various ways](extension.html) to
 incorporate custom code into a query.
@@ -190,7 +190,7 @@ extensions to query execution.
 
 ARQ provides three query engine factories; the main query engine
 factory, one for a reference query engine and one to remotely
-execute a query. SDB and TDB provide their own query engine
+execute a query. TDB provides its own query engine
 factories which they register during sub-system initialization.
 Both extend the main query engine described below.
 
@@ -457,7 +457,7 @@ custom query engine and overriding `QueryEngineMain.modifyOp`:
 
 The extension may need to provide its own dataset implementation so
 that it can detect when queries are directed to its named graph
-storage. [TDB](../tdb/) and [SDB](../sdb/) are examples of this.
+storage. [TDB](../tdb/) is an example of this.
 
 ## Mixed Graph Implementation Datasets
 
@@ -500,5 +500,4 @@ as the super-class of the new operator. They can be inserted into
 the expression to be evaluated using a custom query engine to
 intercept evaluation initialization.  When evaluation of a query
 requires the evaluation of a sub-class of `OpExt`, the `eval`
-method is called. SDB uses this to introduce an operator that is
-implemented in SQL.
+method is called.
diff --git a/source/documentation/query/explain.md b/source/documentation/query/explain.md
index f543360..3720417 100644
--- a/source/documentation/query/explain.md
+++ b/source/documentation/query/explain.md
@@ -1,12 +1,7 @@
 ---
-title: explaining ARQ queries
+title: Explaining ARQ queries
 ---
 
-*This page applies to ARQ version 2.8.6 and later. In this version query
-logging was consolidated and made uniform across ARQ, SDB and TDB.
-Details of TDB logging changed to use this logging and explanation
-framework from TDB version 0.8.8.*
-
 Optimization in ARQ proceeds on two levels. After the query is parsed,
 the SPARQL algebra for the query is generated as described in the SPARQL
 specification. High-level optimization occurs by rewriting the algebra
diff --git a/source/getting_started/__index.md b/source/getting_started/__index.md
index c5523f8..c2ed47b 100644
--- a/source/getting_started/__index.md
+++ b/source/getting_started/__index.md
@@ -35,7 +35,6 @@ The following topics are covered in the documentation:
 * [How-To's](/documentation/notes/) - various topic-specific how-to documents
 * [Ontology](/documentation/ontology/) - support for handling OWL models in Jena
 * [TDB](/documentation/tdb/) - a fast persistent triple store that stores directly to disk
-* [SQL DB](/documentation/sdb/) - constructing persistent Jena models using SQL databases as the storage layer
 * [Tools](/documentation/tools/) - various command-line tools and utilities to help developers manage RDF data and other aspects of Jena
 
 <h2><img class="logo-menu" src="/images/jena-logo/jena-logo-notext-small.png" alt="jena logo">Framework Architecture</h2>

[jena-site] 02/03: Move to archive/

Posted by an...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

andy pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/jena-site.git

commit 4292074b2d244986e483237d6eec0f5063ef55eb
Author: Andy Seaborne <an...@apache.org>
AuthorDate: Fri Nov 5 11:40:17 2021 +0000

    Move to archive/
---
 source/documentation/{ => archive}/csv/__index.md             | 0
 source/documentation/{ => archive}/csv/csv_index.md           | 0
 source/documentation/{ => archive}/csv/design.md              | 0
 source/documentation/{ => archive}/csv/get_started.md         | 0
 source/documentation/{ => archive}/csv/implementation.md      | 0
 source/documentation/{ => archive}/hadoop/__index.md          | 0
 source/documentation/{ => archive}/hadoop/artifacts.md        | 0
 source/documentation/{ => archive}/hadoop/common.md           | 0
 source/documentation/{ => archive}/hadoop/demo.md             | 0
 source/documentation/{ => archive}/hadoop/elephas_index.md    | 0
 source/documentation/{ => archive}/hadoop/io.md               | 0
 source/documentation/{ => archive}/hadoop/mapred.md           | 0
 source/documentation/{ => archive}/sdb/__index.md             | 0
 source/documentation/{ => archive}/sdb/commands.md            | 0
 source/documentation/{ => archive}/sdb/configuration.md       | 0
 source/documentation/{ => archive}/sdb/database_layouts.md    | 0
 source/documentation/{ => archive}/sdb/databases_supported.md | 0
 source/documentation/{ => archive}/sdb/dataset_description.md | 0
 source/documentation/{ => archive}/sdb/db_notes.md            | 0
 source/documentation/{ => archive}/sdb/faq.md                 | 0
 source/documentation/{ => archive}/sdb/fuseki_integration.md  | 0
 source/documentation/{ => archive}/sdb/installation.md        | 0
 source/documentation/{ => archive}/sdb/javaapi.md             | 0
 source/documentation/{ => archive}/sdb/loading_data.md        | 0
 source/documentation/{ => archive}/sdb/loading_performance.md | 0
 source/documentation/{ => archive}/sdb/query_performance.md   | 0
 source/documentation/{ => archive}/sdb/quickstart.md          | 0
 source/documentation/{ => archive}/sdb/sdb_index.md           | 0
 source/documentation/{ => archive}/sdb/store_description.md   | 0
 29 files changed, 0 insertions(+), 0 deletions(-)

diff --git a/source/documentation/csv/__index.md b/source/documentation/archive/csv/__index.md
similarity index 100%
rename from source/documentation/csv/__index.md
rename to source/documentation/archive/csv/__index.md
diff --git a/source/documentation/csv/csv_index.md b/source/documentation/archive/csv/csv_index.md
similarity index 100%
rename from source/documentation/csv/csv_index.md
rename to source/documentation/archive/csv/csv_index.md
diff --git a/source/documentation/csv/design.md b/source/documentation/archive/csv/design.md
similarity index 100%
rename from source/documentation/csv/design.md
rename to source/documentation/archive/csv/design.md
diff --git a/source/documentation/csv/get_started.md b/source/documentation/archive/csv/get_started.md
similarity index 100%
rename from source/documentation/csv/get_started.md
rename to source/documentation/archive/csv/get_started.md
diff --git a/source/documentation/csv/implementation.md b/source/documentation/archive/csv/implementation.md
similarity index 100%
rename from source/documentation/csv/implementation.md
rename to source/documentation/archive/csv/implementation.md
diff --git a/source/documentation/hadoop/__index.md b/source/documentation/archive/hadoop/__index.md
similarity index 100%
rename from source/documentation/hadoop/__index.md
rename to source/documentation/archive/hadoop/__index.md
diff --git a/source/documentation/hadoop/artifacts.md b/source/documentation/archive/hadoop/artifacts.md
similarity index 100%
rename from source/documentation/hadoop/artifacts.md
rename to source/documentation/archive/hadoop/artifacts.md
diff --git a/source/documentation/hadoop/common.md b/source/documentation/archive/hadoop/common.md
similarity index 100%
rename from source/documentation/hadoop/common.md
rename to source/documentation/archive/hadoop/common.md
diff --git a/source/documentation/hadoop/demo.md b/source/documentation/archive/hadoop/demo.md
similarity index 100%
rename from source/documentation/hadoop/demo.md
rename to source/documentation/archive/hadoop/demo.md
diff --git a/source/documentation/hadoop/elephas_index.md b/source/documentation/archive/hadoop/elephas_index.md
similarity index 100%
rename from source/documentation/hadoop/elephas_index.md
rename to source/documentation/archive/hadoop/elephas_index.md
diff --git a/source/documentation/hadoop/io.md b/source/documentation/archive/hadoop/io.md
similarity index 100%
rename from source/documentation/hadoop/io.md
rename to source/documentation/archive/hadoop/io.md
diff --git a/source/documentation/hadoop/mapred.md b/source/documentation/archive/hadoop/mapred.md
similarity index 100%
rename from source/documentation/hadoop/mapred.md
rename to source/documentation/archive/hadoop/mapred.md
diff --git a/source/documentation/sdb/__index.md b/source/documentation/archive/sdb/__index.md
similarity index 100%
rename from source/documentation/sdb/__index.md
rename to source/documentation/archive/sdb/__index.md
diff --git a/source/documentation/sdb/commands.md b/source/documentation/archive/sdb/commands.md
similarity index 100%
rename from source/documentation/sdb/commands.md
rename to source/documentation/archive/sdb/commands.md
diff --git a/source/documentation/sdb/configuration.md b/source/documentation/archive/sdb/configuration.md
similarity index 100%
rename from source/documentation/sdb/configuration.md
rename to source/documentation/archive/sdb/configuration.md
diff --git a/source/documentation/sdb/database_layouts.md b/source/documentation/archive/sdb/database_layouts.md
similarity index 100%
rename from source/documentation/sdb/database_layouts.md
rename to source/documentation/archive/sdb/database_layouts.md
diff --git a/source/documentation/sdb/databases_supported.md b/source/documentation/archive/sdb/databases_supported.md
similarity index 100%
rename from source/documentation/sdb/databases_supported.md
rename to source/documentation/archive/sdb/databases_supported.md
diff --git a/source/documentation/sdb/dataset_description.md b/source/documentation/archive/sdb/dataset_description.md
similarity index 100%
rename from source/documentation/sdb/dataset_description.md
rename to source/documentation/archive/sdb/dataset_description.md
diff --git a/source/documentation/sdb/db_notes.md b/source/documentation/archive/sdb/db_notes.md
similarity index 100%
rename from source/documentation/sdb/db_notes.md
rename to source/documentation/archive/sdb/db_notes.md
diff --git a/source/documentation/sdb/faq.md b/source/documentation/archive/sdb/faq.md
similarity index 100%
rename from source/documentation/sdb/faq.md
rename to source/documentation/archive/sdb/faq.md
diff --git a/source/documentation/sdb/fuseki_integration.md b/source/documentation/archive/sdb/fuseki_integration.md
similarity index 100%
rename from source/documentation/sdb/fuseki_integration.md
rename to source/documentation/archive/sdb/fuseki_integration.md
diff --git a/source/documentation/sdb/installation.md b/source/documentation/archive/sdb/installation.md
similarity index 100%
rename from source/documentation/sdb/installation.md
rename to source/documentation/archive/sdb/installation.md
diff --git a/source/documentation/sdb/javaapi.md b/source/documentation/archive/sdb/javaapi.md
similarity index 100%
rename from source/documentation/sdb/javaapi.md
rename to source/documentation/archive/sdb/javaapi.md
diff --git a/source/documentation/sdb/loading_data.md b/source/documentation/archive/sdb/loading_data.md
similarity index 100%
rename from source/documentation/sdb/loading_data.md
rename to source/documentation/archive/sdb/loading_data.md
diff --git a/source/documentation/sdb/loading_performance.md b/source/documentation/archive/sdb/loading_performance.md
similarity index 100%
rename from source/documentation/sdb/loading_performance.md
rename to source/documentation/archive/sdb/loading_performance.md
diff --git a/source/documentation/sdb/query_performance.md b/source/documentation/archive/sdb/query_performance.md
similarity index 100%
rename from source/documentation/sdb/query_performance.md
rename to source/documentation/archive/sdb/query_performance.md
diff --git a/source/documentation/sdb/quickstart.md b/source/documentation/archive/sdb/quickstart.md
similarity index 100%
rename from source/documentation/sdb/quickstart.md
rename to source/documentation/archive/sdb/quickstart.md
diff --git a/source/documentation/sdb/sdb_index.md b/source/documentation/archive/sdb/sdb_index.md
similarity index 100%
rename from source/documentation/sdb/sdb_index.md
rename to source/documentation/archive/sdb/sdb_index.md
diff --git a/source/documentation/sdb/store_description.md b/source/documentation/archive/sdb/store_description.md
similarity index 100%
rename from source/documentation/sdb/store_description.md
rename to source/documentation/archive/sdb/store_description.md

[jena-site] 03/03: Updates for revised SPARQL APIs

Posted by an...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

andy pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/jena-site.git

commit 98502db66446b725ddf4e6b97c8e618dc711dba4
Author: Andy Seaborne <an...@apache.org>
AuthorDate: Fri Nov 5 16:04:36 2021 +0000

    Updates for revised SPARQL APIs
---
 .gitignore                                         |   3 +
 .../versions/http-auth-old.md}                     |   6 +-
 source/documentation/fuseki2/fuseki-embedded.md    |   2 +-
 .../documentation/fuseki2/fuseki-main-security.md  |  13 ++
 source/documentation/geosparql/__index.md          |   2 +-
 source/documentation/geosparql/geosparql-fuseki.md |   2 +-
 source/documentation/query/__index.md              | 129 ++++++++-------
 source/documentation/query/arq-query-eval.md       |   7 +-
 source/documentation/query/construct-quad.md       |   2 +-
 source/documentation/query/explain.md              |   3 +-
 source/documentation/query/logging.md              |   3 +-
 source/documentation/query/sparql-remote.md        |  14 +-
 source/documentation/rdfconnection/__index.md      |  26 ++-
 source/documentation/sparql-apis/__index.md        | 174 +++------------------
 source/documentation/sparql-apis/http-auth.md      | 138 ++++++++++++++++
 source/documentation/tdb/configuration.md          |   8 +-
 source/documentation/tdb/datasets.md               |   8 +-
 source/documentation/tdb/optimizer.md              |  10 +-
 source/documentation/tdb/quadfilter.md             |   6 +-
 source/documentation/tdb/tdb_transactions.md       |  14 +-
 source/documentation/txn/transactions_api.md       |  12 +-
 21 files changed, 297 insertions(+), 285 deletions(-)

diff --git a/.gitignore b/.gitignore
index 232d865..3fb554e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,3 +9,6 @@ target/
 *.iml
 .idea
 .java-version
+
+# Hugo
+.hugo_build.lock
\ No newline at end of file
diff --git a/source/documentation/query/http-auth.md b/source/documentation/archive/versions/http-auth-old.md
similarity index 98%
rename from source/documentation/query/http-auth.md
rename to source/documentation/archive/versions/http-auth-old.md
index 86f57bc..f5f85a5 100644
--- a/source/documentation/query/http-auth.md
+++ b/source/documentation/archive/versions/http-auth-old.md
@@ -1,5 +1,9 @@
 ---
-title: HTTP Authentication in ARQ
+title: HTTP Authentication in ARQ (Superseded)
+---
+
+<i>Documentation for HTTP Authentication (Jena 3.1.1 to Jena 4.2.0) using Apache Commons HttpClient.</i>
+
 ---
 
 After [Jena 3.1.0](#http-authentication-from-jena-311), Jena exposes the underlying HTTP Commons functionality to support a range of authentication mechanisms as well as [other HTTP configuration][16]. From [Jena 3.0.0 through Jena 3.1.0](#http-authentication-from-jena-300-through-310) there is a Jena-specific framework that provides a uniform mechanism for HTTP authentication. This documentation is therefore divided into two sections. The first explains how to use HTTP Commons code, and [...]
diff --git a/source/documentation/fuseki2/fuseki-embedded.md b/source/documentation/fuseki2/fuseki-embedded.md
index 07db6ee..a74ed42 100644
--- a/source/documentation/fuseki2/fuseki-embedded.md
+++ b/source/documentation/fuseki2/fuseki-embedded.md
@@ -34,7 +34,7 @@ or read the dataset and see any updates made by remote systems:
     // Read transaction.
     Txn.execRead(dsg, ()->{
     Dataset ds = DatasetFactory.wrap(dsg) ;
-    try (QueryExecution qExec = QueryExecutionFactory.create("SELECT * { ?s  ?o}", ds) ) {
+    try (QueryExecution qExec = QueryExecution.create("SELECT * { ?s ?p ?o}", ds) ) {
         ResultSet rs = qExec.execSelect() ;
         ResultSetFormatter.out(rs) ;
       }
diff --git a/source/documentation/fuseki2/fuseki-main-security.md b/source/documentation/fuseki2/fuseki-main-security.md
new file mode 100644
index 0000000..3d39e37
--- /dev/null
+++ b/source/documentation/fuseki2/fuseki-main-security.md
@@ -0,0 +1,13 @@
+---
+title: Security in Fuseki2 server
+---
+This page covers security for Fuseki Main.
+
+See other [documentation](./fuseki-security.html) for the webapp packaging of Fuseki.
+
+## Serving RDF
+
+For any use of users-password information, and especially HTTP basic
+authentication, information is visible in the HTTP headers. When serving RDF and SPARQL requests, using HTTPS is necessary to avoid snooping.
+Digest authentication is also stronger over HTTPS
+because it protects against man-in-the-middle attacks.
\ No newline at end of file
diff --git a/source/documentation/geosparql/__index.md b/source/documentation/geosparql/__index.md
index 7b5a35c..73a7adb 100644
--- a/source/documentation/geosparql/__index.md
+++ b/source/documentation/geosparql/__index.md
@@ -107,7 +107,7 @@ To query a Model with GeoSPARQL or standard SPARQL:
     Model model = .....;
     String query = ....;
     
-    try (QueryExecution qe = QueryExecutionFactory.create(query, model)) {
+    try (QueryExecution qe = QueryExecution.create(query, model)) {
         ResultSet rs = qe.execSelect();
         ResultSetFormatter.outputAsTSV(rs);
     }
diff --git a/source/documentation/geosparql/geosparql-fuseki.md b/source/documentation/geosparql/geosparql-fuseki.md
index 2de806b..456718f 100644
--- a/source/documentation/geosparql/geosparql-fuseki.md
+++ b/source/documentation/geosparql/geosparql-fuseki.md
@@ -94,7 +94,7 @@ Once the default server is running it can be queried using Jena as follows:
 
     String service = "http://localhost:3030/ds";
     String query = ....;
-    try (QueryExecution qe = QueryExecutionFactory.sparqlService(service, query)) {
+    try (QueryExecution qe = QueryExecution.service(service).query(query).build()) {
         ResultSet rs = qe.execSelect();
         ResultSetFormatter.outputAsTSV(rs);
     }
diff --git a/source/documentation/query/__index.md b/source/documentation/query/__index.md
index b3caea4..6f8fe9a 100644
--- a/source/documentation/query/__index.md
+++ b/source/documentation/query/__index.md
@@ -11,58 +11,58 @@ SPARQL is the query language developed by the W3C
 
 ## ARQ Features
 
--   Standard SPARQL
--   Free text search via Lucene
--   SPARQL/Update
--   Access and extension of the SPARQL algebra
--   Support for custom filter functions, including javascript functions
--   Property functions for custom processing of semantic
+- Standard SPARQL
+- Free text search via Lucene
+- SPARQL/Update
+- Access and extension of the SPARQL algebra
+- Support for custom filter functions, including javascript functions
+- Property functions for custom processing of semantic
     relationships
--   Aggregation, GROUP BY and assignment as SPARQL extensions
--   Support for federated query
--   Support for extension to other storage systems
--   Client-support for remote access to any SPARQL endpoint
+- Aggregation, GROUP BY and assignment as SPARQL extensions
+- Support for federated query
+- Support for extension to other storage systems
+- Client-support for remote access to any SPARQL endpoint
 
 ## Introduction
 
--   [A Brief Tutorial on SPARQL](/tutorials/sparql.html)
--   [Application API](app_api.html) - covers the majority of
+- [A Brief Tutorial on SPARQL](/tutorials/sparql.html)
+- [Application API](app_api.html) - covers the majority of
     application usages
--   [Frequently Asked Questions](faq.html)
--   [ARQ Support](support.html)
--   Application [javadoc](/documentation/javadoc/arq/index.html)
--   [Command line utilities](cmds.html)
--   [Querying remote SPARQL services](sparql-remote.html)
-    - [HTTP Authentication for ARQ](http-auth.html)
--   [Logging](logging.html)
--   [Explaining queries](explain.html)
--   [Tutorial: manipulating SPARQL using ARQ](manipulating_sparql_using_arq.html)
--   [Basic federated query (`SERVICE`)](service.html)
--   [Property paths](property_paths.html)
--   [GROUP BY and counting](group-by.html)
--   [SELECT expressions](select_expr.html)
--   [Sub-SELECT](sub-select.html)
--   [Negation](negation.html)
+- [Frequently Asked Questions](faq.html)
+- [ARQ Support](support.html)
+- Application [javadoc](/documentation/javadoc/arq/index.html)
+- [Command line utilities](cmds.html)
+- [Querying remote SPARQL services](sparql-remote.html)
+  - [HTTP Authentication for ARQ](http-auth.html)
+- [Logging](logging.html)
+- [Explaining queries](explain.html)
+- [Tutorial: manipulating SPARQL using ARQ](manipulating_sparql_using_arq.html)
+- [Basic federated query (`SERVICE`)](service.html)
+- [Property paths](property_paths.html)
+- [GROUP BY and counting](group-by.html)
+- [SELECT expressions](select_expr.html)
+- [Sub-SELECT](sub-select.html)
+- [Negation](negation.html)
 
 Features of ARQ that are legal SPARQL syntax
 
--   [Conditions in FILTERs](function_forms.html)
-
--   [Free text searches](text-query.html)
--   [Accessing lists](rdf_lists.html) (RDF collections)
--   [Extension mechanisms](extension.html)
-    -   [Custom Expression Functions](extension.html#valueFunctions)
-    -   [Property Functions](extension.html#property-functions)
--   Library
-    -   [Expression function library](library-function.html)
-    -   [Property function library](library-propfunc.html)
--   [Writing SPARQL functions](writing_functions.html)
--   [Writing SPARQL functions in JavaScript](javascript-functions.html)
--   [Constructing queries programmatically](programmatic.html)
--   [Parameterized query strings](parameterized-sparql-strings.html)
--   [ARQ and the SPARQL algebra](algebra.html)
--   [Extending ARQ query execution and accessing different storage implementations](arq-query-eval.html)
--   [Custom aggregates](custom_aggregates.html)
+- [Conditions in FILTERs](function_forms.html)
+
+- [Free text searches](text-query.html)
+- [Accessing lists](rdf_lists.html) (RDF collections)
+- [Extension mechanisms](extension.html)
+  - [Custom Expression Functions](extension.html#valueFunctions)
+  - [Property Functions](extension.html#property-functions)
+- Library
+  - [Expression function library](library-function.html)
+  - [Property function library](library-propfunc.html)
+- [Writing SPARQL functions](writing_functions.html)
+- [Writing SPARQL functions in JavaScript](javascript-functions.html)
+- [Constructing queries programmatically](programmatic.html)
+- [Parameterized query strings](parameterized-sparql-strings.html)
+- [ARQ and the SPARQL algebra](algebra.html)
+- [Extending ARQ query execution and accessing different storage implementations](arq-query-eval.html)
+- [Custom aggregates](custom_aggregates.html)
 
 ## Extensions
 
@@ -70,21 +70,21 @@ Feature of ARQ that go beyond SPARQL syntax. The default query
 language is standard SPARQL. These features require the query to be
 parsed with an explicit declaration of `Syntax.syntaxARQ`.
 
--   [RDF-star](https://w3c.github.io/rdf-star/)
--   Operators and functions
+- [RDF-star](https://w3c.github.io/rdf-star/)
+- Operators and functions
     `[MOD](https://www.w3.org/TR/xpath-functions/#func-numeric-mod)`
     and `[IDIV](https://www.w3.org/TR/xpath-functions/#func-numeric-integer-divide)` for modulus and integer division.
--   [LET variable assignment](assignment.html)
--   [Order results using a Collation](collation.html)
--   [Construct Quad](construct-quad.html)
--   [Generate JSON from SPARQL](generate-json-from-sparql.html)
+- [LET variable assignment](assignment.html)
+- [Order results using a Collation](collation.html)
+- [Construct Quad](construct-quad.html)
+- [Generate JSON from SPARQL](generate-json-from-sparql.html)
 
 ## Update
 
 ARQ supports the W3C standard SPARQL Update language.
 
--   [SPARQL Update](http://www.w3.org/TR/sparql11-update/)
--   [The ARQ SPARQL/Update API](update.html)
+- [SPARQL Update](http://www.w3.org/TR/sparql11-update/)
+- [The ARQ SPARQL/Update API](update.html)
 
 ## See Also
 
@@ -94,30 +94,29 @@ ARQ supports the W3C standard SPARQL Update language.
 
 ## W3C Documents
 
--   [SPARQL Query Language specification](http://www.w3.org/TR/sparql11-query/)
--   [SPARQL Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/)
--   [SPARQL Protocol](http://www.w3.org/TR/rdf-sparql-protocol/)
+- [SPARQL Query Language specification](http://www.w3.org/TR/sparql11-query/)
+- [SPARQL Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/)
+- [SPARQL Protocol](http://www.w3.org/TR/rdf-sparql-protocol/)
 
 ## Articles
 
 Articles and documentation elsewhere:
 
--   [Introducing SPARQL: Querying the Semantic Web](http://xml.com/lpt/a/2005/11/16/introducing-sparql-querying-semantic-web-tutorial.html)
+- [Introducing SPARQL: Querying the Semantic Web](http://xml.com/lpt/a/2005/11/16/introducing-sparql-querying-semantic-web-tutorial.html)
     ([xml.com](http://www.xml.com/) article by Leigh Dodds)
--   [Search RDF data with SPARQL](http://www.ibm.com/developerworks/xml/library/j-sparql/)
+- [Search RDF data with SPARQL](http://www.ibm.com/developerworks/xml/library/j-sparql/)
     (by Phil McCarthy) - article published on IBM developer works about
     SPARQL and Jena.
--   [SPARQL reference card](http://www.dajobe.org/2005/04-sparql/)
+- [SPARQL reference card](http://www.dajobe.org/2005/04-sparql/)
     (by [Dave Beckett](http://www.dajobe.org/))
--   [Parameterised Queries with SPARQL and ARQ](http://www.ldodds.com/blog/archives/000251.html)
+- [Parameterised Queries with SPARQL and ARQ](http://www.ldodds.com/blog/archives/000251.html)
     (by Leigh Dodds)
--   [Writing an ARQ Extension Function](http://www.ldodds.com/blog/archives/000252.html)
+- [Writing an ARQ Extension Function](http://www.ldodds.com/blog/archives/000252.html)
     (by Leigh Dodds)
 
 ## RDF Syntax Specifications
 
--   [Turtle](https://www.w3.org/TR/turtle/)
--   [N-Triples](https://www.w3.org/TR/n-triples)
--   [TriG](https://www.w3.org/TR/trig/)
--   [N-Quads](https://www.w3.org/TR/n-quads/)
-
+- [Turtle](https://www.w3.org/TR/turtle/)
+- [N-Triples](https://www.w3.org/TR/n-triples)
+- [TriG](https://www.w3.org/TR/trig/)
+- [N-Quads](https://www.w3.org/TR/n-quads/)
diff --git a/source/documentation/query/arq-query-eval.md b/source/documentation/query/arq-query-eval.md
index 9bcae56..4527ea3 100644
--- a/source/documentation/query/arq-query-eval.md
+++ b/source/documentation/query/arq-query-eval.md
@@ -182,8 +182,8 @@ The steps from algebra generation to query evaluation are carried
 out when a query is executed via the `QueryExecution.execSelect` or
 other `QueryExecution` exec operation. It is possible to carry out
 storage-specific operations when the query execution is created. A
-query engine works in conjunction with a `QueryExecution` created
-by the `QueryExecutionFactory` to provide the evaluation of a query
+query engine works in conjunction with a `QueryExecution`
+to provide the evaluation of a query
 pattern. `QueryExecutionBase` provides all the machinery for the
 different result types and does not need to be modified by
 extensions to query execution.
@@ -490,8 +490,7 @@ While it is possible to replace the entire process of query
 evaluation, this is a substantial endeavour. `QueryExecutionBase`
 provides the machinery for result presentation (`SELECT`,
 `CONSTRUCT`, `DESCRIBE`, `ASK`), leaving the work of pattern
-evaluation to the custom query engine. `QueryExecutionFactory`
-assumes that `QueryExecutionBase` will be used.
+evaluation to the custom query engine.
 
 ## Algebra Extensions
 
diff --git a/source/documentation/query/construct-quad.md b/source/documentation/query/construct-quad.md
index f88d927..1b23695 100644
--- a/source/documentation/query/construct-quad.md
+++ b/source/documentation/query/construct-quad.md
@@ -119,7 +119,7 @@ previous sections, e.g.
 
     String queryString = " CONSTRUCT { GRAPH <http://example/ns#g1> {?s ?p ?o} } WHERE {?s ?p ?o}" ;
     Query query = QueryFactory.create(queryString, Syntax.syntaxARQ);
-    try ( QueryExecution qExec = QueryExecutionFactory.sparqlService(serviceQuery, query) ) { // serviceQuery is the URL of the remote service
+    try ( QueryExecution qExec = QueryExecution.service(serviceQuery).query(query).build() ) { // serviceQuery is the URL of the remote service
         Iterator<Quad> result = qExec.execConstructQuads();
         ...
     }
diff --git a/source/documentation/query/explain.md b/source/documentation/query/explain.md
index 3720417..f4d55bd 100644
--- a/source/documentation/query/explain.md
+++ b/source/documentation/query/explain.md
@@ -74,8 +74,7 @@ globally:
 and it may also be set on an individual query execution using its local
 context.
 
-     try(QueryExecution qExec = QueryExecutionFactory.create(...)) {
-         qExec.getContext().set(ARQ.symLogExec, Explain.InfoLevel.ALL) ;
+     try(QueryExecution qExec = QueryExecution.create() ... .set(ARQ.symLogExec, Explain.InfoLevel.ALL).build() ) {
          ResultSet rs = qExec.execSelect() ;
          ...
      }
diff --git a/source/documentation/query/logging.md b/source/documentation/query/logging.md
index 4c25b34..d5f4c14 100644
--- a/source/documentation/query/logging.md
+++ b/source/documentation/query/logging.md
@@ -96,8 +96,7 @@ globally:
 and it may also be set on an individual query execution using its local
 context.
 
-     try(QueryExecution qExec = QueryExecutionFactory.create(...)) {
-        qExec.getContext().set(ARQ.symLogExec, Explain.InfoLevel.ALL) ;
+     try(QueryExecution qExec = QueryExecution.create()... .set(ARQ.symLogExec, Explain.InfoLevel.ALL).build()) {
         ...
      }
 
diff --git a/source/documentation/query/sparql-remote.md b/source/documentation/query/sparql-remote.md
index 40f0485..27da702 100644
--- a/source/documentation/query/sparql-remote.md
+++ b/source/documentation/query/sparql-remote.md
@@ -5,8 +5,7 @@ title: ARQ - Querying Remote SPARQL Services
 SPARQL is a
 [query language](http://www.w3.org/TR/sparql11-query/) and a
 [remote access protocol](http://www.w3.org/2001/sw/DataAccess/proto-wd/).
-The remote access protocol can be used with plain HTTP or over
-[SOAP](http://www.w3.org/TR/soap12-part0/).
+The remote access protocol runs over HTTP.
 
 See [Fuseki](../fuseki2/index.html) for an implementation of the
 SPARQL protocol over HTTP. Fuseki uses ARQ to provide SPARQL 
@@ -16,12 +15,9 @@ ARQ includes a query engine capable of using the HTTP version.
 
 ## From your application
 
-The `QueryExecutionFactory` has methods for creating a
-`QueryExecution` object for remote use.
-`QueryExecutionFactory.sparqlService`
-
-These methods build a query execution object that uses the query
-engine in `org.apache.jena.sparql.engine.http`.
+The `QueryExecutionHTTP` has methods for creating a
+`QueryExecution` object for remote use. There are various
+HTTP specific settings; the default should work in most cases.
 
 The remote request is made when the `execSelect`, `execConstruct`,
 `execDescribe` or `execAsk` method is called.
@@ -43,7 +39,7 @@ sending.
 
 ## Authentication
 
-ARQ provides a flexible API for authenticating against remote services, see the [HTTP Authentication](http-auth.html) documentation for more details.
+ARQ provides a flexible API for authenticating against remote services, see the [HTTP Authentication](../sparql-apis/http-auth.html) documentation for more details.
 
 ## Firewalls and Proxies
 
diff --git a/source/documentation/rdfconnection/__index.md b/source/documentation/rdfconnection/__index.md
index d676e0d..3e976ab 100644
--- a/source/documentation/rdfconnection/__index.md
+++ b/source/documentation/rdfconnection/__index.md
@@ -24,7 +24,7 @@ passing styles, as well the more basic sequence of methods calls.
 For example: using `try-resources` to manage the connection, and perform two operations, one to load
 some data, and one to make a query can be written as:
 
-    try ( RDFConnection conn = RDFConnectionFactory.connect(...) ) {
+    try ( RDFConnection conn = RDFConnection.connect(...) ) {
         conn.load("data.ttl") ;
         conn.querySelect("SELECT DISTINCT ?s { ?s ?p ?o }", (qs)->
            Resource subject = qs.getResource("s") ;
@@ -35,7 +35,7 @@ some data, and one to make a query can be written as:
 This could have been written as (approximately -- the error handling is better
 in the example above):
 
-    RDFConnection conn = RDFConnectionFactory.connect(...)
+    RDFConnection conn = RDFConnection.connect(...)
     conn.load("data.ttl") ;
     QueryExecution qExec = conn.query("SELECT DISTINCT ?s { ?s ?p ?o }") ;
     ResultSet rs = qExec.execSelect() ;
@@ -47,10 +47,6 @@ in the example above):
     qExec.close() ;
     conn.close() ;
 
-Jena also provides a separate
-[SPARQL over JDBC driver](/documentation/jdbc/index.html)
-library.
-
 ## Transactions
 
 Transactions are the preferred way to work with RDF data.
@@ -62,7 +58,7 @@ to excessive overhead.
 The `Txn` class provides a Java8-style transaction API.  Transactions are
 code passed in the `Txn` library that handles the transaction lifecycle.
 
-    try ( RDFConnection conn = RDFConnectionFactory.connect(...) ) {
+    try ( RDFConnection conn = RDFConnection.connect(...) ) {
         Txn.execWrite(conn, ()-> {
             conn.load("data1.ttl") ;
             conn.load("data2.ttl") ;
@@ -75,7 +71,7 @@ code passed in the `Txn` library that handles the transaction lifecycle.
 
 The traditional style of explicit `begin`, `commit`, `abort` is also available.
 
-    try ( RDFConnection conn = RDFConnectionFactory.connect(...) ) {
+    try ( RDFConnection conn = RDFConnection.connect(...) ) {
         conn.begin(ReadWrite.WRITE) ;
         try {
             conn.load("data1.ttl") ;
@@ -155,7 +151,7 @@ add more RDF data into a graph, and delete a graph from a dataset.
 
 For example: load two files:
 
-    try ( RDFConnection conn = RDFConnectionFactory.connect(...) ) {
+    try ( RDFConnection conn = RDFConnection.connect(...) ) {
         conn.load("data1.ttl") ;
         conn.load("data2.nt") ;
       }
@@ -195,16 +191,16 @@ in this mode.
 ## Query Usage
 
 `RDFConnection` provides methods for each of the SPARQL query forms (`SELECT`,
-`CONSTRUCT`, `DESCRIBE`, `ASK`) as well as a way to get the lower level
-`QueryExecution` for specialized configuration.
+`CONSTRUCT`, `DESCRIBE`, `ASK`) as well as a way to get the
+`QueryExecution` for specialized configuration. When creating a
+`QueryExecution` explicitly, care should be taken to close it.
 
-When creating an `QueryExecution` explicitly, care should be taken to close
-it. If the application wishes to capture the result set from a SELECT query and
+If the application wishes to capture the result set from a SELECT query and
 retain it across the lifetime of the transaction or `QueryExecution`, then
 the application should create a copy which is not attached to any external system
 with `ResultSetFactory.copyResults`.
 
-      try ( RDFConnection conn = RDFConnectionFactory.connect("foo") ) {
+      try ( RDFConnection conn = RDFConnection.connect("https://...") ) {
           ResultSet safeCopy =
               Txn.execReadReturn(conn, ()-> {
                   // Process results by row:
@@ -221,7 +217,7 @@ with `ResultSetFactory.copyResults`.
 
 SPARQL Update operations can be performed and mixed with other operations.
 
-      try ( RDFConnection conn = RDFConnectionFactory.connect(...) ) {
+      try ( RDFConnection conn = RDFConnection.connect(...) ) {
           Txn.execWrite(conn, ()-> {
              conn.update("DELETE DATA { ... }" ) ;
              conn.load("data.ttl") ;
diff --git a/source/documentation/sparql-apis/__index.md b/source/documentation/sparql-apis/__index.md
index 8649df4..d26d8d3 100644
--- a/source/documentation/sparql-apis/__index.md
+++ b/source/documentation/sparql-apis/__index.md
@@ -64,9 +64,10 @@ Both API and GPI provide builders for detailed setup, particularly for remote
 usage over HTTP and HTTPS where detailed control of the HTTP requests is
 sometimes necessary to work with other triple stores.
 
-Factory style functions for many common usage patterns are retained in
-`QueryExecutionFactory`, `UpdateExecutionFactory`. Note that any methods that
-involved Apache HttpClient objects have been removed.
+Use of the builders is preferred to factories. Factory style functions for many
+common usage patterns are retained in `QueryExecutionFactory`,
+`UpdateExecutionFactory`. Note that any methods that involved Apache HttpClient
+objects have been removed.
 
 ## Changes from Jena 4.2.0 to Jena 4.3.0 {#changes}
 
@@ -76,7 +77,7 @@ involved Apache HttpClient objects have been removed.
   `QueryExecutionBuilder`.
 
 * HTTP usage provided by the JDK `java.net.http` package, with challenge-based
-  authentication provided on top by Jena. [See below](#auth).
+  authentication provided on top by Jena. [See the authentication documentation](./http-auth.html).
 
 * Authentication support is uniformly applied to query, update, GSP and `SERVICE`.
 
@@ -120,7 +121,7 @@ involved Apache HttpClient objects have been removed.
 ```
 or the less flexible:
 ```
-    try ( RDFConnection conn = RDFConnectionFactory.connect(dataURL) ) {
+    try ( RDFConnection conn = RDFConnection.connect(dataURL) ) {
         conn.update("INSERT DATA{}");
         conn.queryAsk("ASK{}");
     }
@@ -128,17 +129,6 @@ or the less flexible:
 
 ## Query Execution
 
-Factory Examples
-
-```
-  Dataset dataset = ...
-  Query query = ...
-  try ( QueryExecution qExec = QueryExecutionFactory.create(query, dataset) ) {
-       ResultSet results = qExec.execSelect();
-       ... use results ...
-  }
-```
-
 Builder Examples
 Builders are reusable and modifiable after a "build" operation.
 
@@ -162,6 +152,18 @@ try ( QueryExecution qExec = QueryExecutionHTTP.service("http://....")
     ... use results ...
 }
 ```
+
+Factory Examples
+
+```
+  Dataset dataset = ...
+  Query query = ...
+  try ( QueryExecution qExec = QueryExecutionFactory.create(query, dataset) ) {
+       ResultSet results = qExec.execSelect();
+       ... use results ...
+  }
+```
+
 More complex setup:
 ```
 // JDK HttpClient
@@ -254,142 +256,4 @@ which has access to the headers and the query string parameters of the request.
 
 ## Authentication {#auth}
 
-For any use of users-password information, and especially HTTP basic
-authentication, information is visible in the HTTP headers. Using HTTPS is
-necessary to avoid snooping.  Digest authentication is also stronger over HTTPS
-because it protects against man-in-the-middle attacks.
-
-There are 5 variations:
-
-1. Basic authentication
-2. Challenge-Basic authentication
-3. Challenge-Digest authentication
-4. URL user (that is, `user@host.net` in the URL)
-5. URL user and password in the URL (that is, `user:password@host.net` in the URL)
-
-Basic authentication occurs where the app provides the user and password
-information to the JDK `HttpClient` and that information is always used when
-sending HTTP requests with that `HttpClient`. It does not require an initial
-request-challenge-resend to initiate. This is provided natively by the `java.net.http`
-JDK code. See `HttpClient.newBuilder().authenticate(...)`.
-
-Challenge based authentication, for "basic" or "digest", are provided by Jena.
-The challenge happens on the first contact with the remote endpoint and the
-server returns a 401 response with an HTTP header saying which style of
-authentication is required. There is a registry of users name and password for
-endpoints which is consulted and the appropriate
-[`Authorization:`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Authorization)
-header is generated then the request resent. If no registration matches, the 401
-is passed back to the application as an exception.
-
-Because it is a challenge response to a request, the request must be sent twice,
-first to trigger the challenge and then again with the HTTP authentication
-information.  To make this automatic, the first request must not be a streaming
-request (the stream is not repeatable). All HTTP request generated by Jena are
-repeatable.
-
-The URL can contain a `userinfo` part, either the `users@host` form, or the `user:password@host` form.
-If just the user is given, the authentication environment is consulted for registered users-password information. If user and password is given, the details as given are used. This latter form is not recommended and should only be used if necessary because the password is in-clear in the SPARQL
-query.
-
-### JDK HttpClient.authenticator
-
-
-```java
-    // Basic or Digest - determined when the challenge happens.
-    AuthEnv.get().registerUsernamePassword(URI.create(dataURL), "user", "password");
-    try ( QueryExecution qExec = QueryExecutionHTTP.service(dataURL)
-            .endpoint(dataURL)
-            .queryString("ASK{}")
-            .build()) {
-        qExec.execAsk();
-    }
-```
-
-alternatively, the java platform provides basic authentication. 
-This is not challenge based - any request sent using a `HttpClient` configured 
-with an authenticator will include the authentication details. 
-(Caution - including sending username/password to the wrong site!).
-Digest authentication must use `AuthEnv.get().registerUsernamePassword`.
-
-```java
-    Authenticator authenticator = AuthLib.authenticator("user", "password");
-    HttpClient httpClient = HttpClient.newBuilder()
-            .authenticator(authenticator)
-            .build();
-```
-
-```java
-    // Use with RDFConnection      
-    try ( RDFConnection conn = RDFConnectionRemote.service(dataURL)
-            .httpClient(httpClient)
-            .build()) {
-        conn.queryAsk("ASK{}");
-    }
-```
-
-```java
-    try ( QueryExecution qExec = QueryExecutionHTTP.service(dataURL)
-            .httpClient(httpClient)
-            .endpoint(dataURL)
-            .queryString("ASK{}")
-            .build()) {
-        qExec.execAsk();
-    }
-```
-
-### Challenge registration
-
-`AuthEnv` maintains a registry of credentials and also a registry of which service URLs
-the credentials should be used. It supports registration of endpoint prefixes so that one
-registration will apply to all URLs starting with a common root.
-
-The main function is `AuthEnv.get().registerUsernamePassword`.
-
-```java
-   // Application setup code 
-   AuthEnv.get().registerUsernamePassword("username", "password");
-```
-
-```java
-   ...
-   try ( QueryExecution qExec = QueryExecutionHTTP.service(dataURL)
-        .endpoint(dataURL)
-        .queryString("ASK{}")
-        .build()) {
-       qExec.execAsk();
-   }
-```
-
-When an HTTP 401 response with an `WWW-Authenticate` header is received, the Jena http handling code
-will will look for a suitable authentication registration (exact or longest prefix), and retry the
-request. If it succeeds, a modifier is installed so all subsequent request to the same endpoint will
-have the authentication header added and there is no challenge round-trip.
-
-### <tt>SERVICE</tt>
-
-The same mechanism is used for the URL in a SPARQL `SERVICE` clause.  If there is a 401 challenge,
-the registry is consulted and authetication applied.
-
-In addition, if the SERVICE URL has a username as the `userinfo` (that is, `https://users@some.host/...`),
-that user name is used to look in the authentication registry.
-
-If the `userinfo` is of the form "username:password" then the information as given in the URL is
-used.
-
-```
-    AuthEnv.get().registerUsernamePassword(URI.create("http://host/sparql"), "u", "p");
-     // Registration applies to SERVICE.
-    Query query = QueryFactory.create("SELECT * { SERVICE <http://host/sparql> { ?s ?p ?o } }");
-    try ( QueryExecution qExec = QueryExecution.create().query(query).dataset(...).build() ) {
-        System.out.println("Call using SERVICE...");
-        ResultSet rs = qExec.execSelect();
-        ResultSetFormatter.out(rs);
-    }
-```
-
-## Environment
-
-`AuthEnv` - passwordRegistry , authModifiers
-`RegistryHttpClient`
-
+[Documentation for authentication](./http-auth.html).
diff --git a/source/documentation/sparql-apis/http-auth.md b/source/documentation/sparql-apis/http-auth.md
new file mode 100644
index 0000000..a867418
--- /dev/null
+++ b/source/documentation/sparql-apis/http-auth.md
@@ -0,0 +1,138 @@
+---
+title: HTTP Authentication
+---
+
+<i>[Old documentation](../archive/versions/http-auth-old.html) (Jena 3.1.1 to Jena 4.2.0)</i>
+
+Jena 4.3.0 and later uses the JDK `java.net.http` package. Jena adds API support
+for challenge-based authentication and also provides HTTP digest authentication.
+
+## Authentication {#auth}
+
+There are 5 variations:
+
+1. Basic authentication
+2. Challenge-Basic authentication
+3. Challenge-Digest authentication
+4. URL user (that is, `user@host.net` in the URL)
+5. URL user and password in the URL (that is, `user:password@host.net` in the URL)
+
+Basic authentication occurs where the app provides the user and password
+information to the JDK `HttpClient` and that information is always used when
+sending HTTP requests with that `HttpClient`. It does not require an initial
+request-challenge-resend to initiate. This is provided natively by the `java.net.http`
+JDK code. See `HttpClient.newBuilder().authenticator(...)`.
+
+Challenge-based authentication, for "basic" or "digest", is provided by Jena.
+The challenge happens on the first contact with the remote endpoint and the
+server returns a 401 response with an HTTP header saying which style of
+authentication is required. There is a registry of user names and passwords for
+endpoints which is consulted and the appropriate
+[`Authorization:`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Authorization)
+header is generated then the request resent. If no registration matches, the 401
+is passed back to the application as an exception.
+
+Because it is a challenge response to a request, the request must be sent twice,
+first to trigger the challenge and then again with the HTTP authentication
+information.  To make this automatic, the first request must not be a streaming
+request (the stream is not repeatable). All HTTP requests generated by Jena are
+repeatable.
+
+The URL can contain a `userinfo` part, either the `user@host` form, or the `user:password@host` form.
+If just the user is given, the authentication environment is consulted for registered user-password information. If user and password are given, the details as given are used. This latter form is not recommended and should only be used if necessary because the password is in-clear in the SPARQL
+query.
+
+### JDK HttpClient.authenticator
+
+```java
+    // Basic or Digest - determined when the challenge happens.
+    AuthEnv.get().registerUsernamePassword(URI.create(dataURL), "user", "password");
+    try ( QueryExecution qExec = QueryExecutionHTTP.service(dataURL)
+            .endpoint(dataURL)
+            .queryString("ASK{}")
+            .build()) {
+        qExec.execAsk();
+    }
+```
+
+Alternatively, the Java platform provides basic authentication.
+This is not challenge based - any request sent using a `HttpClient` configured 
+with an authenticator will include the authentication details. 
+(Caution - including sending username/password to the wrong site!).
+Digest authentication must use `AuthEnv.get().registerUsernamePassword`.
+
+```java
+    Authenticator authenticator = AuthLib.authenticator("user", "password");
+    HttpClient httpClient = HttpClient.newBuilder()
+            .authenticator(authenticator)
+            .build();
+```
+
+```java
+    // Use with RDFConnection      
+    try ( RDFConnection conn = RDFConnectionRemote.service(dataURL)
+            .httpClient(httpClient)
+            .build()) {
+        conn.queryAsk("ASK{}");
+    }
+```
+
+```java
+    try ( QueryExecution qExec = QueryExecutionHTTP.service(dataURL)
+            .httpClient(httpClient)
+            .endpoint(dataURL)
+            .queryString("ASK{}")
+            .build()) {
+        qExec.execAsk();
+    }
+```
+
+### Challenge registration
+
+`AuthEnv` maintains a registry of credentials and also a registry of the service URLs
+for which the credentials should be used. It supports registration of endpoint prefixes so that one
+registration will apply to all URLs starting with a common root.
+
+The main function is `AuthEnv.get().registerUsernamePassword`.
+
+```java
+   // Application setup code 
+   AuthEnv.get().registerUsernamePassword("username", "password");
+```
+
+```java
+   ...
+   try ( QueryExecution qExec = QueryExecutionHTTP.service(dataURL)
+        .endpoint(dataURL)
+        .queryString("ASK{}")
+        .build()) {
+       qExec.execAsk();
+   }
+```
+
+When an HTTP 401 response with a `WWW-Authenticate` header is received, the Jena HTTP handling code
+will look for a suitable authentication registration (exact or longest prefix), and retry the
+request. If it succeeds, a modifier is installed so all subsequent requests to the same endpoint will
+have the authentication header added and there is no challenge round-trip.
+
+### <tt>SERVICE</tt>
+
+The same mechanism is used for the URL in a SPARQL `SERVICE` clause.  If there is a 401 challenge,
+the registry is consulted and authentication applied.
+
+In addition, if the SERVICE URL has a username as the `userinfo` (that is, `https://user@some.host/...`),
+that user name is used to look in the authentication registry.
+
+If the `userinfo` is of the form "username:password" then the information as given in the URL is
+used.
+
+```java
+    AuthEnv.get().registerUsernamePassword(URI.create("http://host/sparql"), "u", "p");
+     // Registration applies to SERVICE.
+    Query query = QueryFactory.create("SELECT * { SERVICE <http://host/sparql> { ?s ?p ?o } }");
+    try ( QueryExecution qExec = QueryExecution.create().query(query).dataset(...).build() ) {
+        System.out.println("Call using SERVICE...");
+        ResultSet rs = qExec.execSelect();
+        ResultSetFormatter.out(rs);
+    }
+```
diff --git a/source/documentation/tdb/configuration.md b/source/documentation/tdb/configuration.md
index f3aa6f7..920c46d 100644
--- a/source/documentation/tdb/configuration.md
+++ b/source/documentation/tdb/configuration.md
@@ -40,10 +40,10 @@ Setting globally:
 
 Per query execution:
 
-     try(QueryExecution qExec = QueryExecutionFactory.create(...)) {
-         qExec.getContext().set(symbol, value) ;
-         ...
-     }
+    try(QueryExecution qExec = QueryExecution.dataset(dataset)
+                .query(query).set(ARQ.symLogExec,true).build() ) {
+         ....
+    }
 
 Setting for a query execution happens before any query compilation
 or setup happens. Creation of a query execution object does not
diff --git a/source/documentation/tdb/datasets.md b/source/documentation/tdb/datasets.md
index 8239465..cbf0561 100644
--- a/source/documentation/tdb/datasets.md
+++ b/source/documentation/tdb/datasets.md
@@ -48,9 +48,11 @@ Set globally:
 
 or set on a per query basis:
 
-    try(QueryExecution qExec = QueryExecutionFactory.create(...)) {
-        qExec.getContext().set(TDB.symUnionDefaultGraph, true) ;
-        ...
+    try(QueryExecution qExec = QueryExecution.dataset(dataset)
+            .query(query)
+            .set(TDB.symUnionDefaultGraph,true)
+            .build() ) {
+         ....
     }
 
 ## Special Graph Names
diff --git a/source/documentation/tdb/optimizer.md b/source/documentation/tdb/optimizer.md
index 2b936f1..632d724 100644
--- a/source/documentation/tdb/optimizer.md
+++ b/source/documentation/tdb/optimizer.md
@@ -112,13 +112,15 @@ set globally:
 
     ARQ.getContext().set(ARQ.symLogExec,true) ;
 
-and it may also be set on an individual query execution using it's
+and it may also be set on an individual query execution using its
 local context.
 
-     try(QueryExecution qExec = QueryExecutionFactory.create(...)) {
-        qExec.getContext().set(ARQ.symLogExec,true) ;
+    try(QueryExecution qExec = QueryExecution.dataset(dataset)
+              .query(query)
+              .set(ARQ.symLogExec,true)
+              .build() ) {
         ResultSet rs = qExec.execSelect() ;
-     }
+    }
 
 On the command line:
 
diff --git a/source/documentation/tdb/quadfilter.md b/source/documentation/tdb/quadfilter.md
index ef129e3..82eeef1 100644
--- a/source/documentation/tdb/quadfilter.md
+++ b/source/documentation/tdb/quadfilter.md
@@ -67,8 +67,10 @@ under the symbol `SystemTDB.symTupleFilter` then execute the query as normal.
         Dataset ds = ... ;
         Filter<Tuple<NodeId>> filter = createFilter(ds) ;
         Query query = ... ;
-        try (QueryExecution qExec = QueryExecutionFactory.create(query, ds)) {
-            qExec.getContext().set(SystemTDB.symTupleFilter, filter) ;
+        try (QueryExecution qExec = QueryExecution.dataset(ds)
+                .query(query)
+                .set(SystemTDB.symTupleFilter, filter)
+                .build() ) {
             ResultSet rs = qExec.execSelect() ;
             ...
         }
diff --git a/source/documentation/tdb/tdb_transactions.md b/source/documentation/tdb/tdb_transactions.md
index e86d207..ba063ff 100644
--- a/source/documentation/tdb/tdb_transactions.md
+++ b/source/documentation/tdb/tdb_transactions.md
@@ -95,13 +95,13 @@ The `dataset.end()` declares the end of the read transaction.  Applications may
      dataset.begin(ReadWrite.READ) ;
      String qs1 = "SELECT * {?s ?p ?o} LIMIT 10" ;        
 
-     try(QueryExecution qExec = QueryExecutionFactory.create(qs1, dataset)) {
+     try(QueryExecution qExec = QueryExecution.dataset(dataset).query(qs1).build() ) {
          ResultSet rs = qExec.execSelect() ;
          ResultSetFormatter.out(rs) ;
      } 
 
      String qs2 = "SELECT * {?s ?p ?o} OFFSET 10 LIMIT 10" ;  
-     try(QueryExecution qExec = QueryExecutionFactory.create(qs2, dataset)) {
+     try(QueryExecution qExec = QueryExecution.dataset(dataset).query(qs2).build() ) {
          rs = qExec.execSelect() ;
          ResultSetFormatter.out(rs) ;
      }
@@ -140,23 +140,21 @@ dataset.
          model.add( ... )
 
          // A SPARQL query will see the new statement added.
-         try (QueryExecution qExec = QueryExecutionFactory.create(
-                 "SELECT (count(*) AS ?count) { ?s ?p ?o} LIMIT 10", 
-               dataset)) {
+         try (QueryExecution qExec = QueryExecution.dataset(dataset)
+                 .query("SELECT (count(*) AS ?count) { ?s ?p ?o} LIMIT 10")
+                 .build() ) {
              ResultSet rs = qExec.execSelect() ;
              ResultSetFormatter.out(rs) ;
          }
 
          // ... perform a SPARQL Update
-         GraphStore graphStore = GraphStoreFactory.create(dataset) ;
          String sparqlUpdateString = StrUtils.strjoinNL(
               "PREFIX : <http://example/>",
               "INSERT { :s :p ?now } WHERE { BIND(now() AS ?now) }"
               ) ;
 
          UpdateRequest request = UpdateFactory.create(sparqlUpdateString) ;
-         UpdateProcessor proc = UpdateExecutionFactory.create(request, graphStore) ;
-         proc.execute() ;
+         UpdateExecution.dataset(dataset).update(request).execute();
             
          // Finally, commit the transaction. 
          dataset.commit() ;
diff --git a/source/documentation/txn/transactions_api.md b/source/documentation/txn/transactions_api.md
index 8e692f1..80953c9 100644
--- a/source/documentation/txn/transactions_api.md
+++ b/source/documentation/txn/transactions_api.md
@@ -35,13 +35,13 @@ this code even if another thread commits changes in the lifetime of this transac
      dataset.begin(ReadWrite.READ) ;
      try {
          String qs1 = "SELECT * {?s ?p ?o} LIMIT 10" ;        
-         try(QueryExecution qExec = QueryExecutionFactory.create(qs1, dataset)) {
+         try(QueryExecution qExec = QueryExecution.create(qs1, dataset)) {
              ResultSet rs = qExec.execSelect() ;
              ResultSetFormatter.out(rs) ;
          }
 
          String qs2 = "SELECT * {?s ?p ?o} OFFSET 10 LIMIT 10" ;  
-         try(QueryExecution qExec = QueryExecutionFactory.create(qs2, dataset)) {
+         try(QueryExecution qExec = QueryExecution.create(qs2, dataset)) {
              rs = qExec.execSelect() ;
              ResultSetFormatter.out(rs) ;
          }
@@ -81,23 +81,21 @@ dataset.
          model.add( ... )
 
          // A SPARQL query will see the new statement added.
-         try (QueryExecution qExec = QueryExecutionFactory.create(
+         try (QueryExecution qExec = QueryExecution.create(
                  "SELECT (count(?s) AS ?count) { ?s ?p ?o} LIMIT 10",
-               dataset)) {
+                 dataset)) {
              ResultSet rs = qExec.execSelect() ;
              ResultSetFormatter.out(rs) ;
          }
 
          // ... perform a SPARQL Update
-         GraphStore graphStore = GraphStoreFactory.create(dataset) ;
          String sparqlUpdateString = StrUtils.strjoinNL(
               "PREFIX : <http://example/>",
               "INSERT { :s :p ?now } WHERE { BIND(now() AS ?now) }"
               ) ;
 
          UpdateRequest request = UpdateFactory.create(sparqlUpdateString) ;
-         UpdateProcessor proc = UpdateExecutionFactory.create(request, graphStore) ;
-         proc.execute() ;
+         UpdateExecution.dataset(dataset).update(request).execute();
 
          // Finally, commit the transaction.
          dataset.commit() ;