You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@taverna.apache.org by st...@apache.org on 2016/06/23 08:53:15 UTC
[01/14] incubator-taverna-databundle-viewer git commit: Add files via
upload
Repository: incubator-taverna-databundle-viewer
Updated Branches:
refs/heads/master a7fdbb0d6 -> 5c084b7b8
Add files via upload
Project: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/commit/94ddf9b9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/tree/94ddf9b9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/diff/94ddf9b9
Branch: refs/heads/master
Commit: 94ddf9b964047be165659f951f642872c9174332
Parents: a7fdbb0
Author: PCStefan <pa...@gmail.com>
Authored: Mon Jun 6 12:48:39 2016 +0100
Committer: PCStefan <pa...@gmail.com>
Committed: Mon Jun 6 12:48:39 2016 +0100
----------------------------------------------------------------------
Gemfile | 8 +++++++-
1 file changed, 7 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/94ddf9b9/Gemfile
----------------------------------------------------------------------
diff --git a/Gemfile b/Gemfile
index e257834..43029d0 100644
--- a/Gemfile
+++ b/Gemfile
@@ -18,7 +18,6 @@
#
source 'https://rubygems.org'
-ruby '2.2.1'
gem 'rails', '4.2.3'
@@ -57,6 +56,13 @@ gem 'ro-bundle'
# A Ruby library to aid the interaction with Taverna 2 workflows
gem 'workflow_parser', github: 'myExperiment/workflow_parser'
gem 'taverna-t2flow', github: 'myExperiment/workflow_parser-t2flow'
+
+# A gem to build digraphs
+gem 'ruby-graphviz'
+
+# A gem to query the prov.ttl
+gem 'sparql'
+
# A simple interface to working with ZIP archives
gem 'archive-zip'
# Paginator
[11/14] incubator-taverna-databundle-viewer git commit: Add files via
upload
Posted by st...@apache.org.
Add files via upload
Project: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/commit/e3ac7c23
Tree: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/tree/e3ac7c23
Diff: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/diff/e3ac7c23
Branch: refs/heads/master
Commit: e3ac7c23d60d7d98ca67506042ee0264b29b6021
Parents: 380bdcf
Author: PCStefan <pa...@gmail.com>
Authored: Mon Jun 6 12:56:10 2016 +0100
Committer: PCStefan <pa...@gmail.com>
Committed: Mon Jun 6 12:56:10 2016 +0100
----------------------------------------------------------------------
config/database.yml | 85 ++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 85 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/e3ac7c23/config/database.yml
----------------------------------------------------------------------
diff --git a/config/database.yml b/config/database.yml
new file mode 100644
index 0000000..603e719
--- /dev/null
+++ b/config/database.yml
@@ -0,0 +1,85 @@
+# PostgreSQL. Versions 8.2 and up are supported.
+#
+# Install the pg driver:
+# gem install pg
+# On OS X with Homebrew:
+# gem install pg -- --with-pg-config=/usr/local/bin/pg_config
+# On OS X with MacPorts:
+# gem install pg -- --with-pg-config=/opt/local/lib/postgresql84/bin/pg_config
+# On Windows:
+# gem install pg
+# Choose the win32 build.
+# Install PostgreSQL and put its /bin directory on your path.
+#
+# Configure Using Gemfile
+# gem 'pg'
+#
+default: &default
+ adapter: postgresql
+ encoding: unicode
+ # For details on connection pooling, see rails configuration guide
+ # http://guides.rubyonrails.org/configuring.html#database-pooling
+ pool: 5
+
+development:
+ <<: *default
+ database: DatabundleViewer_development
+
+ # The specified database role being used to connect to postgres.
+ # To create additional roles in postgres see `$ createuser --help`.
+ # When left blank, postgres will use the default role. This is
+ # the same name as the operating system user that initialized the database.
+ #username: cpadurariu
+
+ # The password associated with the postgres role (username).
+ #password: developer123
+
+ # Connect on a TCP socket. Omitted by default since the client uses a
+ # domain socket that doesn't need configuration. Windows does not have
+ # domain sockets, so uncomment these lines.
+ #host: localhost
+
+ # The TCP port the server listens on. Defaults to 5432.
+ # If your server runs on a different port number, change accordingly.
+ #port: 5432
+
+ # Schema search path. The server defaults to $user,public
+ #schema_search_path: myapp,sharedapp,public
+
+ # Minimum log levels, in increasing order:
+ # debug5, debug4, debug3, debug2, debug1,
+ # log, notice, warning, error, fatal, and panic
+ # Defaults to warning.
+ #min_messages: notice
+
+# Warning: The database defined as "test" will be erased and
+# re-generated from your development database when you run "rake".
+# Do not set this db to the same as development or production.
+test:
+ <<: *default
+ database: DatabundleViewer_test
+
+# As with config/secrets.yml, you never want to store sensitive information,
+# like your database password, in your source code. If your source code is
+# ever seen by anyone, they now have access to your database.
+#
+# Instead, provide the password as a unix environment variable when you boot
+# the app. Read http://guides.rubyonrails.org/configuring.html#configuring-a-database
+# for a full rundown on how to provide these environment variables in a
+# production deployment.
+#
+# On Heroku and other platform providers, you may have a full connection URL
+# available as an environment variable. For example:
+#
+# DATABASE_URL="postgres://myuser:mypass@localhost/somedatabase"
+#
+# You can use this database configuration with:
+#
+# production:
+# url: <%= ENV['DATABASE_URL'] %>
+#
+production:
+ <<: *default
+ database: DatabundleViewer_production
+ username: cpadurariu
+ password: developer123
[04/14] incubator-taverna-databundle-viewer git commit: Add files via
upload
Posted by st...@apache.org.
Add files via upload
Project: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/commit/043417a9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/tree/043417a9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/diff/043417a9
Branch: refs/heads/master
Commit: 043417a9318fdc6848f4a33b3621c61742791240
Parents: 6721942
Author: PCStefan <pa...@gmail.com>
Authored: Mon Jun 6 12:52:23 2016 +0100
Committer: PCStefan <pa...@gmail.com>
Committed: Mon Jun 6 12:52:23 2016 +0100
----------------------------------------------------------------------
app/controllers/data_bundles_controller.rb | 12 ++++++++++--
1 file changed, 10 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/043417a9/app/controllers/data_bundles_controller.rb
----------------------------------------------------------------------
diff --git a/app/controllers/data_bundles_controller.rb b/app/controllers/data_bundles_controller.rb
index a1939d1..55adff6 100644
--- a/app/controllers/data_bundles_controller.rb
+++ b/app/controllers/data_bundles_controller.rb
@@ -34,12 +34,15 @@ class DataBundlesController < ApplicationController
@data_bundle = @data_bundle.decorate
respond_to do |format|
format.html
- format.json { render json: @data_bundle.to_json }
+ # format.json { render json: @data_bundle.to_json }
+ format.json { render :json => {:workflow => @data_bundle.to_dataHashObject,
+ :provenance => @data_bundle.provenanceMain } }
end
end
# GET /data_bundles/1/edit
def edit
+ redirect_to data_bundles_path, notice: "Success"
end
# POST /data_bundles
@@ -68,6 +71,11 @@ class DataBundlesController < ApplicationController
redirect_to data_bundles_url, notice: 'Data bundle was successfully destroyed.'
end
+ def redirectToLogin
+ redirect_to new_user_session_path, notice: 'Data bundle was successfully created.'
+ end
+ helper_method :redirectToLogin
+
private
# Use callbacks to share common setup or constraints between actions.
def set_data_bundle
@@ -77,5 +85,5 @@ class DataBundlesController < ApplicationController
# Never trust parameters from the scary internet, only allow the white list through.
def data_bundle_params
params.require(:data_bundle).permit(:file, :name)
- end
+ end
end
[12/14] incubator-taverna-databundle-viewer git commit: Merge
remote-tracking branch 'PCStefan/master'
Posted by st...@apache.org.
Merge remote-tracking branch 'PCStefan/master'
Provenance Viewer for Taverna
Contributed by Padurariu Stefan (PCStefan)
(ICLA is on file)
This closes #3
TODO:
Remove ruby-graphviz library (GPL2)
Add NOTICE for sankey.js (See #3) - BSD-3 license
https://github.com/d3/d3-plugins/blob/master/LICENSE
Project: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/commit/ccf12b31
Tree: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/tree/ccf12b31
Diff: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/diff/ccf12b31
Branch: refs/heads/master
Commit: ccf12b31b35c7573f5e0a19bb92eb1862481f51b
Parents: a7fdbb0 e3ac7c2
Author: Stian Soiland-Reyes <st...@apache.org>
Authored: Wed Jun 22 23:29:15 2016 +0100
Committer: Stian Soiland-Reyes <st...@apache.org>
Committed: Wed Jun 22 23:29:15 2016 +0100
----------------------------------------------------------------------
Gemfile | 8 +-
app/assets/javascripts/application.coffee | 5 +-
app/assets/javascripts/data_bundle.coffee | 951 ++++++++++++++++++++++--
app/assets/javascripts/sankey.js | 577 ++++++++++++++
app/assets/javascripts/vertical_sankey.js | 292 ++++++++
app/assets/stylesheets/data_bundle.scss | 107 ++-
app/controllers/data_bundles_controller.rb | 12 +-
app/decorators/data_bundle_decorator.rb | 90 ++-
app/models/provenance.rb | 606 +++++++++++++++
app/views/data_bundles/edit.html.slim | 5 +-
app/views/data_bundles/index.html.slim | 11 +-
app/views/data_bundles/show.html.slim | 109 ++-
app/views/layouts/application.html.slim | 2 +-
config/database.yml | 85 +++
14 files changed, 2743 insertions(+), 117 deletions(-)
----------------------------------------------------------------------
[03/14] incubator-taverna-databundle-viewer git commit: Add files via
upload
Posted by st...@apache.org.
Add files via upload
Project: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/commit/67219422
Tree: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/tree/67219422
Diff: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/diff/67219422
Branch: refs/heads/master
Commit: 672194226f3cd2dfa273772ff114a50e1e44ef75
Parents: 7dddbeb
Author: PCStefan <pa...@gmail.com>
Authored: Mon Jun 6 12:51:44 2016 +0100
Committer: PCStefan <pa...@gmail.com>
Committed: Mon Jun 6 12:51:44 2016 +0100
----------------------------------------------------------------------
app/assets/stylesheets/data_bundle.scss | 107 +++++++++++++++++++++------
1 file changed, 86 insertions(+), 21 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/67219422/app/assets/stylesheets/data_bundle.scss
----------------------------------------------------------------------
diff --git a/app/assets/stylesheets/data_bundle.scss b/app/assets/stylesheets/data_bundle.scss
index 719f309..0a47b3e 100644
--- a/app/assets/stylesheets/data_bundle.scss
+++ b/app/assets/stylesheets/data_bundle.scss
@@ -17,29 +17,94 @@
* under the License.
*/
-svg {
- path.link {
- fill: none;
- stroke: #666;
- stroke-width: 1.5px;
- }
+.box {
+ border-top: 0px solid #d2d6de;
+}
- circle {
- fill: #ccc;
- stroke: #fff;
- stroke-width: 1.5px;
- }
+.box-body {
+ padding: 0px;
+}
- text {
- fill: #000;
- font: 10px sans-serif;
- pointer-events: none;
- }
+.container {
+ width: 90%;
+}
- rect {
- fill: #585858;
- }
- .fill_text {
- fill: red !important;
+.provCont {
+ margin-top: 30px;
+}
+
+.well{
+ border: 0px solid #d2d6de;
+}
+
+#back_btn {
+ padding-left: 15px;
+}
+
+#provenance {
+ .well, .container-fluid {
+ padding: 0px;
}
}
+
+.table-bordered>tbody>tr>td, .table-bordered>tbody>tr>th, .table-bordered>tfoot>tr>td, .table-bordered>tfoot>tr>th, .table-bordered>thead>tr>td, .table-bordered>thead>tr>th{
+ border: 1px solid #ddd;
+}
+
+.svgImage {
+ position: relative;
+ top: 15px;
+ left: 88.5%;
+}
+
+#enableZooming {
+ margin-right: 20px;
+ margin-left : -146px;
+}
+
+#mapContainer {
+ position: relative;
+ left:0px;
+ top:0px;
+ overflow : auto;
+ padding : 0px;
+ margin: 0px;
+ max-height: 960px;
+}
+
+#wfContainer {
+ overflow : scroll;
+ position: relative;
+ top:10px;
+ padding-top: 10px;
+ height: 990px;
+ min-width: 990px;
+ max-width: 1586px;
+}
+
+#save_data_bundle {
+ margin-left: -10px;
+}
+
+.misbackground {
+ fill: red;
+ /*fill: #ecf0f5;*/
+ border: 1px solid #000;
+}
+
+line {
+ stroke: #0bb;
+}
+
+text.active {
+ fill: red;
+}
+
+#order {
+ margin-left: 10px;
+}
+
+#porder {
+ margin-left: 32px;
+ margin-top: 20px;
+}
\ No newline at end of file
[13/14] incubator-taverna-databundle-viewer git commit: Added BSD-3
license to sankey.js
Posted by st...@apache.org.
Added BSD-3 license to sankey.js
From
https://github.com/d3/d3-plugins/blob/master/LICENSE
added additional (c) holders from the git commit log at
https://github.com/kunalb/d3-plugins/blob/sankey/sankey/sankey.js
... in addition to Stefan, who then made vertical_sankey.js
as a derivative work.
See discussion in #3
Project: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/commit/50486001
Tree: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/tree/50486001
Diff: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/diff/50486001
Branch: refs/heads/master
Commit: 50486001b908d6dad30a8515407fb095f6cac84b
Parents: ccf12b3
Author: Stian Soiland-Reyes <st...@apache.org>
Authored: Thu Jun 23 09:45:36 2016 +0100
Committer: Stian Soiland-Reyes <st...@apache.org>
Committed: Thu Jun 23 09:52:42 2016 +0100
----------------------------------------------------------------------
LICENSE | 5 +++++
app/assets/javascripts/sankey.js | 30 ++++++++++++++++++++++++++
app/assets/javascripts/vertical_sankey.js | 28 ++++++++++++++++++++++++
3 files changed, 63 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/50486001/LICENSE
----------------------------------------------------------------------
diff --git a/LICENSE b/LICENSE
index d645695..5eed59f 100644
--- a/LICENSE
+++ b/LICENSE
@@ -200,3 +200,8 @@
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+---------------------------------------------------
+app/assets/javascripts/sankey.js and vertical_sankey.js
+are licensed as BSD-3 clause
+https://github.com/d3/d3-plugins/blob/master/LICENSE
+---------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/50486001/app/assets/javascripts/sankey.js
----------------------------------------------------------------------
diff --git a/app/assets/javascripts/sankey.js b/app/assets/javascripts/sankey.js
index 7f3580b..693505f 100644
--- a/app/assets/javascripts/sankey.js
+++ b/app/assets/javascripts/sankey.js
@@ -1,3 +1,33 @@
+// Copyright (c) 2012-2015, Michael Bostock,
+// Nathan Malkin, Kunal Bhalla, Claudiu Stefan Padurariu
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// * Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+//
+// * Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// * The name Michael Bostock may not be used to endorse or promote products
+// derived from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL MICHAEL BOSTOCK BE LIABLE FOR ANY DIRECT,
+// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Adapted from https://github.com/kunalb/d3-plugins/blob/sankey/sankey/sankey.js
+
d3.sankey = function() {
var sankey = {},
nodeWidth = 24,
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/50486001/app/assets/javascripts/vertical_sankey.js
----------------------------------------------------------------------
diff --git a/app/assets/javascripts/vertical_sankey.js b/app/assets/javascripts/vertical_sankey.js
index 67fa517..6b29b62 100644
--- a/app/assets/javascripts/vertical_sankey.js
+++ b/app/assets/javascripts/vertical_sankey.js
@@ -1,3 +1,31 @@
+// Copyright (c) 2012-2015, Michael Bostock,
+// Nathan Malkin, Kunal Bhalla, Claudiu Stefan Padurariu
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// * Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+//
+// * Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// * The name Michael Bostock may not be used to endorse or promote products
+// derived from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL MICHAEL BOSTOCK BE LIABLE FOR ANY DIRECT,
+// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
d3.vertical_sankey = function() {
var sankey = {},
nodeWidth = 24,
[08/14] incubator-taverna-databundle-viewer git commit: Add files via
upload
Posted by st...@apache.org.
Add files via upload
Project: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/commit/7421fc77
Tree: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/tree/7421fc77
Diff: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/diff/7421fc77
Branch: refs/heads/master
Commit: 7421fc777f847dd694db7f88847ca458b997c3e6
Parents: 877aa41
Author: PCStefan <pa...@gmail.com>
Authored: Mon Jun 6 12:53:49 2016 +0100
Committer: PCStefan <pa...@gmail.com>
Committed: Mon Jun 6 12:53:49 2016 +0100
----------------------------------------------------------------------
----------------------------------------------------------------------
[09/14] incubator-taverna-databundle-viewer git commit: Add files via
upload
Posted by st...@apache.org.
Add files via upload
Project: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/commit/defd6df0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/tree/defd6df0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/diff/defd6df0
Branch: refs/heads/master
Commit: defd6df0b2c112cc57b29b309b9dbe31d78acd36
Parents: 7421fc7
Author: PCStefan <pa...@gmail.com>
Authored: Mon Jun 6 12:54:16 2016 +0100
Committer: PCStefan <pa...@gmail.com>
Committed: Mon Jun 6 12:54:16 2016 +0100
----------------------------------------------------------------------
app/views/data_bundles/edit.html.slim | 5 +-
app/views/data_bundles/index.html.slim | 11 ++-
app/views/data_bundles/show.html.slim | 109 +++++++++++++++++++++-------
3 files changed, 90 insertions(+), 35 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/defd6df0/app/views/data_bundles/edit.html.slim
----------------------------------------------------------------------
diff --git a/app/views/data_bundles/edit.html.slim b/app/views/data_bundles/edit.html.slim
index 4b38688..aad212c 100644
--- a/app/views/data_bundles/edit.html.slim
+++ b/app/views/data_bundles/edit.html.slim
@@ -5,8 +5,9 @@ section.content
.row
.col-xs-12
.box.box-primary
- = render partial: 'form'
-
+ # = render partial: 'form'
+ | This option has been disabled by the administrator.
+
= link_to 'Show', @data_bundle
'|
= link_to 'Back', data_bundles_path
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/defd6df0/app/views/data_bundles/index.html.slim
----------------------------------------------------------------------
diff --git a/app/views/data_bundles/index.html.slim b/app/views/data_bundles/index.html.slim
index 88c00bf..d8331f5 100644
--- a/app/views/data_bundles/index.html.slim
+++ b/app/views/data_bundles/index.html.slim
@@ -1,6 +1,6 @@
section.content-header
h1
- | DataBundles
+ | Data Bundles
section.content
.row
- if user_signed_in?
@@ -12,7 +12,7 @@ section.content
tr
th Name
th Date
- th Donwload
+ th Download
th Links
- @data_bundles.each do |data_bundle|
tr
@@ -23,11 +23,9 @@ section.content
td
= link_to data_bundle.file_identifier, data_bundle.file.url
td
- = link_to 'Show', data_bundle, id: "to_show_#{data_bundle.id}"
+ = link_to 'Visualize', data_bundle, id: "to_show_#{data_bundle.id}"
td
- = link_to 'Edit', edit_data_bundle_path(data_bundle), id: "to_edit_#{data_bundle.id}"
- td
- = link_to 'Destroy', data_bundle_path(data_bundle), method: :delete, data: {confirm: 'Are you sure?'}, id: "to_delete_#{data_bundle.id}"
+ = link_to 'Delete bundle', data_bundle_path(data_bundle), method: :delete, data: {confirm: 'Are you sure?'}, id: "to_delete_#{data_bundle.id}"
= paginate @data_bundles
.col-xs-5
.box.box-primary
@@ -39,3 +37,4 @@ section.content
.box
.box-body
| For use the databundle viewer you must be logged in
+ = redirectToLogin
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/defd6df0/app/views/data_bundles/show.html.slim
----------------------------------------------------------------------
diff --git a/app/views/data_bundles/show.html.slim b/app/views/data_bundles/show.html.slim
index 218479c..a652727 100644
--- a/app/views/data_bundles/show.html.slim
+++ b/app/views/data_bundles/show.html.slim
@@ -1,31 +1,86 @@
section.content-header
h1
- = "DataBundle #{@data_bundle.name}"
+ = "#{link_to 'DataBundle: ', data_bundles_path}#{@data_bundle.name}".html_safe
+
section.content
- = link_to '<- Back', data_bundles_path
- .box.box-default
- .box-body
- table.table.table-bordered
- tbody
- tr
- td Workflow name
- td
- = @data_bundle.workflow.annotations.name
- tr
- td Authors
- td
- = @data_bundle.workflow.annotations.authors.join(' ')
- tr
- td Titles
- td
- = @data_bundle.workflow.annotations.titles.join(' ')
- tr
- td Description
- td
- = @data_bundle.workflow.annotations.descriptions.join(' ')
- tr
- td Semantic annotation
- td
- = @data_bundle.workflow.annotations.semantic_annotation
- svg#graphContainer
+ .container-fluid
+ .row-fluid
+ .span8.well
+ ul#subject.nav.nav-tabs
+ li.active
+ a data-target="#workflow" data-toggle="tab" Workflow
+ li
+ a data-target="#provenance" data-toggle="tab" Provenance
+ .tab-content
+ #workflow.tab-pane.active.fade.in
+ .box.box-default
+ .box-body
+ table.table.table-bordered
+ tbody
+ tr
+ td Workflow name
+ td
+ = @data_bundle.workflow.annotations.name
+ tr
+ td Authors
+ td
+ = @data_bundle.workflow.annotations.authors.join(' ')
+ tr
+ td Titles
+ td
+ = @data_bundle.workflow.annotations.titles.join(' ')
+ tr
+ td Description
+ td
+ = @data_bundle.workflow.annotations.descriptions.join(' ')
+ tr
+ td Semantic annotation
+ td
+ - if @data_bundle.workflow.annotations.semantic_annotation.nil?
+ | -
+ - else
+ = @data_bundle.workflow.annotations.semantic_annotation
+ #wfContainer
+ svg#graphContainer
+ button#saveWF.svgImage Save the workflow as image
+ canvas#canvasWF height="500" style="display:none" width="960"
+
+
+ #provenance.tab-pane
+ - if @data_bundle.provenanceMain.present?
+ .container-fluid
+ .row-fluid
+ .span8.well
+ ul#diagramType.nav.nav-tabs
+ li.active
+ a#diagramSankey data-target="#provSankey" data-toggle="tab" Sankey
+ li
+ a#diagramMiserables data-target="#provCooccurrence" data-toggle="tab" Co-occurrence
+ .tab-content
+ #provSankey.tab-pane.active.fade.in
+ .box.box-default
+ .box-body
+ #provCooccurrence.tab-pane
+ .box.box-default
+ .box-body
+ p#porder
+ | Order:
+ select#order
+ option value="name" by Name
+ option value="count" by Frequency
+ option value="type" by Type
+ .provCont
+ #mapContainer
+ svg#provContainer
+ button#enableZooming.svgImage alt=("Enabling zooming will mouse-mid button scrolling ability") type="submit" value="True"
+ span Enable Zooming
+
+ button#savePROV.svgImage Save the provenance as image
+ canvas#canvasPROV height="500" style="display:none" width="960"
+ - else
+ .row No data to display
+ br
+
+
+
#data_bundle data-url=data_bundle_url(@data_bundle, format: :json)
[05/14] incubator-taverna-databundle-viewer git commit: Add files via
upload
Posted by st...@apache.org.
Add files via upload
Project: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/commit/590ee999
Tree: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/tree/590ee999
Diff: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/diff/590ee999
Branch: refs/heads/master
Commit: 590ee9998fc452cd106c7829dafe97cd0682e24c
Parents: 043417a
Author: PCStefan <pa...@gmail.com>
Authored: Mon Jun 6 12:52:48 2016 +0100
Committer: PCStefan <pa...@gmail.com>
Committed: Mon Jun 6 12:52:48 2016 +0100
----------------------------------------------------------------------
app/decorators/data_bundle_decorator.rb | 90 +++++++++++++++++++++++++---
1 file changed, 81 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/590ee999/app/decorators/data_bundle_decorator.rb
----------------------------------------------------------------------
diff --git a/app/decorators/data_bundle_decorator.rb b/app/decorators/data_bundle_decorator.rb
index 07332cd..b554509 100644
--- a/app/decorators/data_bundle_decorator.rb
+++ b/app/decorators/data_bundle_decorator.rb
@@ -20,15 +20,11 @@
class DataBundleDecorator < Draper::Decorator
delegate_all
- FILE_TYPES = {
- inputs: '/inputs/',
- intermediates: '/intermediates/',
- outputs: '/outputs/'
- }
+ FILE_TYPES = {:inputs => '/inputs/' , :intermediates => '/intermediates/', :outputs => 'outputs'}
FILE_TYPES.each do |type_key, type_name|
define_method :"#{type_key}" do
- files = manifest['aggregates'].select { |files| files['folder'].start_with?(type_name) }
+ files = manifest['aggregates'].select { |file| !file['folder'].nil? && file['folder'].start_with?(type_name) }
result = {}
files.each do |file|
key = file['file'].split('/').last.split('.').first
@@ -53,6 +49,7 @@ class DataBundleDecorator < Draper::Decorator
def workflow
if @workflow.nil?
+
manifest = Nokogiri::XML(File.open("#{object.file_path}#{DataBundle::EXTRACTED_WORKFLOW_PATH}/META-INF/manifest.xml"))
t2flow_name = manifest.xpath('//manifest:file-entry[@manifest:media-type="application/vnd.taverna.t2flow+xml"][@manifest:size]').first['manifest:full-path']
file = File.open("#{object.file_path}#{DataBundle::EXTRACTED_WORKFLOW_PATH}/#{t2flow_name}")
@@ -62,9 +59,48 @@ class DataBundleDecorator < Draper::Decorator
@workflow
end
- def to_json
- stream = []
- workflow.datalinks.each { |link| stream << write_link(link, workflow) }
+ def to_dataHashObject
+ paths = []
+
+ workflow.datalinks.each do |link|
+ paths << write_link(link, workflow)
+ end
+
+ stream = {}
+ nodes = []
+ links = []
+
+ paths.each do |path|
+ #get source node
+ source = {:name => path[:source] }
+ target = {:name => path[:target] }
+
+ indexSource = -1
+ indexTarget = -1
+
+ nodes.each_with_index do |node, index|
+ if node[:name].to_s == source[:name]
+ indexSource = index
+ elsif node[:name].to_s == target[:name]
+ indexTarget = index
+ end
+ end
+
+ if indexSource == -1
+ indexSource = nodes.count
+ nodes << source
+ end
+
+ if indexTarget == -1
+ indexTarget = nodes.count
+ nodes << target
+ end
+
+ links << {:source => indexSource, :target => indexTarget, :value => 50}
+
+ end
+
+ stream = {:nodes => nodes, :links => links }
stream
end
@@ -87,4 +123,40 @@ class DataBundleDecorator < Draper::Decorator
def processor_by_name(dataflow, name)
dataflow.processors.select { |p| p.name == name.split(':').first }.first.name
end
+
+
+
+ # find the provenance file
+ # how to extract info from file see http://ruby-rdf.github.io/ , section Querying RDF data using basic graph patterns
+ def provenanceMain
+
+ if @provenance.nil?
+
+ provenanceObj = Provenance.new("#{object.file_path}workflowrun.prov.ttl")
+ @provenance = provenanceObj.to_dataHashObject("#{object.file_path}")
+
+ # stream = {}
+ # nodes = []
+ # links = []
+
+ # iteration = 12
+
+ # iteration.times do |i|
+ # nodes << {:name => i, :label => i, :type => "Artifact"}
+ # end
+
+ # (iteration - 1).times do |i|
+ # links << {:source => i, :target => i+1, :value => 50}
+ # end
+
+ # stream = {:nodes => nodes, :links => links }
+
+ # @provenance = stream
+
+ end # if provenance
+
+ #return
+ @provenance
+ end # def provenance
+
end
[02/14] incubator-taverna-databundle-viewer git commit: Add files via
upload
Posted by st...@apache.org.
Add files via upload
Project: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/commit/7dddbeb4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/tree/7dddbeb4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/diff/7dddbeb4
Branch: refs/heads/master
Commit: 7dddbeb48ca8d9a651cf2377e590417e0fa11f17
Parents: 94ddf9b
Author: PCStefan <pa...@gmail.com>
Authored: Mon Jun 6 12:51:05 2016 +0100
Committer: PCStefan <pa...@gmail.com>
Committed: Mon Jun 6 12:51:05 2016 +0100
----------------------------------------------------------------------
app/assets/javascripts/application.coffee | 5 +-
app/assets/javascripts/data_bundle.coffee | 951 +++++++++++++++++++++++--
app/assets/javascripts/sankey.js | 577 +++++++++++++++
app/assets/javascripts/vertical_sankey.js | 292 ++++++++
4 files changed, 1777 insertions(+), 48 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/7dddbeb4/app/assets/javascripts/application.coffee
----------------------------------------------------------------------
diff --git a/app/assets/javascripts/application.coffee b/app/assets/javascripts/application.coffee
index 529d3f7..0cbd92a 100644
--- a/app/assets/javascripts/application.coffee
+++ b/app/assets/javascripts/application.coffee
@@ -32,8 +32,11 @@
#= require jquery_ujs
#= require bootstrap_theme/bootstrap/js/bootstrap.min
#= require d3/d3.min
+#= require d3/d3.js
+#= require sankey.js
+#= require vertical_sankey.js
#= require data_bundle
$ ->
- draw_workflow()
+ draw()
return
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/7dddbeb4/app/assets/javascripts/data_bundle.coffee
----------------------------------------------------------------------
diff --git a/app/assets/javascripts/data_bundle.coffee b/app/assets/javascripts/data_bundle.coffee
index c45201a..ab00c91 100644
--- a/app/assets/javascripts/data_bundle.coffee
+++ b/app/assets/javascripts/data_bundle.coffee
@@ -17,55 +17,912 @@
# under the License.
#
-@draw_workflow = ->
- if $('svg#graphContainer').length > 0
- d3.json $('#data_bundle').attr('data-url'), (error, links) ->
- tick = ->
- path.attr 'd', (d) ->
- dx = d.target.x - (d.source.x)
- dy = d.target.y - (d.source.y)
- dr = Math.sqrt(dx * dx + dy * dy)
- 'M' + d.source.x + ',' + d.source.y + 'A' + dr + ',' + dr + ' 0 0,1 ' + d.target.x + ',' + d.target.y
- node.attr 'transform', (d) ->
- 'translate(' + d.x + ',' + d.y + ')'
+## sankey diagram : http://bl.ocks.org/d3noob/5028304
+## concur. matrix : https://bost.ocks.org/mike/miserables/
+
+# for every type of activity call draw_provenance
+$(document).ready ->
+ $('#diagramType li a').click ->
+ draw_provenance($(this).text())
+ return
+
+@isEnabled = false #is the mouse wheel scroll disabled or enabled
+
+$('#enableZooming').click ->
+
+ isEnabled = undefined
+
+ if $("#enableZooming span").html() == 'Enable Zooming'
+ isEnabled = true
+ $("#enableZooming span").html('Disable Zooming')
+ else
+ isEnabled = false
+ $("#enableZooming span").html('Enable Zooming')
+
+ disableMidMouse(isEnabled)
+
+ return
+
+@disableMidMouse = (status) ->
+ # enable/disable middle mouse button scrolling
+ wheelEnable = (event) ->
+ event.preventDefault()
+ event.returnValue = true
+ return
+
+ wheelDisable = (event) ->
+ event.preventDefault()
+ event.returnValue = false
+ return
+
+ setisEnabled(status)
+
+ if window.addEventListener
+ if status
+ window.addEventListener('DOMMouseScroll', wheelDisable, false)
+ window.onmousewheel = document.onmousewheel = wheelDisable
+ else
+ window.addEventListener('DOMMouseScroll', wheelEnable, false)
+ window.onmousewheel = document.onmousewheel = wheelEnable
+
+ return
+
+# distingush between single click and double click
+# see http://bl.ocks.org/couchand/6394506
+@clickCancel = ->
+ event = d3.dispatch('click', 'dblclick')
+
+ cc = (selection) ->
+ down = undefined
+ tolerance = 5
+ last = undefined
+ wait = null
+ # euclidean distance
+
+ dist = (a, b) ->
+ Math.sqrt (a[0] - (b[0])) ** 2, (a[1] - (b[1])) ** 2
+
+ selection.on('mousedown', ->
+ down = d3.mouse(document.body)
+ last = +new Date
+ return
+ )
+ selection.on('mouseup', ->
+ if dist(down, d3.mouse(document.body)) > tolerance
return
+ else
+ if wait
+ window.clearTimeout wait
+ wait = null
+ event.dblclick d3.event
+ else
+ wait = window.setTimeout(((e) ->
+ ->
+ event.click e
+ wait = null
+ return
+ )(d3.event), 300)
+ return
+ )
+ return
+ d3.rebind(cc, event, 'on')
+
+# Here start diagrams func
+
+@glob_width = 0
+@dashLine = '\n---------------------------------------------------------------\n'
+@graph = {}
+@tempgraph = {}
+
+# set the width
+@setGLWidth =(reqWidth) ->
+ @glob_width = reqWidth
+ return
+
+@setisEnabled =(status) ->
+ @isEnabled = status
+ return
+
+
+# set a color for a node
+@getColorHex =(source) ->
+ color = d3.scale.category20()
+ colorType = undefined
+ switch source
+ when 'Workflow Run' then colorType = '#0eff7f'
+ when 'Process Run' then colorType = '#258fda'
+ when 'Artifact' then colorType = '#ff7f0e'
+ when 'Dictionary' then colorType = '#7f0eff'
+ else colorType = color(stringTextForColor.replace(RegExp(' .*'), ''))
+ colorType
+
+@getColorTransitionTypeHex =(value) ->
+
+ color = d3.scale.category20()
+ colorType = undefined
+ switch value
+ when 11 then colorType = '#004d24' # wfprov:wasPartOfWorkflow
+ when 12 then colorType = '#c3e221' # this case should not exist
+ when 13 then colorType = '#009947' # wfprov:usedInputArtifact
+ when 14 then colorType = '#003318' # ----- // ------Dictionary
+ when 21 then colorType = '#258fda' # wasPartOfWorkflow
+ when 23 then colorType = '#7cbce9' # usedInputArtifact
+ when 24 then colorType = '#12476d' # usedInputDictionary
+ when 31 then colorType = '#ffc999' # wasoutputFromWf
+ when 32 then colorType = '#ff7f0e' # wasOutputFromProcess
+ when 34 then colorType = '#663000' # insertInList
+ when 41 then colorType = '#bb80ff' # output from wf
+ when 42 then colorType = '#7f0eff' #outputFromProcess
+ when 43 then colorType = '#3c0080' #split
+ when 44 then colorType = '#990000' # insert into another funct
+ else '#c3e221'
+
+@createGroupType =(type) ->
+ group = -1
+ switch type
+ when 'Workflow Run' then group = 1
+ when 'Process Run' then group = 2
+ when 'Artifact' then group = 3
+ when 'Dictionary' then group = 4
+ else group = 0
+ group
+
+# limit a string to maxChar
+@shortenString =(temp, maxChar) ->
+ if temp.length > maxChar
+ temp = temp.substring(0, maxChar) + '..'
+ temp
+
+# limit a string to 32 chars : "{15 chars}..{15 chars}"
+@shortenStringNoMiddle =(temp) ->
+ if temp.length > 32
+ temp = temp.substring(0, 15) + '..' + temp.substring(temp.length - 15, temp.length)
+ temp
+
+@getTimes =(d) ->
+ startTime = new Date()
+ endTime = new Date()
+ nodeTime = 0
+ if(d.hasOwnProperty("startedAtTime"))
+ startTime = new Date(d.startedAtTime)
+ endTime = new Date(d.endedAtTime)
+ nodeTime = 1
+
+ elapsedTime = endTime - startTime
+
+ date_format_iso =(date) ->
+ date.toISOString().replace( /[T]/g, ' ').slice(0, -1)
+
+ hms =(ms) ->
+ date = new Date(ms);
+ str = '';
+ if date.getUTCDate()-1 > 0
+ str += date.getUTCDate()-1 + " days, ";
+ if date.getUTCHours > 0
+ str += date.getUTCHours() + " hours, ";
+ if date.getUTCMinutes() > 0
+ str += date.getUTCMinutes() + " minutes, ";
+ if date.getUTCSeconds() > 0
+ str += date.getUTCSeconds() + " seconds, ";
+ str += date.getUTCMilliseconds() + " millis";
+ str
+
+ if nodeTime == 1
+ 'Start Time: ' + date_format_iso(startTime) + '\nEnd Time: ' + date_format_iso(endTime) + '\nElapsed Time: ' + hms(elapsedTime)
+ else
+ ''
+
+# create function that to split the text into multiple lines for the svg-text
+# cannot find something like this online
+@wrap = (text) ->
+ text.each ->
+ text = d3.select(this)
+ labels = text.text().split("\\n")
+ text.text(null)
+
+ line = []
+
+ lineNumber = 1
+ if(labels.length != 0)
+ lineNumber = (-1) * (Math.floor(labels.length / 2) - 1)
+
+ lineHeight = 1.1
+ for temp in labels
+ temp = temp.substring(temp.lastIndexOf(' '))
+ text.append('tspan').attr('x', text.attr('x')).attr('y', text.attr('y')).attr('dy', lineNumber * lineHeight + 'em' ).text(temp).filter((d) ->
+ d.x < glob_width / 5
+ ).attr('x', "22")
+ lineNumber++
+
+ return
+ return
+# create function that to split the text into multiple lines for the svg-text
+# cannot find something like this online
+@wrapNoNewLine = (text) ->
+ text.each ->
+ text = d3.select(this)
+ labels = text.text().split("\\n")
+ text.text(null)
+
+ line = []
+
+ lineNumber = 1
+ if(labels.length != 0)
+ lineNumber = (-1) * (Math.floor(labels.length / 2) - 1)
+
+ lineHeight = 0.15
+ final = ''
+ for temp in labels
+ final += temp.substring(temp.lastIndexOf(' ')) + ', '
+
+ final = final[0...-2]
+ text.append('tspan').attr('x', text.attr('x')).attr('y', text.attr('y')).attr('dy', lineHeight + 'em' ).text(final).filter((d) ->
+ d.x < glob_width / 5
+ ).attr('x', "22")
+
+ return
+ return
+
+
+# some local functions for zooming in/out and for walking around
+@zoomed = ->
+ if isEnabled
+ d3.select('g#zoomContainer').attr 'transform', 'translate(' + d3.event.translate + ')scale(' + d3.event.scale + ')'
+ else
+ null
+ return
+
+@draw = ->
+ d3.json $('#data_bundle').attr('data-url'), (error, data) ->
+ @tempgraph = $.extend(true, {}, data)
+
+ if(Object.keys(tempgraph).length)
+ hasBeenDrawn = draw_workflow(hasBeenDrawn)
+ draw_provenance()
+
+ return
+ return
+
+@clone = (obj) ->
+ return obj if obj is null or typeof (obj) isnt "object"
+ temp = new obj.constructor()
+ for key of obj
+ temp[key] = clone(obj[key])
+ temp
+
+@draw_workflow =(draw) ->
+ data = clone(@tempgraph.workflow)
+ if !draw?
+ width = 960
+ height = 650
+ opacity = 0.7
+
+ color = d3.scale.category20()
+
+ $('canvas#canvasWF').attr
+ 'width': (width + 150)
+ 'height': (width)
+
+ svgContainer = d3.select('svg#graphContainer').attr('width', width+150).attr('height', width).append('g').attr('transform', (d) ->
+ "translate("+ (width) + ", 0) rotate (90)"
+ )
+
+ verticalSankey = d3.vertical_sankey().nodeWidth(25).nodePadding(20).size([width-128, height])
+
+ path = verticalSankey.link()
+
+ verticalSankey.nodes(data.nodes).links(data.links).layout(32)
+
+ link = svgContainer.append('g').selectAll('.link').data(data.links).enter().append('path').attr('class', 'link').attr('d', path).style('stroke-width', (d) ->
+ Math.max 1, d.dy
+ ).style('stroke', (d) ->
+ d.source.color = color(d.source.name.replace(RegExp(' .*'), ''))
+ ).sort((a, b) ->
+ b.dx - (a.dx)
+ )
+
+ link.attr('opacity', opacity)
+
+ link.append('title').text((d) ->
+ d.source.name + '\n\u2192\n' + d.target.name
+ )
+
+ node = svgContainer.append('g').selectAll('.node').data(data.nodes).enter().append('g').attr('class', 'node').attr('transform', (d) ->
+ 'translate(' + d.x + ',' + d.y + ')'
+ )
+
+ node.append('rect').attr('width', verticalSankey.nodeWidth()).attr('height', (d) ->
+ Math.abs d.dy
+ ).style('fill', (d) ->
+ d.color = color(d.name.replace(RegExp(' .*'), ''))
+ ).style('stroke', (d) ->
+ d3.rgb(d.color).darker 2
+ )
+
+ node.append('text').attr('text-anchor', 'middle').attr('y', (d) ->
+ 12
+ ).attr('x', (d) ->
+ d.dy/-2
+ ).attr('dy', '.35em').attr('transform', (d) ->
+ "translate("+ 0 + ", 0) rotate (270)"
+ ).text((d) ->
+ shortenName =(d) ->
+
+ # convert the text to pixels
+ canvas = document.createElement('canvas')
+ ctx = canvas.getContext("2d")
+ ctx.font = "14px Source Sans Pro"
+ textPX = ctx.measureText(d.name).width
+
+ if textPX > d.dy
+ d.name.substring(0, 9) + '..' + d.name.substring(d.name.length - 9, d.name.length)
+ else
+ d.name
+
+ shortenName(d)
+
+ ).filter (d) ->
+ d.x < width / 2
+
+ return true
+
+@draw_provenance =(diagramType) ->
+ # if diagramType is undefined or null, as default assign the current active
+ # else clear the svg for the diagram
+ if !diagramType?
+ diagramType = $('#diagramType li.active a').text()
+ else
+ d3.select('svg#provContainer').selectAll("*").remove()
+ d3.select('svg#provContainer').remove()
+ d3.select('#mapContainer').append('svg').attr('id','provContainer')
+
+ @graph = clone(@tempgraph)
+ if(diagramType == 'Sankey')
+ draw_sankey()
+ else if(diagramType == 'Co-occurrence')
+ draw_miserables()
+
+
+ return
+
+
+@draw_miserables = ->
+ width = 1580
+ height = 750
+
+ # compute a better width and height for the container
+ nodesCount = Object.keys(graph.provenance.nodes).length
+ linksCount = Object.keys(graph.provenance.links).length
+
+ if nodesCount > 0 or linksCount > 0
+ ratioN = 1.0 * nodesCount * 10
+ width = width + 25
+ height = width
+ divider = 3
+ setGLWidth(width)
+
+ $('canvas#canvasPROV').attr
+ 'width': width
+ 'height': height
+
+
+ x = d3.scale.ordinal().rangeBands([
+ 0
+ (5600 / divider)
+ ])
+ z = d3.scale.linear().domain([
+ 0
+ 4
+ ]).clamp(true)
+
+ color = d3.scale.category20()
+
+ svg = d3.select('svg#provContainer').attr('width', width+602).attr('height', height+535).append('g').attr('transform', 'translate(350,277)')
+
+ # build a [source, target] matrix
+ matrix = []
+ nodes = graph.provenance.nodes
+ n = nodes.length
+
+ row = (row) ->
+ cell = d3.select(this).selectAll('.cell').data(row.filter((d) ->
+ d.z + Math.floor(Math.random() * 10)
+ )).enter().append('rect').attr('class', 'cell').attr('x', (d) ->
+ x d.x
+ ).attr('width', x.rangeBand()).attr('height', x.rangeBand()).style('fill-opacity', (d) ->
+ z d.z
+ ).style('fill', (d) ->
+ console.log(d.x + " .. " + d.y)
+ if d.x == d.y
+ "#123456"
+ else
+ transition = createGroupType(nodes[d.x].type) * 10 + createGroupType(nodes[d.y].type)
+ getColorTransitionTypeHex(transition)
+ ).append('title').text((d) ->
+ str = nodes[d.x].type + ' \u2192 ' + nodes[d.y].type
+ str += dashLine + 'Source: ' + nodes[d.x].name
+
+ if createGroupType(nodes[d.x].type) == 2 # if process
+ str += '\n\n' + getTimes(nodes[d.x])
+ else if createGroupType(nodes[d.x].type) > 2 # if artifact or dictionary
+ if nodes[d.x].content?
+ str += '\n\n' + shortenString(nodes[d.x].content, 500)
+
+ str += dashLine + 'Target: ' + nodes[d.y].name
+ if createGroupType(nodes[d.y].type) == 2 # if process
+ str += '\n\n' + getTimes(nodes[d.y])
+ else if createGroupType(nodes[d.y].type) > 2 # if artifact or dictionary
+ if nodes[d.y].content?
+ str += '\n\n' + shortenString(nodes[d.y].content, 500)
+
+ str
+ ).on('mouseover', mouseover).on('mouseout', mouseout)
+ return
+
+ mouseover = (p) ->
+ d3.selectAll('.row text').classed('active', (d, i) ->
+ i == p.y
+ )
+ d3.selectAll('.column text').classed('active', (d, i) ->
+ i == p.x
+ )
+ return
+
+ mouseout = ->
+ d3.selectAll('text').classed('active', false)
+ return
- nodes = {}
- links.forEach (link) ->
- link.source = nodes[link.source] or (nodes[link.source] =
- name: link.source, file_content: link.file_content)
- link.target = nodes[link.target] or (nodes[link.target] =
- name: link.target, file_content: link.file_content)
- link.value = +link.value
+ order = (value) ->
+ x.domain orders[value]
+ t = svg.transition().duration(1500)
+ t.selectAll('.row').delay((d, i) ->
+ x(i) * 4
+ ).attr('transform', (d, i) ->
+ 'translate(0,' + x(i) + ')'
+ ).selectAll('.cell').delay((d) ->
+ x(d.x) * 4
+ ).attr 'x', (d) ->
+ x d.x
+ t.selectAll('.column').delay((d, i) ->
+ x(i) * 4
+ ).attr 'transform', (d, i) ->
+ 'translate(' + x(i) + ')rotate(-90)'
+ return
+
+
+
+ # Compute index per node.
+ nodes.forEach (node, i) ->
+ node.index = i
+ node.count = 0
+ matrix[i] = d3.range(n).map((j) ->
+ {
+ x: j
+ y: i
+ z: 0
+ }
+ )
+ return
+
+ # Add the legend
+
+ legendCategories = { "category":[{"name":"Workflow Run -wasPartOfWorkflow- Workflow Run", "transition":"11"},
+ {"name":"Workflow Run -usedInput- Artifact", "transition":"13"},
+ {"name":"Workflow Run -usedInput- Dictionary", "transition":"14"},
+ {"name":"Process Run -wasPartOfWorkflow- Workflow Run", "transition":"21"},
+ {"name":"Process Run -usedInput- Artifact", "transition":"23"},
+ {"name":"Process Run -usedInput- Dictionary", "transition":"24"},
+ {"name":"Artifact -wasOutputFrom- Workflow Run", "transition":"31"},
+ {"name":"Artifact -wasOutputFrom- Process Run", "transition":"32"},
+ {"name":"Artifact -isIntegratedIn- Dictionary", "transition":"34"},
+ {"name":"Dictionary -wasOutputFrom- Artifact", "transition":"41"},
+ {"name":"Dictionary -wasOutputFrom- Artifact", "transition":"42"},
+ {"name":"Dictionary -hasMember- Artifact", "transition":"43"},
+ {"name":"Dictionary -isSplit/PushedInto- Dictionary", "transition":"44"}]}
+
+ legend = svg.append('g').attr('class', 'legend').attr('x', 0).attr('y', 0).selectAll('.category').data(legendCategories.category).enter().append('g').attr('class', 'category')
+
+ legendConfig =
+ rectWidth: 20
+ rectHeight: 14
+ xOffset: -350
+ yOffset: -275
+ xOffsetText: 26
+ yOffsetText: -10
+ lineHeight: 10
+ wordApart: 20
+
+ legendConfig.yOffsetText += 20
+ legendConfig.xOffsetText += legendConfig.xOffset
+
+ legend.append('rect').attr('y', (d, i) ->
+ legendConfig.yOffset + i * legendConfig.wordApart
+ ).attr('x', legendConfig.xOffset).attr('height', legendConfig.rectHeight).attr('width', legendConfig.rectWidth).style('fill', (d) ->
+ getColorTransitionTypeHex(parseInt(d.transition))
+ ).style('stroke', '#000000')
+
+ legend.append('text').attr('y', (d, i) ->
+ legendConfig.yOffset + i * legendConfig.wordApart + legendConfig.yOffsetText
+ ).attr('x', legendConfig.xOffsetText).text((d) ->
+ d.name
+ )
+
+ # Convert links to matrix; count character occurrences.
+ graph.provenance.links.forEach (link) ->
+ matrix[link.source][link.target].z = createGroupType(nodes[link.source].type) * 10 + createGroupType(nodes[link.target].type)
+ nodes[link.source].count += link.value
+ nodes[link.target].count += link.value
+ return
+
+ # Precompute the orders.
+ orders =
+ name: d3.range(n).sort((a, b) ->
+ d3.ascending nodes[a].name, nodes[b].name
+ )
+ count: d3.range(n).sort((a, b) ->
+ nodes[b].count - (nodes[a].count)
+ )
+ type: d3.range(n).sort((a, b) ->
+ createGroupType(nodes[b].type) - createGroupType(nodes[a].type)
+ )
+
+ # The default sort order.
+ x.domain orders.name
+
+ #svg.append('rect').attr('class', 'misbackground').attr('width', width/divider).attr('height', height/divider)
+
+ row = svg.selectAll('.row').data(matrix).enter().append('g').attr('class', 'row').attr('transform', (d, i) ->
+ 'translate(0,' + x(i) + ')'
+ ).each(row)
+
+ row.append('line').attr('x2', 5600/divider)
+
+ row.append('text').attr('x', -6).attr('y', x.rangeBand() / 2).attr('dy', '.32em').attr('text-anchor', 'end').text((d, i) ->
+ if nodes[i].hasOwnProperty("label")
+ nodes[i].label
+ else
+ shortenStringNoMiddle(nodes[i].name)
+ ).call(wrapNoNewLine)
+
+ column = svg.selectAll('.column').data(matrix).enter().append('g').attr('class', 'column').attr('transform', (d, i) ->
+ 'translate(' + x(i) + ')rotate(-90)'
+ )
+
+ column.append('line').attr('x1', 5600/divider * (-1))
+
+ column.append('text').attr('x', 6).attr('y', x.rangeBand() / 2).attr('dy', '.32em').attr('text-anchor', 'start').text((d, i) ->
+ if nodes[i].hasOwnProperty("label")
+ nodes[i].label
+ else
+ shortenStringNoMiddle(nodes[i].name)
+ ).call(wrapNoNewLine)
+
+ d3.select('#order').on 'change', ->
+ # clearTimeout timeout
+ order @value
+ return
+
+ return
+
+
+@draw_sankey = ->
+ width = 950
+ height = 750
+ lowOpacity = 0.3
+ hoverOpacity = 0.7
+ highOpacity = 0.9
+
+ # zoom the d3
+ zoom = d3.behavior.zoom().scaleExtent([
+ 0.5
+ 10
+ ]).on('zoom', zoomed)
+
+ # load the svg#sankeyContainer
+ # set the width and height attributes
+ # append a function g that has a tranform process defined by translation
+ svgContainer = d3.select('svg#provContainer')
+
+ # define the sankey object
+ # set the node width to 15
+ # set the node padding to 10
+ sankey = d3.sankey().nodeWidth(20).nodePadding(10)
+
+ # request the sankey path of current sankey
+ path = sankey.reversibleLink()
+
+ # load data to work with
+
+ # compute a better width and height for the container
+ nodesCount = Object.keys(graph.provenance.nodes).length
+ linksCount = Object.keys(graph.provenance.links).length
+
+ if nodesCount > 0 or linksCount > 0
+ ratioLN = linksCount / nodesCount * 100
+ width = width + Math.floor( ratioLN * 3 )
+ height = height + Math.floor( ratioLN )
+ setGLWidth(width)
+
+ $('canvas#canvasPROV').attr
+ 'width': width
+ 'height': height
+
+ svgContainer.attr('width', width+125).attr('height', height+150).append('g')
+
+ rect = svgContainer.append('rect').attr('width', width).attr('height', height).style('fill', 'none').style('pointer-events', 'all')
+
+ sankey = sankey.size([width, height])
+
+
+ svg = svgContainer.append('g').attr("id", "zoomContainer").attr('transform', 'translate(0,' + 75 + ')')
+
+ svgContainer.call(zoom).on("dblclick.zoom", null).on("click.zoom", null).on("mousedown.zoom", null)
+
+ # set the nodes
+ # set the links
+ # set the layout
+ sankey.nodes(graph.provenance.nodes).links(graph.provenance.links)
+ sankey.layout(32)
+
+ legendCategories = { "category":[{"type":"Workflow Run"},{"type":"Process Run"}, {"type":"Artifact"}, {"type":"Dictionary"}] }
+ legend = svgContainer.append('g').attr('class', 'legend').attr('x', 0).attr('y', 0).selectAll('.category').data(legendCategories.category).enter().append('g').attr('class', 'category')
+
+ legendConfig =
+ rectWidth: 20
+ rectHeight: 14
+ xOffset: 625
+ yOffset: 30
+ xOffsetText: 5
+ yOffsetText: 11
+ lineHeight: 10
+ wordApart: 125
+
+ legendConfig.xOffsetText += 20
+ legendConfig.yOffsetText += legendConfig.yOffset
+
+ legend.append('rect').attr('x', (d, i) ->
+ legendConfig.xOffset + i * legendConfig.wordApart
+ )
+ .attr('y', legendConfig.yOffset).attr('height', legendConfig.rectHeight).attr('width', legendConfig.rectWidth).style('fill', (d) ->
+ getColorHex(d.type)
+ ).style('stroke', (d) ->
+ d3.rgb(d.color).darker 1
+ )
+
+ legend.append('text').attr('x', (d, i) ->
+ legendConfig.xOffset + i * legendConfig.wordApart + legendConfig.xOffsetText
+ ).attr('y', legendConfig.yOffsetText).text((d) ->
+ d.type
+ )
+
+
+ # select all the links from the json-data and append them to the Sankey obj in alphabetical order
+ link = svg.append('g').selectAll('.link').data(graph.provenance.links).enter().append('g').attr('class', 'link').attr('id', (d,i) ->
+ d.id = i
+ "link-" + i
+ ).sort((a, b) ->
+ b.dy - (a.dy))
+
+ p0 = link.append("path").attr("d", path(0))
+ p1 = link.append("path").attr("d", path(1))
+ p2 = link.append("path").attr("d", path(2))
+
+ link.attr('fill', (d) ->
+ getColorHex(d.source.type)
+ ).attr('opacity', lowOpacity).on('mouseover', (d) ->
+ if parseFloat(d3.select(this).style('opacity')) != highOpacity
+ d3.select(this).style('opacity', hoverOpacity)
+ ).on('mouseout', (d) ->
+ if parseFloat(d3.select(this).style('opacity')) != highOpacity
+ d3.select(this).style('opacity', lowOpacity)
+ )
+
+ # set the text for the edges
+ link.append('title').text (d) ->
+ dash = '\n-----------------------------------------------------------\n'
+ startText = d.source.type + ' \u2192 ' + d.target.type + dash + 'Source:\nURI: ' + d.source.name
+ endText = 'Target:\nURI: ' + d.target.name
+ startText + dash + endText
+
+ # create the function to drag the node
+ dragmove = (d) ->
+ # uncomment the following to disable x movement (and comment the next line )
+ #d3.select(this).attr('transform', 'translate(' + d.x + ',' + (d.y = Math.max(0, Math.min(height - (d.dy), d3.event.y))) + ')')
+ d3.select(this).attr('transform', 'translate(' + (d.x = Math.max(0, Math.min(width - (d.dx), d3.event.x))) + ',' + (d.y = Math.max(0, Math.min(height - (d.dy), d3.event.y))) + ')')
+ sankey.relayout()
+ p0.attr("d", path(1))
+ p1.attr("d", path(0))
+ p2.attr("d", path(2))
+ return
+
+
+ # select all the nodes from the json-data and append them to the Sankey obj
+ # add behavior : dragmove
+ node = svg.append('g').selectAll('.node').data(graph.provenance.nodes).enter().append('g').attr('class', 'node').attr('transform', (d) ->
+ yValue = Math.min(d.y, height)
+ 'translate(' + d.x + ',' + yValue + ')'
+ ).call(d3.behavior.drag().origin((d) ->
+ d
+ ).on('dragstart', ->
+ @parentNode.appendChild this
+ return
+ ).on('drag', dragmove))
+
+ # choose the form of the node : filled rectangle
+ # set the height of the rectangle to d.dy
+ # set the width of the rectangle to nodeWidth?
+ # set the style to be filled with default color
+ node.append('rect').attr('height', (d) ->
+ Math.max 10, d.dy
+ ).attr('data-clicked', '0').attr('width', sankey.nodeWidth()).style('fill', (d) ->
+ getColorHex(d.type)
+ ).style('stroke', (d) ->
+ d3.rgb(d.color).darker 1
+ ).append('title').text((d) ->
+
+ returnedStr = d.type + dashLine
+ returnedStr += 'URI: ' + d.name + dashLine
+ returnedStr += d.label.split("\\n").join("\n")
+
+ if(d.type == "Process Run")
+ returnedStr += dashLine + getTimes(d)
+ else if(d.type == "Artifact" || d.type == "Dictionary" && d.content)
+ returnedStr += dashLine + "Content :\n" + shortenString(d.content, 500)
+
+ returnedStr
+ )
+
+ #modify the link opacity to the given opacity
+ click_highlight_path_color = (id, opacity) ->
+ d3.select('#link-' + id).style('opacity', opacity)
+
+ click_highlight_path = (node, i) ->
+ # check if the user wants to drag or to click the node
+ # if he wants to drag then the following will be true
+ if (d3.event.defaultPrevented)
+ return
+
+ remainingNodes = []
+ nextNodes = []
+ stroke_opacity = 0
+
+ # if a node has been clicked and then mark it as unclick if clicked again
+ if d3.select(this).attr('data-clicked') == '1'
+ d3.select(this).attr('data-clicked', '0')
+ stroke_opacity = lowOpacity
+ else
+ d3.select(this).attr('data-clicked', '1')
+ stroke_opacity = highOpacity
+
+ # remember all visited nodes and the path
+ # traverse will be a JSON array
+ traverse = [
+ {
+ linkType: 'sourceLinks'
+ nodeType: 'target'
+ }
+ {
+ linkType: 'targetLinks'
+ nodeType: 'source'
+ }
+ ]
+
+ # for each object inside traverse
+ traverse.forEach (step) ->
+ # for each (outgoing,incoming) link
+ node[step.linkType].forEach (link) ->
+ remainingNodes.push(link[step.nodeType])
+ click_highlight_path_color(link.id, stroke_opacity)
+ return
+
+ while remainingNodes.length
+ nextNodes = []
+ remainingNodes.forEach (node) ->
+ node[step.linkType].forEach (link) ->
+ nextNodes.push(link[step.nodeType])
+ click_highlight_path_color(link.id, stroke_opacity)
+ return
+ return
+ remainingNodes = nextNodes
return
+ return
- width = 960
- height = 900
-
- force = d3.layout.force().nodes(d3.values(nodes)).links(links).size([width, height])
- .linkDistance(100).charge(-500).on('tick', tick).start()
- svgContainer = d3.select('svg#graphContainer').attr('width', width).attr('height', height)
- # build the arrow.
- svgContainer.append('svg:defs').selectAll('marker').data(['end']).enter().append('svg:marker').attr('id', String)
- .attr('viewBox', '0 -5 10 10').attr('refX', 15).attr('refY', -1.5).attr('markerWidth', 6)
- .attr('markerHeight', 6).attr('orient', 'auto').append('svg:path').attr 'd', 'M0,-5L10,0L0,5'
- # add the links and the arrows
- path = svgContainer.append('svg:g').selectAll('path').data(force.links()).enter().append('svg:path')
- .attr('class', 'link').attr('marker-end', 'url(#end)')
- # define the nodes
- node = svgContainer.selectAll('.node').data(force.nodes()).enter().append('g').attr('class', 'node')
- .attr('id', (d) -> d.name).call(force.drag)
- # add the nodes
- node.append('circle').attr('r', 5)
- # add the text
- node.append('text').attr('x', 12).attr('dy', '.35em').text (d) ->
- d.name
- node.append('text').attr('class', 'file_content').attr('visibility', 'hidden').text (d) ->
- return d.file_content
-
- node.on 'click', (d) ->
- rect = svgContainer.append('rect').transition().duration(500).attr('width', 250)
- .attr('height', 300).attr('x', 10).attr('y', 10).style('fill', 'white').attr('stroke', 'black')
- text = svgContainer.append('text').text(d.file_content)
- .attr('x', 50).attr('y', 150).attr('fill', 'black')
+
+ cc = clickCancel()
+
+ # show the whole path on single click on nodes
+ # the function highlight_node_links uses Breadth First Search alghorithm to find the reachable nodes
+ # add remove the outgoing edges from current node on dblclick
+ node.call(cc).on('click', click_highlight_path).on('dblclick', (d)->
+ if (d3.event.defaultPrevented)
+ return
+ svg.selectAll('.link').filter((l) ->
+ l.source == d
+ ).attr('display', ->
+ if d3.select(this).attr('display') == 'none'
+ 'inline'
+ else
+ 'none'
+ )
return
+ )
+
+ # set the text of the nodes
+ # set their position
+ # set their font
+ # set the anchor of the text
+ node.append('text').attr('x', (d) ->
+ d.dx/2 - 12
+ ).attr('y', (d) ->
+ d.dy/2 - 10
+ ).attr('text-anchor', 'end')
+ .text((d) ->
+ if d.hasOwnProperty("label")
+ d.label
+ else
+ shortenStringNoMiddle(d.name)
+ ).call(wrap).filter((d) ->
+ d.x < width / 5
+ ).attr('x', "22").attr('text-anchor', 'start')
+
+ # select all the nodes from the json-data and append them to the Sankey obj
+ # add behavior : dragmove
+
+ return
+
+
+#http://techslides.com/save-svg-as-an-image
+d3.select('#saveWF').on('click', ->
+ html = d3.select('svg#graphContainer').attr('version', 1.1).attr('xmlns', 'http://www.w3.org/2000/svg').node().parentNode.innerHTML
+ imgsrc = 'data:image/svg+xml;base64,' + btoa(unescape(encodeURIComponent(html)))
+ img = '<img src="' + imgsrc + '">'
+
+ canvas = document.querySelector('canvas#canvasWF')
+ context = canvas.getContext("2d")
+ image = new Image
+ image.src = imgsrc
+
+ image.onload = ->
+ context.drawImage(image, 0, 0)
+ canvasdata = canvas.toDataURL('image/png')
+ pngimg = '<img src="' + canvasdata + '">'
+
+ now = new Date
+ differential = now.getDate() + "_" + now.getMonth() + "_" + now.getFullYear() + "_" + now.getHours() + "_" + now.getMinutes() + "_" + now.getSeconds()
+
+ a = document.createElement('a')
+ a.download = 'workflow_' + differential + '.png'
+ a.href = canvasdata
+ a.click()
+ return
+
+ return
+)
+
+d3.select('#savePROV').on('click', ->
+ html = d3.select('svg#provContainer').attr('version', 1.1).attr('xmlns', 'http://www.w3.org/2000/svg').node().parentNode.innerHTML
+ imgsrc = 'data:image/svg+xml;base64,' + btoa(unescape(encodeURIComponent(html)))
+ img = '<img src="' + imgsrc + '">'
+
+ canvas = document.querySelector('canvas#canvasPROV')
+ context = canvas.getContext("2d")
+ image = new Image
+ image.src = imgsrc
+
+ image.onload = ->
+ context.drawImage(image, 0, 0)
+ canvasdata = canvas.toDataURL('image/png')
+ pngimg = '<img src="' + canvasdata + '">'
+
+ now = new Date
+ differential = now.getDate() + "_" + now.getMonth() + "_" + now.getFullYear() + "_" + now.getHours() + "_" + now.getMinutes() + "_" + now.getSeconds()
+
+
+ a = document.createElement('a')
+ a.download = 'provenance_' + differential + '.png'
+ a.href = canvasdata
+ a.click()
+ return
+
+ return
+)
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/7dddbeb4/app/assets/javascripts/sankey.js
----------------------------------------------------------------------
diff --git a/app/assets/javascripts/sankey.js b/app/assets/javascripts/sankey.js
new file mode 100644
index 0000000..7f3580b
--- /dev/null
+++ b/app/assets/javascripts/sankey.js
@@ -0,0 +1,577 @@
+// d3 sankey layout plugin (horizontal flow diagram).  Computes node
+// positions (x = breadth/column, y = depth within column) and per-link
+// offsets (sy/ty) for a weighted directed graph.  Unlike the stock d3
+// sankey, this variant tolerates cycles: nodes are first grouped into
+// strongly connected components, and reverse links can be drawn with
+// sankey.reversibleLink().
+d3.sankey = function() {
+ var sankey = {},
+ nodeWidth = 24,
+ nodePadding = 8,
+ size = [1, 1],
+ nodes = [],
+ links = [],
+ components = [];
+
+ // Chainable getter/setters for layout configuration and input data.
+ sankey.nodeWidth = function(_) {
+ if (!arguments.length) return nodeWidth;
+ nodeWidth = +_;
+ return sankey;
+ };
+
+ sankey.nodePadding = function(_) {
+ if (!arguments.length) return nodePadding;
+ nodePadding = +_;
+ return sankey;
+ };
+
+ sankey.nodes = function(_) {
+ if (!arguments.length) return nodes;
+ nodes = _;
+ return sankey;
+ };
+
+ sankey.links = function(_) {
+ if (!arguments.length) return links;
+ links = _;
+ return sankey;
+ };
+
+ sankey.size = function(_) {
+ if (!arguments.length) return size;
+ size = _;
+ return sankey;
+ };
+
+ // Run the full layout pipeline; `iterations` controls how many
+ // relaxation passes computeNodeDepths performs.
+ sankey.layout = function(iterations) {
+ computeNodeLinks();
+ computeNodeValues();
+
+ computeNodeStructure();
+ computeNodeBreadths();
+
+ computeNodeDepths(iterations);
+ computeLinkDepths();
+
+ return sankey;
+ };
+
+ // Recompute only the link offsets (e.g. after a node has been dragged);
+ // node positions are left untouched.
+ sankey.relayout = function() {
+ computeLinkDepths();
+ return sankey;
+ };
+
+ // A more involved path generator that requires 3 elements to render --
+ // It draws a starting element, intermediate and end element that are useful
+ // while drawing reverse links to get an appropriate fill.
+ //
+ // Each link is now an area and not a basic spline and no longer guarantees
+ // fixed width throughout.
+ //
+ // Sample usage:
+ //
+ // linkNodes = this._svg.append("g").selectAll(".link")
+ // .data(this.links)
+ // .enter().append("g")
+ // .attr("fill", "none")
+ // .attr("class", ".link")
+ // .sort(function(a, b) { return b.dy - a.dy; });
+ //
+ // linkNodePieces = [];
+ // for (var i = 0; i < 3; i++) {
+ // linkNodePieces[i] = linkNodes.append("path")
+ // .attr("class", ".linkPiece")
+ // .attr("d", path(i))
+ // .attr("fill", ...)
+ // }
+ sankey.reversibleLink = function() {
+ var curvature = .5;
+
+ // Used when source is behind target, the first and last paths are simple
+ // lines at the start and end node while the second path is the spline
+ function forwardLink(part, d) {
+ var x0 = d.source.x + d.source.dx,
+ x1 = d.target.x,
+ xi = d3.interpolateNumber(x0, x1),
+ x2 = xi(curvature),
+ x3 = xi(1 - curvature),
+ y0 = d.source.y + d.sy,
+ y1 = d.target.y + d.ty,
+ y2 = d.source.y + d.sy + d.dy,
+ y3 = d.target.y + d.ty + d.dy;
+
+ switch (part) {
+ case 0:
+ return "M" + x0 + "," + y0 + "L" + x0 + "," + (y0 + d.dy);
+
+ case 1:
+ return "M" + x0 + "," + y0
+ + "C" + x2 + "," + y0 + " " + x3 + "," + y1 + " " + x1 + "," + y1
+ + "L" + x1 + "," + y3
+ + "C" + x3 + "," + y3 + " " + x2 + "," + y2 + " " + x0 + "," + y2
+ + "Z";
+
+ case 2:
+ return "M" + x1 + "," + y1 + "L" + x1 + "," + (y1 + d.dy);
+ }
+ }
+
+ // Used for self loops and when the source is actually in front of the
+ // target; the first element is a turning path from the source to the
+ // destination, the second element connects the two twists and the last
+ // twists into the target element.
+ //
+ //
+ // /--Target
+ // \----------------------\
+ // Source--/
+ //
+ function backwardLink(part, d) {
+
+ var curveExtension = 30;
+ var curveDepth = 15;
+
+ // Direction of the initial turn: -1 when the source sits below the
+ // target, +1 otherwise.
+ function getDir(d) {
+ return d.source.y + d.sy > d.target.y + d.ty ? -1 : 1;
+ }
+
+ // Format an "x,y " coordinate pair for the SVG path string.
+ function p(x, y) {
+ return x + "," + y + " ";
+ }
+
+ var dt = getDir(d) * curveDepth,
+ x0 = d.source.x + d.source.dx,
+ y0 = d.source.y + d.sy,
+ x1 = d.target.x,
+ y1 = d.target.y + d.ty;
+
+ switch (part) {
+ case 0:
+ return "M" + p(x0, y0) +
+ "C" + p(x0, y0) +
+ p(x0 + curveExtension, y0) +
+ p(x0 + curveExtension, y0 + dt) +
+ "L" + p(x0 + curveExtension, y0 + dt + d.dy) +
+ "C" + p(x0 + curveExtension, y0 + d.dy) +
+ p(x0, y0 + d.dy) +
+ p(x0, y0 + d.dy) +
+ "Z";
+ case 1:
+ return "M" + p(x0 + curveExtension, y0 + dt) +
+ "C" + p(x0 + curveExtension, y0 + 3 * dt) +
+ p(x1 - curveExtension, y1 - 3 * dt) +
+ p(x1 - curveExtension, y1 - dt) +
+ "L" + p(x1 - curveExtension, y1 - dt + d.dy) +
+ "C" + p(x1 - curveExtension, y1 - 3 * dt + d.dy) +
+ p(x0 + curveExtension, y0 + 3 * dt + d.dy) +
+ p(x0 + curveExtension, y0 + dt + d.dy) +
+ "Z";
+
+ case 2:
+ return "M" + p(x1 - curveExtension, y1 - dt) +
+ "C" + p(x1 - curveExtension, y1) +
+ p(x1, y1) +
+ p(x1, y1) +
+ "L" + p(x1, y1 + d.dy) +
+ "C" + p(x1, y1 + d.dy) +
+ p(x1 - curveExtension, y1 + d.dy) +
+ p(x1 - curveExtension, y1 + d.dy - dt) +
+ "Z";
+ }
+ }
+
+ // Returned generator: path(part) yields a d-attribute function for
+ // piece `part` (0, 1 or 2); forward vs backward is chosen per datum.
+ return function(part) {
+ return function(d) {
+ if (d.source.x < d.target.x) {
+ return forwardLink(part, d);
+ } else {
+ return backwardLink(part, d);
+ }
+ }
+ }
+ };
+
+ // The standard link path using a constant width spline that needs a
+ // single path element.
+ sankey.link = function() {
+ var curvature = .5;
+
+ // Cubic Bezier from the source's right edge to the target's left edge,
+ // through the vertical centre of the link at each end.
+ function link(d) {
+ var x0 = d.source.x + d.source.dx,
+ x1 = d.target.x,
+ xi = d3.interpolateNumber(x0, x1),
+ x2 = xi(curvature),
+ x3 = xi(1 - curvature),
+ y0 = d.source.y + d.sy + d.dy / 2,
+ y1 = d.target.y + d.ty + d.dy / 2;
+
+ return "M" + x0 + "," + y0
+ + "C" + x2 + "," + y0
+ + " " + x3 + "," + y1
+ + " " + x1 + "," + y1;
+ }
+
+
+
+ link.curvature = function(_) {
+ if (!arguments.length) return curvature;
+ curvature = +_;
+ return link;
+ };
+
+ return link;
+ };
+
+ // Populate the sourceLinks and targetLinks for each node.
+ // Also, if the source and target are not objects, assume they are indices.
+ function computeNodeLinks() {
+ nodes.forEach(function(node) {
+ node.sourceLinks = [];
+ node.targetLinks = [];
+ });
+
+ links.forEach(function(link) {
+ var source = link.source,
+ target = link.target;
+ if (typeof source === "number") source = link.source = nodes[link.source];
+ if (typeof target === "number") target = link.target = nodes[link.target];
+ source.sourceLinks.push(link);
+ target.targetLinks.push(link);
+ });
+ }
+
+ // Compute the value (size) of each node by summing the associated links.
+ function computeNodeValues() {
+ nodes.forEach(function(node) {
+ if (!(node.value)) //if not already given
+ node.value = Math.max(
+ d3.sum(node.sourceLinks, value),
+ d3.sum(node.targetLinks, value)
+ );
+ });
+ }
+
+ // Take the list of nodes and create a DAG of supervertices, each consisting
+ // of a strongly connected component of the graph
+ //
+ // Based off:
+ // http://en.wikipedia.org/wiki/Tarjan's_strongly_connected_components_algorithm
+ function computeNodeStructure() {
+ var nodeStack = [],
+ index = 0;
+
+ nodes.forEach(function(node) {
+ // NOTE(review): truthiness test — a node with index 0 would be
+ // revisited.  Harmless here only because index 0 is always assigned
+ // to the first node this loop touches; hasOwnProperty('index') (as
+ // used inside connect below) would be the safer, consistent check.
+ if (!node.index) {
+ connect(node);
+ }
+
+ });
+
+ // Tarjan's recursive DFS: assigns index/lowIndex and pops a completed
+ // strongly connected component off nodeStack when a root is found.
+ function connect(node) {
+ node.index = index++;
+ node.lowIndex = node.index;
+ node.onStack = true;
+ nodeStack.push(node);
+
+ if (node.sourceLinks) {
+ node.sourceLinks.forEach(function(sourceLink){
+ var target = sourceLink.target;
+ if (!target.hasOwnProperty('index')) {
+ connect(target);
+ node.lowIndex = Math.min(node.lowIndex, target.lowIndex);
+ } else if (target.onStack) {
+ node.lowIndex = Math.min(node.lowIndex, target.index);
+ }
+ });
+
+ // node is the root of an SCC: pop stack members down to (and
+ // including) node to form the component.
+ if (node.lowIndex === node.index) {
+ var component = [], currentNode;
+ do {
+ currentNode = nodeStack.pop()
+ currentNode.onStack = false;
+ component.push(currentNode);
+ } while (currentNode != node);
+ components.push({
+ root: node,
+ scc: component
+ });
+ }
+ }
+ }
+
+ // Tag every node with the index of the component it belongs to.
+ components.forEach(function(component, i){
+ component.index = i;
+ component.scc.forEach(function(node) {
+ node.component = i;
+ });
+ });
+ }
+
+ // Assign the breadth (x-position) for each strongly connected component,
+ // followed by assigning breadth within the component.
+ function computeNodeBreadths() {
+
+ layerComponents();
+
+ // Within each component, BFS from its root following only the links
+ // that stay inside the same component.
+ components.forEach(function(component, i){
+ bfs(component.root, function(node){
+ var result = node.sourceLinks
+ .filter(function(sourceLink){
+ return sourceLink.target.component == i;
+ })
+ .map(function(sourceLink){
+ return sourceLink.target;
+ });
+ return result;
+ });
+ });
+
+ // NOTE(review): `max` is declared twice (here and just below); `var`
+ // redeclaration is a no-op in JS so the second initializer (-1) wins
+ // and this line is dead.  Candidate for cleanup in the real source.
+ var max = 0;
+ var componentsByBreadth = d3.nest()
+ .key(function(d) { return d.x; })
+ // .sortKeys(d3.ascending)
+ .entries(components)
+ .map(function(d) { return d.values; });
+
+ var max = -1, nextMax = -1;
+ // Convert per-component x plus per-node offset into a global column
+ // number; an explicit node.layer overrides the computed position.
+ componentsByBreadth.forEach(function(c){
+ c.forEach(function(component){
+ component.x = max + 1;
+ component.scc.forEach(function(node){
+ if (node.layer)
+ node.x = node.layer;
+ else
+ node.x = component.x + node.x;
+ nextMax = Math.max(nextMax, node.x);
+
+ });
+ });
+ max = nextMax;
+ });
+
+
+ // Nodes with no real outgoing links (self-links excluded) are pushed
+ // to the rightmost column.
+ nodes
+ .filter(function(node) {
+ var outLinks = node.sourceLinks.filter(function(link){ return link.source.name != link.target.name; });
+ return (outLinks.length == 0);
+ })
+ .forEach(function(node) { node.x = max; })
+
+ // Scale column indices into pixel x-coordinates (Math.max guards
+ // against division by zero for single-column graphs).
+ scaleNodeBreadths((size[0] - nodeWidth) / Math.max(max, 1));
+
+ // NOTE(review): flatten is not referenced anywhere in this file.
+ function flatten(a) {
+ return [].concat.apply([], a);
+ }
+
+ // Breadth-first layering of the component DAG: each pass assigns the
+ // current x to all components reachable in one step.
+ function layerComponents() {
+ var remainingComponents = components,
+ nextComponents,
+ visitedIndex,
+ x = 0;
+
+ while (remainingComponents.length) {
+ nextComponents = [];
+ visitedIndex = {};
+
+ remainingComponents.forEach(function(component) {
+ component.x = x;
+
+ component.scc.forEach(function(n) {
+ n.sourceLinks.forEach(function(l) {
+ if (!visitedIndex.hasOwnProperty(l.target.component) &&
+ l.target.component != component.index) {
+ nextComponents.push(components[l.target.component]);
+ visitedIndex[l.target.component] = true;
+ }
+ })
+ });
+ });
+
+ remainingComponents = nextComponents;
+ ++x;
+ }
+ }
+
+ // Level-by-level BFS that assigns node.x (level number) and node.dx;
+ // extractTargets supplies each node's successors.
+ function bfs(node, extractTargets) {
+ var queue = [node], currentCount = 1, nextCount = 0;
+ var x = 0;
+
+ while(currentCount > 0) {
+ var currentNode = queue.shift();
+ currentCount--;
+
+ if (!currentNode.hasOwnProperty('x')) {
+ currentNode.x = x;
+ currentNode.dx = nodeWidth;
+
+ var targets = extractTargets(currentNode);
+
+ queue = queue.concat(targets);
+ nextCount += targets.length;
+ }
+
+
+ if (currentCount == 0) { // level change
+ x++;
+ currentCount = nextCount;
+ nextCount = 0;
+ }
+
+ }
+ }
+ }
+
+ // NOTE(review): moveSourcesRight and moveSinksRight are never called in
+ // this file (the vertical_sankey variant does call moveSinksRight).
+ function moveSourcesRight() {
+ nodes.forEach(function(node) {
+ if (!node.targetLinks.length) {
+ node.x = d3.min(node.sourceLinks, function(d) { return d.target.x; }) - 1;
+ }
+ });
+ }
+
+ function moveSinksRight(x) {
+ nodes.forEach(function(node) {
+ if (!node.sourceLinks.length) {
+ node.x = x - 1;
+ }
+ });
+ }
+
+ // Multiply column indices by kx to spread nodes across the drawing width.
+ function scaleNodeBreadths(kx) {
+ nodes.forEach(function(node) {
+ node.x *= kx;
+ });
+ }
+
+ // Assign y-positions within each column, then iteratively relax them
+ // (alternating right-to-left and left-to-right passes with a decaying
+ // alpha) while resolving overlaps after every pass.
+ function computeNodeDepths(iterations) {
+ var nodesByBreadth = d3.nest()
+ .key(function(d) { return d.x; })
+ .sortKeys(d3.ascending)
+ .entries(nodes)
+ .map(function(d) { return d.values; });
+
+ initializeNodeDepth();
+ resolveCollisions();
+
+ for (var alpha = 1; iterations > 0; --iterations) {
+ relaxRightToLeft(alpha *= .99);
+ resolveCollisions();
+ relaxLeftToRight(alpha);
+ resolveCollisions();
+ }
+
+ // Seed y = node order and scale node/link heights by ky, the tightest
+ // value-to-pixel ratio over all columns.
+ function initializeNodeDepth() {
+ var ky = d3.min(nodesByBreadth, function(nodes) {
+ return (size[1] - (nodes.length - 1) * nodePadding) / d3.sum(nodes, value);
+ });
+
+ nodesByBreadth.forEach(function(nodes) {
+ nodes.forEach(function(node, i) {
+ node.y = i;
+ node.dy = node.value * ky;
+ });
+ });
+
+ links.forEach(function(link) {
+ link.dy = link.value * ky;
+ });
+ }
+
+ // Pull each node towards the value-weighted centre of its sources.
+ function relaxLeftToRight(alpha) {
+ nodesByBreadth.forEach(function(nodes, breadth) {
+ nodes.forEach(function(node) {
+ if (node.targetLinks.length) {
+ var y = d3.sum(node.targetLinks, weightedSource) / d3.sum(node.targetLinks, value);
+ node.y += (y - center(node)) * alpha;
+ }
+ });
+ });
+
+ function weightedSource(link) {
+ return center(link.source) * link.value;
+ }
+ }
+
+ // Pull each node towards the value-weighted centre of its targets.
+ function relaxRightToLeft(alpha) {
+ nodesByBreadth.slice().reverse().forEach(function(nodes) {
+ nodes.forEach(function(node) {
+ if (node.sourceLinks.length) {
+ var y = d3.sum(node.sourceLinks, weightedTarget) / d3.sum(node.sourceLinks, value);
+ node.y += (y - center(node)) * alpha;
+ }
+ });
+ });
+
+ function weightedTarget(link) {
+ return center(link.target) * link.value;
+ }
+ }
+
+ // Remove vertical overlap within each column, keeping nodes inside
+ // the drawing bounds.
+ function resolveCollisions() {
+ nodesByBreadth.forEach(function(nodes) {
+ var node,
+ dy,
+ y0 = 0,
+ n = nodes.length,
+ i;
+
+ // Push any overlapping nodes down.
+ nodes.sort(ascendingDepth);
+ for (i = 0; i < n; ++i) {
+ node = nodes[i];
+ dy = y0 - node.y;
+ if (dy > 0) node.y += dy;
+ y0 = node.y + node.dy + nodePadding;
+ }
+
+ // If the bottommost node goes outside the bounds, push it back up.
+ dy = y0 - nodePadding - size[1];
+ if (dy > 0) {
+ y0 = node.y -= dy;
+
+ // Push any overlapping nodes back up.
+ for (i = n - 2; i >= 0; --i) {
+ node = nodes[i];
+ dy = node.y + node.dy + nodePadding - y0;
+ if (dy > 0) node.y -= dy;
+ y0 = node.y;
+ }
+ }
+ });
+ }
+
+ function ascendingDepth(a, b) {
+ return a.y - b.y;
+ }
+ }
+
+ // Stack each node's links top-to-bottom (ordered by the far end's y)
+ // and record the running offsets sy/ty used by the path generators.
+ function computeLinkDepths() {
+ nodes.forEach(function(node) {
+ node.sourceLinks.sort(ascendingTargetDepth);
+ node.targetLinks.sort(ascendingSourceDepth);
+ });
+ nodes.forEach(function(node) {
+ var sy = 0, ty = 0;
+ node.sourceLinks.forEach(function(link) {
+ link.sy = sy;
+ sy += link.dy;
+ });
+ node.targetLinks.forEach(function(link) {
+ link.ty = ty;
+ ty += link.dy;
+ });
+ });
+
+ function ascendingSourceDepth(a, b) {
+ return a.source.y - b.source.y;
+ }
+
+ function ascendingTargetDepth(a, b) {
+ return a.target.y - b.target.y;
+ }
+ }
+
+ // Vertical midpoint of a node.
+ function center(node) {
+ return node.y + node.dy / 2;
+ }
+
+ // Accessor used by d3.sum over links.
+ function value(link) {
+ return link.value;
+ }
+
+ return sankey;
+};
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/7dddbeb4/app/assets/javascripts/vertical_sankey.js
----------------------------------------------------------------------
diff --git a/app/assets/javascripts/vertical_sankey.js b/app/assets/javascripts/vertical_sankey.js
new file mode 100644
index 0000000..67fa517
--- /dev/null
+++ b/app/assets/javascripts/vertical_sankey.js
@@ -0,0 +1,292 @@
+// Sankey layout plugin, registered as d3.vertical_sankey.
+// NOTE(review): the algorithm here is the stock acyclic d3 sankey layout
+// (x = breadth, y = depth) — nothing in this file transposes the axes, so
+// "vertical" presumably refers to how the caller renders it; confirm
+// against the rendering code.  Unlike d3.sankey above, this variant has no
+// strongly-connected-component handling, so it assumes an acyclic graph.
+d3.vertical_sankey = function() {
+ var sankey = {},
+ nodeWidth = 24,
+ nodePadding = 8,
+ size = [1, 1],
+ nodes = [],
+ links = [];
+
+ // Chainable getter/setters for layout configuration and input data.
+ sankey.nodeWidth = function(_) {
+ if (!arguments.length) return nodeWidth;
+ nodeWidth = +_;
+ return sankey;
+ };
+
+ sankey.nodePadding = function(_) {
+ if (!arguments.length) return nodePadding;
+ nodePadding = +_;
+ return sankey;
+ };
+
+ sankey.nodes = function(_) {
+ if (!arguments.length) return nodes;
+ nodes = _;
+ return sankey;
+ };
+
+ sankey.links = function(_) {
+ if (!arguments.length) return links;
+ links = _;
+ return sankey;
+ };
+
+ sankey.size = function(_) {
+ if (!arguments.length) return size;
+ size = _;
+ return sankey;
+ };
+
+ // Run the full layout pipeline; `iterations` controls the number of
+ // relaxation passes in computeNodeDepths.
+ sankey.layout = function(iterations) {
+ computeNodeLinks();
+ computeNodeValues();
+ computeNodeBreadths();
+ computeNodeDepths(iterations);
+ computeLinkDepths();
+ return sankey;
+ };
+
+ // Recompute only link offsets (e.g. after dragging a node).
+ sankey.relayout = function() {
+ computeLinkDepths();
+ return sankey;
+ };
+
+ // Constant-width cubic Bezier path generator for links.
+ sankey.link = function() {
+ var curvature = .5;
+
+ function link(d) {
+ var x0 = d.source.x + d.source.dx,
+ x1 = d.target.x,
+ xi = d3.interpolateNumber(x0, x1),
+ x2 = xi(curvature),
+ x3 = xi(1 - curvature),
+ y0 = d.source.y + d.sy + d.dy / 2,
+ y1 = d.target.y + d.ty + d.dy / 2;
+ return "M" + x0 + "," + y0
+ + "C" + x2 + "," + y0
+ + " " + x3 + "," + y1
+ + " " + x1 + "," + y1;
+ }
+
+ link.curvature = function(_) {
+ if (!arguments.length) return curvature;
+ curvature = +_;
+ return link;
+ };
+
+ return link;
+ };
+
+ // Populate the sourceLinks and targetLinks for each node.
+ // Also, if the source and target are not objects, assume they are indices.
+ function computeNodeLinks() {
+ nodes.forEach(function(node) {
+ node.sourceLinks = [];
+ node.targetLinks = [];
+ });
+ links.forEach(function(link) {
+ var source = link.source,
+ target = link.target;
+ if (typeof source === "number") source = link.source = nodes[link.source];
+ if (typeof target === "number") target = link.target = nodes[link.target];
+ source.sourceLinks.push(link);
+ target.targetLinks.push(link);
+ });
+ }
+
+ // Compute the value (size) of each node by summing the associated links.
+ function computeNodeValues() {
+ nodes.forEach(function(node) {
+ node.value = Math.max(
+ d3.sum(node.sourceLinks, value),
+ d3.sum(node.targetLinks, value)
+ );
+ });
+ }
+
+ // Iteratively assign the breadth (x-position) for each node.
+ // Nodes are assigned the maximum breadth of incoming neighbors plus one;
+ // nodes with no incoming links are assigned breadth zero, while
+ // nodes with no outgoing links are assigned the maximum breadth.
+ // NOTE(review): this sweep never terminates if the graph contains a
+ // cycle — use d3.sankey (SCC-aware) for cyclic data.
+ function computeNodeBreadths() {
+ var remainingNodes = nodes,
+ nextNodes,
+ x = 0;
+
+ while (remainingNodes.length) {
+ nextNodes = [];
+ remainingNodes.forEach(function(node) {
+ node.x = x;
+ node.dx = nodeWidth;
+ node.sourceLinks.forEach(function(link) {
+ nextNodes.push(link.target);
+ });
+ });
+ remainingNodes = nextNodes;
+ ++x;
+ }
+
+ // Pin sink nodes to the last column, then convert column indices to
+ // pixel coordinates.  NOTE(review): (x - 1) is 0 when the graph has a
+ // single column, producing a division by zero (the d3.sankey variant
+ // guards with Math.max) — confirm single-column input cannot occur.
+ moveSinksRight(x);
+ scaleNodeBreadths((size[0] - nodeWidth) / (x - 1));
+ }
+
+ // NOTE(review): moveSourcesRight is never called in this file.
+ function moveSourcesRight() {
+ nodes.forEach(function(node) {
+ if (!node.targetLinks.length) {
+ node.x = d3.min(node.sourceLinks, function(d) { return d.target.x; }) - 1;
+ }
+ });
+ }
+
+ // Move nodes without outgoing links into the rightmost column (x - 1).
+ function moveSinksRight(x) {
+ nodes.forEach(function(node) {
+ if (!node.sourceLinks.length) {
+ node.x = x - 1;
+ }
+ });
+ }
+
+ // Multiply column indices by kx to spread nodes across the drawing width.
+ function scaleNodeBreadths(kx) {
+ nodes.forEach(function(node) {
+ node.x *= kx;
+ });
+ }
+
+ // Assign y-positions within each column, then iteratively relax them
+ // (alternating passes with a decaying alpha), resolving overlaps after
+ // every pass.
+ function computeNodeDepths(iterations) {
+ var nodesByBreadth = d3.nest()
+ .key(function(d) { return d.x; })
+ .sortKeys(d3.ascending)
+ .entries(nodes)
+ .map(function(d) { return d.values; });
+
+ // Seed initial positions, then relax for the requested iterations.
+ initializeNodeDepth();
+ resolveCollisions();
+ for (var alpha = 1; iterations > 0; --iterations) {
+ relaxRightToLeft(alpha *= .99);
+ resolveCollisions();
+ relaxLeftToRight(alpha);
+ resolveCollisions();
+ }
+
+ // Seed y = node order and scale node/link heights by ky, the tightest
+ // value-to-pixel ratio over all columns.
+ function initializeNodeDepth() {
+ var ky = d3.min(nodesByBreadth, function(nodes) {
+ return (size[1] - (nodes.length - 1) * nodePadding) / d3.sum(nodes, value);
+ });
+
+ nodesByBreadth.forEach(function(nodes) {
+ nodes.forEach(function(node, i) {
+ node.y = i;
+ node.dy = node.value * ky;
+ });
+ });
+
+ links.forEach(function(link) {
+ link.dy = link.value * ky;
+ });
+ }
+
+ // Pull each node towards the value-weighted centre of its sources.
+ function relaxLeftToRight(alpha) {
+ nodesByBreadth.forEach(function(nodes, breadth) {
+ nodes.forEach(function(node) {
+ if (node.targetLinks.length) {
+ var y = d3.sum(node.targetLinks, weightedSource) / d3.sum(node.targetLinks, value);
+ node.y += (y - center(node)) * alpha;
+ }
+ });
+ });
+
+ function weightedSource(link) {
+ return center(link.source) * link.value;
+ }
+ }
+
+ // Pull each node towards the value-weighted centre of its targets.
+ function relaxRightToLeft(alpha) {
+ nodesByBreadth.slice().reverse().forEach(function(nodes) {
+ nodes.forEach(function(node) {
+ if (node.sourceLinks.length) {
+ var y = d3.sum(node.sourceLinks, weightedTarget) / d3.sum(node.sourceLinks, value);
+ node.y += (y - center(node)) * alpha;
+ }
+ });
+ });
+
+ function weightedTarget(link) {
+ return center(link.target) * link.value;
+ }
+ }
+
+ // Remove vertical overlap within each column, keeping nodes inside
+ // the drawing bounds.
+ function resolveCollisions() {
+ nodesByBreadth.forEach(function(nodes) {
+ var node,
+ dy,
+ y0 = 0,
+ n = nodes.length,
+ i;
+
+ // Push any overlapping nodes down.
+ nodes.sort(ascendingDepth);
+ for (i = 0; i < n; ++i) {
+ node = nodes[i];
+ dy = y0 - node.y;
+ if (dy > 0) node.y += dy;
+ y0 = node.y + node.dy + nodePadding;
+ }
+
+ // If the bottommost node goes outside the bounds, push it back up.
+ dy = y0 - nodePadding - size[1];
+ if (dy > 0) {
+ y0 = node.y -= dy;
+
+ // Push any overlapping nodes back up.
+ for (i = n - 2; i >= 0; --i) {
+ node = nodes[i];
+ dy = node.y + node.dy + nodePadding - y0;
+ if (dy > 0) node.y -= dy;
+ y0 = node.y;
+ }
+ }
+ });
+ }
+
+ function ascendingDepth(a, b) {
+ return a.y - b.y;
+ }
+ }
+
+ // Stack each node's links top-to-bottom (ordered by the far end's y)
+ // and record the running offsets sy/ty used by the path generator.
+ function computeLinkDepths() {
+ nodes.forEach(function(node) {
+ node.sourceLinks.sort(ascendingTargetDepth);
+ node.targetLinks.sort(ascendingSourceDepth);
+ });
+ nodes.forEach(function(node) {
+ var sy = 0, ty = 0;
+ node.sourceLinks.forEach(function(link) {
+ link.sy = sy;
+ sy += link.dy;
+ });
+ node.targetLinks.forEach(function(link) {
+ link.ty = ty;
+ ty += link.dy;
+ });
+ });
+
+ function ascendingSourceDepth(a, b) {
+ return a.source.y - b.source.y;
+ }
+
+ function ascendingTargetDepth(a, b) {
+ return a.target.y - b.target.y;
+ }
+ }
+
+ // Vertical midpoint of a node.
+ function center(node) {
+ return node.y + node.dy / 2;
+ }
+
+ // Accessor used by d3.sum over links.
+ function value(link) {
+ return link.value;
+ }
+
+ return sankey;
+};
[14/14] incubator-taverna-databundle-viewer git commit: no need for
graphviz library anymore
Posted by st...@apache.org.
no need for graphviz library anymore
(also it was licensed as GPL)
Project: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/commit/5c084b7b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/tree/5c084b7b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/diff/5c084b7b
Branch: refs/heads/master
Commit: 5c084b7b88ef0b4071672789d2bb47566a9d20f9
Parents: 5048600
Author: Stian Soiland-Reyes <st...@apache.org>
Authored: Thu Jun 23 09:52:47 2016 +0100
Committer: Stian Soiland-Reyes <st...@apache.org>
Committed: Thu Jun 23 09:52:47 2016 +0100
----------------------------------------------------------------------
Gemfile | 3 ---
1 file changed, 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/5c084b7b/Gemfile
----------------------------------------------------------------------
diff --git a/Gemfile b/Gemfile
index 43029d0..066fc72 100644
--- a/Gemfile
+++ b/Gemfile
@@ -57,9 +57,6 @@ gem 'ro-bundle'
gem 'workflow_parser', github: 'myExperiment/workflow_parser'
gem 'taverna-t2flow', github: 'myExperiment/workflow_parser-t2flow'
-# A gem to build digraphs
-gem 'ruby-graphviz'
-
# A gem to query the prov.ttl
gem 'sparql'
[06/14] incubator-taverna-databundle-viewer git commit: Add files via
upload
Posted by st...@apache.org.
Add files via upload
Project: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/commit/5f01fa1c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/tree/5f01fa1c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/diff/5f01fa1c
Branch: refs/heads/master
Commit: 5f01fa1c2a98a73c6a1db1c4dfcf87e57bc04c5b
Parents: 590ee99
Author: PCStefan <pa...@gmail.com>
Authored: Mon Jun 6 12:53:05 2016 +0100
Committer: PCStefan <pa...@gmail.com>
Committed: Mon Jun 6 12:53:05 2016 +0100
----------------------------------------------------------------------
----------------------------------------------------------------------
[10/14] incubator-taverna-databundle-viewer git commit: Add files via
upload
Posted by st...@apache.org.
Add files via upload
Project: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/commit/380bdcfa
Tree: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/tree/380bdcfa
Diff: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/diff/380bdcfa
Branch: refs/heads/master
Commit: 380bdcfaa78a00792205b2514725295fb3fba2fc
Parents: defd6df
Author: PCStefan <pa...@gmail.com>
Authored: Mon Jun 6 12:54:40 2016 +0100
Committer: PCStefan <pa...@gmail.com>
Committed: Mon Jun 6 12:54:40 2016 +0100
----------------------------------------------------------------------
app/views/layouts/application.html.slim | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/380bdcfa/app/views/layouts/application.html.slim
----------------------------------------------------------------------
diff --git a/app/views/layouts/application.html.slim b/app/views/layouts/application.html.slim
index 5e25e12..61725c3 100644
--- a/app/views/layouts/application.html.slim
+++ b/app/views/layouts/application.html.slim
@@ -3,7 +3,7 @@ html
head
meta[charset="UTF-8"]
title
- | DatabundleViewer
+ | WProv-Visualizer
meta[content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no" name="viewport"]
= stylesheet_link_tag 'application', media: 'all'
= csrf_meta_tags
[07/14] incubator-taverna-databundle-viewer git commit: Add files via
upload
Posted by st...@apache.org.
Add files via upload
Project: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/commit/877aa41b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/tree/877aa41b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/diff/877aa41b
Branch: refs/heads/master
Commit: 877aa41be15534a0a51fb4ed63dcb816dec8778b
Parents: 5f01fa1
Author: PCStefan <pa...@gmail.com>
Authored: Mon Jun 6 12:53:31 2016 +0100
Committer: PCStefan <pa...@gmail.com>
Committed: Mon Jun 6 12:53:31 2016 +0100
----------------------------------------------------------------------
app/models/provenance.rb | 606 ++++++++++++++++++++++++++++++++++++++++++
1 file changed, 606 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-taverna-databundle-viewer/blob/877aa41b/app/models/provenance.rb
----------------------------------------------------------------------
diff --git a/app/models/provenance.rb b/app/models/provenance.rb
new file mode 100644
index 0000000..5ec3a40
--- /dev/null
+++ b/app/models/provenance.rb
@@ -0,0 +1,606 @@
+require 'sparql' # query the graph
+require 'uri' # used to decode urls
+
+class Provenance
+
+ # TODO: try to read the prefixes from the file
+ @@prefixes = "PREFIX dc: <http://purl.org/dc/elements/1.1/>
+ PREFIX prov: <http://www.w3.org/ns/prov#>
+ PREFIX cnt: <http://www.w3.org/2011/content#>
+ PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+ PREFIX dcmitype: <http://purl.org/dc/dcmitype/>
+ PREFIX wfprov: <http://purl.org/wf4ever/wfprov#>
+ PREFIX dcam: <http://purl.org/dc/dcam/>
+ PREFIX xml: <http://www.w3.org/XML/1998/namespace>
+ PREFIX vs: <http://www.w3.org/2003/06/sw-vocab-status/ns#>
+ PREFIX dcterms: <http://purl.org/dc/terms/>
+ PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
+ PREFIX wot: <http://xmlns.com/wot/0.1/>
+ PREFIX wfdesc: <http://purl.org/wf4ever/wfdesc#>
+ PREFIX dct: <http://purl.org/dc/terms/>
+ PREFIX tavernaprov: <http://ns.taverna.org.uk/2012/tavernaprov/>
+ PREFIX owl: <http://www.w3.org/2002/07/owl#>
+ PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
+ PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
+ PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
+ PREFIX scufl2: <http://ns.taverna.org.uk/2010/scufl2#>
+ "
+ cattr_reader :prefixes
+ attr_reader :graph
+
+ @file = ''
+
+ #constructor
+ def initialize(filepath)
+ @file = filepath
+
+ @graph = RDF::Graph.new
+
+ RDF::Reader.open("#{@file}") do |reader|
+ reader.each_statement do |statement|
+ @graph.insert(statement)
+ end
+ end
+ end
+
+ #Extract all the workflows and their parent workflow
+ def getAllWorkflowRuns
+ # create the query
+ sparql_query = SPARQL.parse("#{Provenance.prefixes}
+ SELECT *
+ WHERE
+ {
+ ?workflowRun rdf:type wfprov:WorkflowRun ;
+ rdfs:label ?workflowRunLabel .
+ OPTIONAL
+ {
+ ?workflowRun wfprov:wasPartOfWorkflowRun ?wasPartOfWorkflowRun .
+ ?wasPartOfWorkflowRun rdfs:label ?wasPartOfWorkflowRunLabel .
+ FILTER NOT EXISTS { ?something foaf:primaryTopic ?wasPartOfWorkflowRun }
+ }
+ OPTIONAL
+ {
+ {
+ ?workflowRun wfprov:usedInput ?usedDictionaryInput .
+ ?usedDictionaryInput rdf:type prov:Dictionary
+ }
+ UNION
+ {
+ ?workflowRun wfprov:usedInput ?usedArtifactInput
+ FILTER NOT EXISTS { ?usedArtifactInput rdf:type prov:Dictionary }
+ }
+ }
+ FILTER NOT EXISTS { ?something foaf:primaryTopic ?workflowRun }
+ }")
+
+ #return the result of the performing the query
+ sparql_query.execute(graph)
+ end
+
+ # Get all the ProcessRuns and their outlinks
+ def getAllProcessRuns
+ sparql_query = SPARQL.parse("#{Provenance.prefixes}
+ SELECT *
+ WHERE
+ {
+ ?processURI rdf:type wfprov:ProcessRun ;
+ prov:startedAtTime ?startedAtTime ;
+ prov:endedAtTime ?endedAtTime ;
+ wfprov:wasEnactedBy ?engineUsed ;
+ rdfs:label ?processLabel
+ OPTIONAL
+ {
+ ?processURI wfprov:wasPartOfWorkflowRun ?wasPartOfWorkflow .
+ ?wasPartOfWorkflow rdfs:label ?wasPartOfWorkflowLabel .
+ FILTER NOT EXISTS { ?something foaf:primaryTopic ?wasPartOfWorkflow }
+ }
+ OPTIONAL
+ {
+ {
+ ?processURI wfprov:usedInput ?usedDictionaryInput .
+ ?usedDictionaryInput rdf:type prov:Dictionary
+ }
+ UNION
+ {
+ ?processURI wfprov:usedInput ?usedArtifactInput
+ FILTER NOT EXISTS { ?usedArtifactInput rdf:type prov:Dictionary }
+ }
+ }
+ }")
+
+ # return the processes that were used
+ sparql_query.execute(graph)
+ end
+
+ #Extract all the workflows and their parent workflow
+ def getAllArtifacts
+ # create the query
+ sparql_query = SPARQL.parse("#{Provenance.prefixes}
+ SELECT *
+ WHERE
+ {
+ {
+ ?artifactURI rdf:type wfprov:Artifact ;
+ wfprov:describedByParameter ?describedByParameter .
+ ?describedByParameter rdfs:comment ?comment
+ OPTIONAL
+ {
+ ?artifactURI tavernaprov:content ?filepath
+ }
+ OPTIONAL
+ {
+ ?artifactURI wfprov:wasOutputFrom ?outputFromWorkflowRun .
+ ?outputFromWorkflowRun rdf:type wfprov:WorkflowRun ;
+ rdfs:label ?outputFromWorkflowRunLabel .
+ FILTER NOT EXISTS { ?something foaf:primaryTopic ?outputFromWorkflowRun }
+ }
+ OPTIONAL
+ {
+ ?artifactURI wfprov:wasOutputFrom ?outputFromProcessRun .
+ ?outputFromProcessRun rdf:type wfprov:ProcessRun ;
+ prov:startedAtTime ?startedAtTime ;
+ prov:endedAtTime ?endedAtTime ;
+ rdfs:label ?outputFromProcessRunLabel
+ }
+ FILTER NOT EXISTS { ?artifactURI rdf:type prov:Dictionary }
+ }
+ UNION
+ {
+ ?dictionary rdf:type prov:Dictionary
+ OPTIONAL
+ {
+ ?dictionary tavernaprov:content ?filepath
+ }
+ OPTIONAL
+ {
+ {
+ ?dictionary prov:hadMember ?hadMemberDictionary .
+ ?hadMemberDictionary rdf:type prov:Dictionary .
+ }
+ UNION
+ {
+ ?dictionary prov:hadMember ?hadMemberArtifact .
+ ?hadMemberArtifact wfprov:describedByParameter ?describedByParameter .
+ ?describedByParameter rdfs:comment ?comment .
+ FILTER NOT EXISTS { ?hadMemberArtifact rdf:type prov:Dictionary }
+ }
+ }
+ OPTIONAL
+ {
+ ?dictionary wfprov:wasOutputFrom ?outputFromWorkflowRun .
+ ?outputFromWorkflowRun rdf:type wfprov:WorkflowRun ;
+ rdfs:label ?outputFromWorkflowRunLabel .
+ FILTER NOT EXISTS { ?something foaf:primaryTopic ?outputFromWorkflowRun }
+ }
+ OPTIONAL
+ {
+ ?dictionary wfprov:wasOutputFrom ?outputFromProcessRun .
+ ?outputFromProcessRun rdf:type wfprov:ProcessRun ;
+ prov:startedAtTime ?startedAtTime ;
+ prov:endedAtTime ?endedAtTime ;
+ rdfs:label ?outputFromProcessRunLabel
+ }
+ }
+ }")
+
+ # return the result of the performing the query
+ sparql_query.execute(graph)
+ end
+
+ def getContentOf(extractedFilepath)
+ content = ""
+
+ if File.directory?(extractedFilepath)
+ content = "["
+
+ # ffs = Files or Folders
+ #for each folder/file inside this folder do
+ ffs = Dir.glob(extractedFilepath + "/*")
+ for file in ffs
+ content = content + getContentOf("#{file}") + ", "
+ end
+
+ content = content[0...-2] + "]"
+
+ elsif File.file?(extractedFilepath)
+ content = File.read(extractedFilepath)
+ end
+
+ content
+ end
+
+ def to_dataHashObject(bundle_filepath)
+
+ nodes = []
+ links = []
+
+ linkValue = 50
+ processorTrimCount = "Processor execution ".length
+ workflowRunTrimCount = "Workflow run of ".length
+
+ # get all the workflows
+ getAllWorkflowRuns.each do |result|
+
+ # get the name
+ workflowRunURI = result["workflowRun"].to_s
+ workflowRunLabel = result["workflowRunLabel"].to_s
+ if workflowRunLabel[0] == "W"
+ workflowRunLabel = workflowRunLabel[workflowRunTrimCount, workflowRunLabel.length]
+ elsif workflowRunLabel[0] == "P"
+ workflowRunLabel = workflowRunLabel[processorTrimCount, workflowRunLabel.length]
+ end
+
+ # a temp node for current (Decide whether to be added or not)
+ workflowRun = {:name => workflowRunURI, :type => "Workflow Run",
+ :label => workflowRunLabel}
+
+ # see if exists
+ indexSource = nodes.find_index(workflowRun)
+
+ # check
+ if indexSource.blank?
+ indexSource = nodes.count
+ nodes << workflowRun
+ end
+
+ # check if has property wasPartOfWorkflowRun
+ if result["wasPartOfWorkflowRun"].present?
+
+ secondWorkflowRunLabel = result["wasPartOfWorkflowRunLabel"].to_s
+ if secondWorkflowRunLabel[0] == "W"
+ secondWorkflowRunLabel = secondWorkflowRunLabel[workflowRunTrimCount, secondWorkflowRunLabel.length]
+ elsif secondWorkflowRunLabel[0] == "P"
+ secondWorkflowRunLabel = secondWorkflowRunLabel[processorTrimCount, secondWorkflowRunLabel.length]
+ end
+ secondWorkflowRun = {:name => result["wasPartOfWorkflowRun"].to_s, :type => "Workflow Run",
+ :label => secondWorkflowRunLabel}
+
+ indexTarget = nodes.find_index(secondWorkflowRun)
+
+ if indexTarget.blank?
+ indexTarget = nodes.count
+ nodes << secondWorkflowRun
+ end
+
+ # add the link
+ linkWfToWf = {:source => indexTarget, :target => indexSource, :value => linkValue}
+ if links.find_index(linkWfToWf).blank?
+ links << linkWfToWf
+ end
+ end
+
+ # check if has property usedInput
+ if result["usedArtifactInput"].present?
+ artifact = {:name => result["usedArtifactInput"].to_s, :type => "Artifact" }
+
+ indexTarget = nodes.find_index(artifact)
+
+ if indexTarget.blank?
+ indexTarget = nodes.count
+ nodes << artifact
+ end
+
+ # add the link
+ linkProcessToArtifact = {:source => indexTarget, :target => indexSource, :value => linkValue}
+ if links.find_index(linkProcessToArtifact).blank?
+ links << linkProcessToArtifact
+ end
+ end
+
+ # check if has property usedInput
+ if result["usedDictionaryInput"].present?
+ dictionary = {:name => result["usedDictionaryInput"].to_s, :type => "Dictionary" }
+
+ indexTarget = nodes.find_index(artifact)
+
+ if indexTarget.blank?
+ indexTarget = nodes.count
+ nodes << dictionary
+ end
+
+ # add the link
+ linkProcessToArtifact = {:source => indexTarget, :target => indexSource, :value => linkValue}
+ if links.find_index(linkProcessToArtifact).blank?
+ links << linkProcessToArtifact
+ end
+ end
+
+ end
+
+ # get all the processes
+ # get all the workflows
+ getAllProcessRuns.each do |result|
+ # get the name
+ processRunURI = result["processURI"].to_s
+ processRunLabel = result["processLabel"].to_s
+
+ # a temp node for current (Decide whether to be added or not)
+ processRun = {:name => processRunURI, :type => "Process Run",
+ :startedAtTime => result["startedAtTime"].to_s, :endedAtTime =>result["endedAtTime"].to_s,
+ :label => processRunLabel[processorTrimCount, processRunLabel.length]}
+
+
+ # see if exists
+ indexSource = nodes.find_index(processRun)
+
+ # check
+ if indexSource.blank?
+ indexSource = nodes.count
+ nodes << processRun
+ end
+
+ # check if has property wasPartOfWorkflow
+ if result["wasPartOfWorkflow"].present?
+
+ workflowRunLabel = result["wasPartOfWorkflowLabel"].to_s
+ if workflowRunLabel[0] == "W"
+ workflowRunLabel = workflowRunLabel[workflowRunTrimCount, workflowRunLabel.length]
+ elsif workflowRunLabel[0] == "P"
+ workflowRunLabel = workflowRunLabel[processorTrimCount, workflowRunLabel.length]
+ end
+
+
+ workflowRun = {:name => result["wasPartOfWorkflow"].to_s, :type => "Workflow Run",
+ :label => workflowRunLabel}
+
+ indexTarget = nodes.find_index(workflowRun)
+
+ if indexTarget.blank?
+ indexTarget = nodes.count
+ nodes << workflowRun
+ end
+
+ # add the link
+ linkProcessToWf = {:source => indexTarget, :target => indexSource, :value => linkValue}
+ if links.find_index(linkProcessToWf).blank?
+ links << linkProcessToWf
+ end
+ end
+
+ # check if has property usedInput
+ if result["usedArtifactInput"].present?
+ artifact = {:name => result["usedArtifactInput"].to_s, :type => "Artifact" }
+
+ indexTarget = nodes.find_index(artifact)
+
+ if indexTarget.blank?
+ indexTarget = nodes.count
+ nodes << artifact
+ end
+
+ # add the link
+ linkProcessToArtifact = {:source => indexTarget, :target => indexSource, :value => linkValue}
+ if links.find_index(linkProcessToArtifact).blank?
+ links << linkProcessToArtifact
+ end
+ end
+
+ # check if has property usedInput
+ if result["usedDictionaryInput"].present?
+ dictionary = {:name => result["usedDictionaryInput"].to_s, :type => "Dictionary" }
+
+ indexTarget = nodes.find_index(artifact)
+
+ if indexTarget.blank?
+ indexTarget = nodes.count
+ nodes << dictionary
+ end
+
+ # add the link
+ linkProcessToArtifact = {:source => indexTarget, :target => indexSource, :value => linkValue}
+ if links.find_index(linkProcessToArtifact).blank?
+ links << linkProcessToArtifact
+ end
+ end
+
+
+
+ # # check if has property engineUsed which represents the wfprov:wasEnactedBy
+ # if result["engineUsed"].present?
+ # engine = {:name => result["engineUsed"].to_s, :type => "Engine"}
+
+ # indexTarget = nodes.find_index(engine)
+
+ # if indexTarget.blank?
+ # indexTarget = nodes.count
+ # nodes << engine
+ # end
+
+ # # add the link
+ # linkProcessToEngine = {:source => indexTarget, :target => indexSource, :value => linkValue}
+ # if links.find_index(linkProcessToEngine).blank?
+ # links << linkProcessToEngine
+ # end
+ # end
+
+ end
+
+
+ # get all the nodes and links related to the artifact
+ getAllArtifacts.each do |result|
+
+ if result["artifactURI"].present?
+ # get the name
+ artifactURI = result["artifactURI"].to_s
+
+ # the node that needs to be added to the nodes
+ artifact = {:name => artifactURI, :type => "Artifact"}
+ else
+ # get the name
+ artifactURI = result["dictionary"].to_s
+
+ # the node that needs to be added to the nodes
+ artifact = {:name => artifactURI, :type => "Dictionary"}
+ end
+
+ # get the index of the artifact if present otherwise nil
+ indexSource = -1
+
+ nodes.each_with_index do |node, index|
+ if node[:type].to_s == artifact[:type].to_s
+ if node[:name].to_s == artifact[:name].to_s
+ indexSource = index
+ artifactLabel = "List"
+ if result["comment"].present?
+ artifactLabel = result["comment"].to_s
+ end
+
+ if node[:label].present? and node[:label] != "List"
+ node[:label] = node[:label] + "\\n" + artifactLabel
+
+ else
+ node.merge!(:label => artifactLabel)
+ end
+
+ if !(node[:content].present?) and result["filepath"].present?
+ artifactContent = getContentOf("#{bundle_filepath}#{result["filepath"].to_s}")
+ node[:content] = artifactContent
+ end
+ end
+ end
+ end
+
+ # check if is already in the list if not add to nodes
+ if indexSource == -1
+ indexSource = nodes.count
+ artifactLabel = "List"
+ if result["comment"].present?
+ artifactLabel = result["comment"].to_s
+ end
+ artifact[:label] = artifactLabel
+
+ artifactContent = ""
+ if result["filepath"].present?
+ artifactContent = getContentOf("#{bundle_filepath}#{result["filepath"].to_s}")
+ artifact[:content] = artifactContent
+ end
+ nodes << artifact
+ end
+
+ # check if it has the property wasOutputFrom a process Run and add a link entity-process
+ if result["outputFromProcessRun"].present?
+ processRunLabel = result["outputFromProcessRunLabel"].to_s
+
+ processRun = {:name => result["outputFromProcessRun"].to_s, :type => "Process Run",
+ :startedAtTime => result["startedAtTime"].to_s, :endedAtTime =>result["endedAtTime"].to_s,
+ :label => processRunLabel[processorTrimCount, processRunLabel.length]}
+
+ indexTarget = nodes.find_index(processRun)
+
+ if indexTarget.blank?
+ indexTarget = nodes.count
+ nodes << processRun
+ end
+
+ # add the link
+ linkArtifactToProcess = {:source => indexTarget, :target => indexSource, :value => linkValue}
+ if links.find_index(linkArtifactToProcess).blank?
+ links << linkArtifactToProcess
+ end
+ end
+
+ if result["outputFromWorkflowRun"].present?
+ workflowRunLabel = result["outputFromWorkflowRunLabel"].to_s
+
+ if workflowRunLabel[0] == "W"
+ workflowRunLabel = workflowRunLabel[workflowRunTrimCount, workflowRunLabel.length]
+ elsif workflowRunLabel[0] == "P"
+ workflowRunLabel = workflowRunLabel[processorTrimCount, workflowRunLabel.length]
+ end
+
+ workflowRun = {:name => result["outputFromWorkflowRun"].to_s, :type => "Workflow Run",
+ :label => workflowRunLabel}
+
+ indexTarget = nodes.find_index(workflowRun)
+
+ if indexTarget.blank?
+ indexTarget = nodes.count
+ nodes << workflowRun
+ end
+
+ # add the link
+ linkArtifactToWorkflow = {:source => indexTarget, :target => indexSource, :value => linkValue}
+ if links.find_index(linkArtifactToWorkflow).blank?
+ links << linkArtifactToWorkflow
+ end
+ end
+
+ if result["hadMemberArtifact"].present?
+ memberArtifact = {:name => result["hadMemberArtifact"].to_s, :type => "Artifact"}
+
+ indexTarget = -1
+
+ nodes.each_with_index do |node, index|
+ if node[:type].to_s == memberArtifact[:type].to_s
+ if node[:name].to_s == memberArtifact[:name].to_s
+ indexTarget = index
+ end
+ end
+ end
+
+ if indexTarget == -1
+ if result["comment"].present?
+ artifactLabel = result["comment"].to_s
+ memberArtifact.merge!(:label => artifactLabel)
+ end
+ indexTarget = nodes.count
+ nodes << memberArtifact
+ end
+
+ # add the link
+ if result["outputFromProcessRun"].present?
+ linkDictToArtifact = {:source => indexSource, :target => indexTarget, :value => linkValue}
+ if links.find_index(linkDictToArtifact).blank?
+ links << linkDictToArtifact
+ end
+ else
+ linkDictToArtifact = {:source => indexTarget, :target => indexSource, :value => linkValue}
+ if links.find_index(linkDictToArtifact).blank?
+ links << linkDictToArtifact
+ end
+ end
+
+ end
+
+ if result["hadMemberDictionary"].present?
+ dictionary = {:name => result["hadMemberDictionary"].to_s, :type => "Dictionary"}
+
+ indexTarget = -1
+
+ nodes.each_with_index do |node, index|
+ if node[:type].to_s == dictionary[:type].to_s
+ if node[:name].to_s == dictionary[:name].to_s
+ indexTarget = index
+ end
+ end
+ end
+
+ if indexTarget == -1
+ # if result["comment"].present?
+ # artifactLabel = result["comment"].to_s
+ # memberArtifact.merge!(:label => artifactLabel)
+ # end
+ indexTarget = nodes.count
+ nodes << dictionary
+ end
+
+ # add the link
+ linkDictToDict = {:source => indexTarget, :target => indexSource, :value => linkValue}
+ if links.find_index(linkDictToDict).blank?
+ links << linkDictToDict
+ end
+ end
+ end
+
+ # make a hash to return
+ stream = {:nodes => nodes, :links => links }
+
+ # return stream
+ stream
+ end
+
+ # persisted? is part of the ActiveModel interface expected by Rails form
+ # and URL helpers; returning false (this model is never saved) prevents
+ # the "undefined method `to_key' for ..." error.
+ def persisted?
+ false
+ end
+end
\ No newline at end of file