Posted to commits@airflow.apache.org by ka...@apache.org on 2019/11/15 12:01:55 UTC

[airflow-site] branch aip-11 updated: Add case studies - content (#166)

This is an automated email from the ASF dual-hosted git repository.

kamilbregula pushed a commit to branch aip-11
in repository https://gitbox.apache.org/repos/asf/airflow-site.git


The following commit(s) were added to refs/heads/aip-11 by this push:
     new 78e5885  Add case studies - content (#166)
78e5885 is described below

commit 78e588585378e6ad4a6609107c92efc9f094304e
Author: Kamil Gabryjelski <ka...@gmail.com>
AuthorDate: Fri Nov 15 13:01:44 2019 +0100

    Add case studies - content (#166)
---
 landing-pages/site/assets/icons/spotify-logo.svg   |  21 ------------
 landing-pages/site/assets/scss/_list-boxes.scss    |  37 ++++++++++++---------
 landing-pages/site/assets/scss/_quote.scss         |   1 +
 .../site/content/en/case-studies/_index.html       |   7 ----
 .../site/content/en/case-studies/example-case1.md  |  23 -------------
 .../site/content/en/case-studies/example-case2.md  |  23 -------------
 .../site/content/en/case-studies/example-case3.md  |  23 -------------
 .../site/content/en/case-studies/example-case4.md  |  23 -------------
 .../site/content/en/case-studies/example-case5.md  |  23 -------------
 .../site/content/en/case-studies/example-case6.md  |  23 -------------
 .../site/content/en/case-studies/example-case7.md  |  23 -------------
 .../site/content/en/case-studies/example-case8.md  |  23 -------------
 .../site/content/en/case-studies/example-case9.md  |  23 -------------
 .../site/content/en/use-cases/_index.html          |   7 ++++
 landing-pages/site/content/en/use-cases/adobe.md   |  17 ++++++++++
 .../site/content/en/use-cases/big-fish-games.md    |  17 ++++++++++
 landing-pages/site/content/en/use-cases/dish.md    |  17 ++++++++++
 .../site/content/en/use-cases/experity.md          |  17 ++++++++++
 .../site/content/en/use-cases/onefootball.md       |  21 ++++++++++++
 .../site/layouts/partials/boxes/case-study.html    |   7 ++--
 landing-pages/site/layouts/partials/quote.html     |   7 ++--
 .../{case-studies => use-cases}/baseof.html        |   0
 .../{case-studies => use-cases}/content.html       |   0
 .../layouts/{case-studies => use-cases}/list.html  |   2 +-
 .../{case-studies => use-cases}/single.html        |   0
 landing-pages/site/static/icons/adobe-logo.svg     |   5 +++
 .../site/static/icons/big-fish-games-logo.svg      |   3 ++
 .../site/{assets => static}/icons/dish-logo.svg    |   0
 landing-pages/site/static/icons/experity-logo.jpg  | Bin 0 -> 59400 bytes
 .../site/static/icons/onefootball-logo.svg         |   3 ++
 30 files changed, 138 insertions(+), 258 deletions(-)

diff --git a/landing-pages/site/assets/icons/spotify-logo.svg b/landing-pages/site/assets/icons/spotify-logo.svg
deleted file mode 100644
index 1aebfcd..0000000
--- a/landing-pages/site/assets/icons/spotify-logo.svg
+++ /dev/null
@@ -1,21 +0,0 @@
-<svg xmlns="http://www.w3.org/2000/svg" width="166.806" height="50.029" viewBox="0 0 166.806 50.029">
-    <g id="Group_696" data-name="Group 696" transform="translate(-4.252 -1.277)">
-        <g id="Group_692" data-name="Group 692" transform="translate(4.252 1.277)">
-            <path id="Path_634" d="M29.267 1.277a25.015 25.015 0 1 0 25.015 25.014A25.015 25.015 0 0 0 29.267 1.277zm11.472 36.08a1.558 1.558 0 0 1-2.144.519c-5.874-3.59-13.268-4.4-21.975-2.412a1.559 1.559 0 1 1-.694-3.039c9.528-2.178 17.7-1.241 24.3 2.788a1.559 1.559 0 0 1 .513 2.144zm3.061-6.811a1.951 1.951 0 0 1-2.682.642c-6.722-4.132-16.973-5.329-24.926-2.915a1.95 1.95 0 1 1-1.134-3.732c9.085-2.757 20.38-1.422 28.1 3.323a1.95 1.95 0 0 1 .642 2.682zm.264-7.093C36 18.664 22.7 18.224 15 [...]
-        </g>
-        <g id="Group_695" data-name="Group 695" transform="translate(61.829 12.781)">
-            <g id="Group_693" data-name="Group 693">
-                <path id="Path_635" d="M207.486 55.758c-4.319-1.03-5.088-1.753-5.088-3.271 0-1.435 1.351-2.4 3.36-2.4a9.819 9.819 0 0 1 5.9 2.243.282.282 0 0 0 .214.052.277.277 0 0 0 .187-.115l2.109-2.973a.283.283 0 0 0-.054-.385 12.759 12.759 0 0 0-8.295-2.874c-4.663 0-7.921 2.8-7.921 6.8 0 4.294 2.81 5.814 7.666 6.988 4.133.952 4.83 1.749 4.83 3.175 0 1.58-1.41 2.562-3.68 2.562a9.92 9.92 0 0 1-6.878-2.841.3.3 0 0 0-.208-.068.281.281 0 0 0-.194.1l-2.365 2.814a.281.281 0 0 0 .028.392 14. [...]
-                <path id="Path_636" d="M271.776 64.326a6.776 6.776 0 0 0-5.438 2.616v-1.979a.283.283 0 0 0-.283-.283h-3.867a.283.283 0 0 0-.283.283v21.986a.283.283 0 0 0 .283.283h3.867a.283.283 0 0 0 .283-.283v-6.94a6.947 6.947 0 0 0 5.438 2.461c4.047 0 8.144-3.116 8.144-9.071s-4.096-9.073-8.144-9.073zm3.648 9.073c0 3.033-1.868 5.149-4.543 5.149-2.644 0-4.639-2.213-4.639-5.149s1.995-5.149 4.639-5.149 4.543 2.165 4.543 5.15z" fill="#1ed760" data-name="Path 636" transform="translate(-242.5 [...]
-                <path id="Path_637" d="M335.549 64.326a9.091 9.091 0 1 0 9.263 9.073 9.12 9.12 0 0 0-9.263-9.073zm0 14.254a4.919 4.919 0 0 1-4.862-5.181 4.8 4.8 0 0 1 4.8-5.118 4.934 4.934 0 0 1 4.894 5.183 4.815 4.815 0 0 1-4.832 5.116z" fill="#1ed760" data-name="Path 637" transform="translate(-287.647 -56.997)"/>
-                <path id="Path_638" d="M400.219 54.63h-4.256v-4.351a.283.283 0 0 0-.282-.283h-3.867a.284.284 0 0 0-.284.283v4.351h-1.859a.283.283 0 0 0-.282.283v3.324a.282.282 0 0 0 .282.283h1.859v8.6c0 3.475 1.73 5.238 5.141 5.238a7.055 7.055 0 0 0 3.623-.9.282.282 0 0 0 .143-.245v-3.166a.284.284 0 0 0-.41-.254 4.874 4.874 0 0 1-2.27.548c-1.241 0-1.794-.563-1.794-1.826v-7.994h4.256a.283.283 0 0 0 .282-.283v-3.324a.283.283 0 0 0-.282-.284z" fill="#1ed760" data-name="Path 638" transform=" [...]
-                <path id="Path_639" d="M460 49.914v-.534c0-1.572.6-2.273 1.955-2.273a6.525 6.525 0 0 1 2.179.4.283.283 0 0 0 .372-.269V43.98a.283.283 0 0 0-.2-.272 10.615 10.615 0 0 0-3.214-.462c-3.572 0-5.46 2.011-5.46 5.815v.818h-1.862a.284.284 0 0 0-.284.283V53.5a.284.284 0 0 0 .284.283h1.858v13.27a.283.283 0 0 0 .282.283h3.867a.284.284 0 0 0 .283-.283V53.787h3.611l5.529 13.262c-.628 1.393-1.245 1.671-2.088 1.671a4.46 4.46 0 0 1-2.133-.6.29.29 0 0 0-.225-.021.286.286 0 0 0-.167.153l-1 [...]
-                <rect id="Rectangle_205" width="4.434" height="17.439" fill="#1ed760" data-name="Rectangle 205" rx=".949" transform="translate(70.377 7.683)"/>
-                <path id="Path_640" d="M433.581 39.791a2.773 2.773 0 1 0 2.774 2.773 2.774 2.774 0 0 0-2.774-2.773z" fill="#1ed760" data-name="Path 640" transform="translate(-360.968 -39.791)"/>
-            </g>
-            <g id="Group_694" data-name="Group 694" transform="translate(103.772 7.65)">
-                <path id="Path_641" d="M547.129 70.858a2.729 2.729 0 1 1 2.736-2.737 2.711 2.711 0 0 1-2.736 2.737zm.014-5.187a2.458 2.458 0 1 0 2.435 2.45 2.434 2.434 0 0 0-2.434-2.45zm.6 2.73l.77 1.078h-.649l-.693-.988h-.6v.988h-.543v-2.858h1.274c.663 0 1.1.34 1.1.911a.852.852 0 0 1-.655.868zm-.459-1.289h-.708v.9h.708c.354 0 .565-.173.565-.452s-.208-.449-.561-.449z" fill="#1ed760" data-name="Path 641" transform="translate(-544.408 -65.4)"/>
-            </g>
-        </g>
-    </g>
-</svg>
diff --git a/landing-pages/site/assets/scss/_list-boxes.scss b/landing-pages/site/assets/scss/_list-boxes.scss
index 3b5e73a..b108767 100644
--- a/landing-pages/site/assets/scss/_list-boxes.scss
+++ b/landing-pages/site/assets/scss/_list-boxes.scss
@@ -74,21 +74,6 @@ $card-margin: 20px;
   flex-direction: column;
   align-items: center;
 
-  &.hoverable-icon {
-    svg {
-      filter: grayscale(1);
-      opacity: 0.6;
-      transition: all 0.2s;
-    }
-
-    &:hover {
-      svg {
-        filter: none;
-        opacity: 1;
-      }
-    }
-  }
-
   &__blogpost {
     padding: 0 20px;
 
@@ -126,7 +111,14 @@ $card-margin: 20px;
     &--logo {
       display: flex;
       height: 60px;
+      width:100%;
+      justify-content: center;
       align-items: center;
+
+      svg, img {
+        max-height: 100%;
+        max-width: 100%;
+      }
     }
 
     &--quote {
@@ -213,6 +205,21 @@ $card-margin: 20px;
       }
     }
   }
+
+  &.hoverable-icon {
+    svg, img {
+      filter: grayscale(1);
+      opacity: 0.6;
+      transition: all 0.2s;
+    }
+
+    &:hover {
+      svg, img {
+        filter: none;
+        opacity: 1;
+      }
+    }
+  }
 }
 
 @media (max-width: $mobile) {
diff --git a/landing-pages/site/assets/scss/_quote.scss b/landing-pages/site/assets/scss/_quote.scss
index 7bdc6e1..1062763 100644
--- a/landing-pages/site/assets/scss/_quote.scss
+++ b/landing-pages/site/assets/scss/_quote.scss
@@ -48,6 +48,7 @@
   }
 
   &--logo {
+    max-height: 140px;
     margin: 0 auto;
   }
 }
diff --git a/landing-pages/site/content/en/case-studies/_index.html b/landing-pages/site/content/en/case-studies/_index.html
deleted file mode 100644
index 7c349f9..0000000
--- a/landing-pages/site/content/en/case-studies/_index.html
+++ /dev/null
@@ -1,7 +0,0 @@
----
-title: "Case studies"
-linkTitle: "Case studies"
-menu:
-    main:
-        weight: 20
----
diff --git a/landing-pages/site/content/en/case-studies/example-case1.md b/landing-pages/site/content/en/case-studies/example-case1.md
deleted file mode 100644
index 5c7dffb..0000000
--- a/landing-pages/site/content/en/case-studies/example-case1.md
+++ /dev/null
@@ -1,23 +0,0 @@
----
-title: "Example 1"
-linkTitle: "Example 1"
-quote:
-    text: "A great ecosystem and community that comes together to address about any batch data pipeline need."
-    author: "Austin Benett, CTO at Spotify"
-logo_path: "icons/spotify-logo.svg"
----
-
-##### What was the problem?
-We faced increasing complexity managing lengthy crontabs with scheduling being an issue, this required carefully planning timing due to resource constraints, usage patterns, and especially custom code needed for retry logic.  In the last case, having to verify success of previous jobs and/or steps prior to running the next.  Furthermore, time to results is important, but we were increasingly relying on buffers for processing, where things were effectively sitting idle and not processing, [...]
-
-##### How did Apache Airflow help to solve this problem?
-Relying on community built and existing hooks and operators to the majority of cloud services we use has allowed us to focus on business outcomes.
-
-##### What are the results?
-Airflow helps us manage many of our pain-points, letting us benefit from the overall ecosystem and
-community.  We are able to reduce time-to-end delivery of data products by being event-driven in our
-processing flows (in our first usage, for example, we were able to take out over 2 hours - on average - of various
-waiting between stages).  Furthermore, we are able to arrive at and iterate on products quicker as a result of
-not needing as much custom or roll-our-own solutions.  For Our code base is smaller and simpler, it is easier to
-follow, and to a large extent our DAGs serve as sufficient documentation for new contributors to understand
-what is going on.
diff --git a/landing-pages/site/content/en/case-studies/example-case2.md b/landing-pages/site/content/en/case-studies/example-case2.md
deleted file mode 100644
index 44a5b2f..0000000
--- a/landing-pages/site/content/en/case-studies/example-case2.md
+++ /dev/null
@@ -1,23 +0,0 @@
----
-title: "Example 2"
-linkTitle: "Example 2"
-quote:
-    text: "A great ecosystem and community that comes together to address about any batch data pipeline need."
-    author: "Austin Benett, CTO at Spotify"
-logo_path: "icons/dish-logo.svg"
----
-
-##### What was the problem?
-We faced increasing complexity managing lengthy crontabs with scheduling being an issue, this required carefully planning timing due to resource constraints, usage patterns, and especially custom code needed for retry logic.  In the last case, having to verify success of previous jobs and/or steps prior to running the next.  Furthermore, time to results is important, but we were increasingly relying on buffers for processing, where things were effectively sitting idle and not processing, [...]
-
-##### How did Apache Airflow help to solve this problem?
-Relying on community built and existing hooks and operators to the majority of cloud services we use has allowed us to focus on business outcomes.
-
-##### What are the results?
-Airflow helps us manage many of our pain-points, letting us benefit from the overall ecosystem and
-community.  We are able to reduce time-to-end delivery of data products by being event-driven in our
-processing flows (in our first usage, for example, we were able to take out over 2 hours - on average - of various
-waiting between stages).  Furthermore, we are able to arrive at and iterate on products quicker as a result of
-not needing as much custom or roll-our-own solutions.  For Our code base is smaller and simpler, it is easier to
-follow, and to a large extent our DAGs serve as sufficient documentation for new contributors to understand
-what is going on.
diff --git a/landing-pages/site/content/en/case-studies/example-case3.md b/landing-pages/site/content/en/case-studies/example-case3.md
deleted file mode 100644
index 84b69d7..0000000
--- a/landing-pages/site/content/en/case-studies/example-case3.md
+++ /dev/null
@@ -1,23 +0,0 @@
----
-title: "Example 3"
-linkTitle: "Example 3"
-quote:
-    text: "A great ecosystem and community that comes together to address about any batch data pipeline need."
-    author: "Austin Benett, CTO at Spotify"
-logo_path: "icons/spotify-logo.svg"
----
-
-##### What was the problem?
-We faced increasing complexity managing lengthy crontabs with scheduling being an issue, this required carefully planning timing due to resource constraints, usage patterns, and especially custom code needed for retry logic.  In the last case, having to verify success of previous jobs and/or steps prior to running the next.  Furthermore, time to results is important, but we were increasingly relying on buffers for processing, where things were effectively sitting idle and not processing, [...]
-
-##### How did Apache Airflow help to solve this problem?
-Relying on community built and existing hooks and operators to the majority of cloud services we use has allowed us to focus on business outcomes.
-
-##### What are the results?
-Airflow helps us manage many of our pain-points, letting us benefit from the overall ecosystem and
-community.  We are able to reduce time-to-end delivery of data products by being event-driven in our
-processing flows (in our first usage, for example, we were able to take out over 2 hours - on average - of various
-waiting between stages).  Furthermore, we are able to arrive at and iterate on products quicker as a result of
-not needing as much custom or roll-our-own solutions.  For Our code base is smaller and simpler, it is easier to
-follow, and to a large extent our DAGs serve as sufficient documentation for new contributors to understand
-what is going on.
diff --git a/landing-pages/site/content/en/case-studies/example-case4.md b/landing-pages/site/content/en/case-studies/example-case4.md
deleted file mode 100644
index 144f917..0000000
--- a/landing-pages/site/content/en/case-studies/example-case4.md
+++ /dev/null
@@ -1,23 +0,0 @@
----
-title: "Example 4"
-linkTitle: "Example 4"
-quote:
-    text: "A great ecosystem and community that comes together to address about any batch data pipeline need."
-    author: "Austin Benett, CTO at Spotify"
-logo_path: "icons/spotify-logo.svg"
----
-
-##### What was the problem?
-We faced increasing complexity managing lengthy crontabs with scheduling being an issue, this required carefully planning timing due to resource constraints, usage patterns, and especially custom code needed for retry logic.  In the last case, having to verify success of previous jobs and/or steps prior to running the next.  Furthermore, time to results is important, but we were increasingly relying on buffers for processing, where things were effectively sitting idle and not processing, [...]
-
-##### How did Apache Airflow help to solve this problem?
-Relying on community built and existing hooks and operators to the majority of cloud services we use has allowed us to focus on business outcomes.
-
-##### What are the results?
-Airflow helps us manage many of our pain-points, letting us benefit from the overall ecosystem and
-community.  We are able to reduce time-to-end delivery of data products by being event-driven in our
-processing flows (in our first usage, for example, we were able to take out over 2 hours - on average - of various
-waiting between stages).  Furthermore, we are able to arrive at and iterate on products quicker as a result of
-not needing as much custom or roll-our-own solutions.  For Our code base is smaller and simpler, it is easier to
-follow, and to a large extent our DAGs serve as sufficient documentation for new contributors to understand
-what is going on.
diff --git a/landing-pages/site/content/en/case-studies/example-case5.md b/landing-pages/site/content/en/case-studies/example-case5.md
deleted file mode 100644
index c42b5fd..0000000
--- a/landing-pages/site/content/en/case-studies/example-case5.md
+++ /dev/null
@@ -1,23 +0,0 @@
----
-title: "Example 5"
-linkTitle: "Example 5"
-quote:
-    text: "A great ecosystem and community that comes together to address about any batch data pipeline need."
-    author: "Austin Benett, CTO at Spotify"
-logo_path: "icons/spotify-logo.svg"
----
-
-##### What was the problem?
-We faced increasing complexity managing lengthy crontabs with scheduling being an issue, this required carefully planning timing due to resource constraints, usage patterns, and especially custom code needed for retry logic.  In the last case, having to verify success of previous jobs and/or steps prior to running the next.  Furthermore, time to results is important, but we were increasingly relying on buffers for processing, where things were effectively sitting idle and not processing, [...]
-
-##### How did Apache Airflow help to solve this problem?
-Relying on community built and existing hooks and operators to the majority of cloud services we use has allowed us to focus on business outcomes.
-
-##### What are the results?
-Airflow helps us manage many of our pain-points, letting us benefit from the overall ecosystem and
-community.  We are able to reduce time-to-end delivery of data products by being event-driven in our
-processing flows (in our first usage, for example, we were able to take out over 2 hours - on average - of various
-waiting between stages).  Furthermore, we are able to arrive at and iterate on products quicker as a result of
-not needing as much custom or roll-our-own solutions.  For Our code base is smaller and simpler, it is easier to
-follow, and to a large extent our DAGs serve as sufficient documentation for new contributors to understand
-what is going on.
diff --git a/landing-pages/site/content/en/case-studies/example-case6.md b/landing-pages/site/content/en/case-studies/example-case6.md
deleted file mode 100644
index aee9a00..0000000
--- a/landing-pages/site/content/en/case-studies/example-case6.md
+++ /dev/null
@@ -1,23 +0,0 @@
----
-title: "Example 6"
-linkTitle: "Example 6"
-quote:
-    text: "A great ecosystem and community that comes together to address about any batch data pipeline need."
-    author: "Austin Benett, CTO at Spotify"
-logo_path: "icons/dish-logo.svg"
----
-
-##### What was the problem?
-We faced increasing complexity managing lengthy crontabs with scheduling being an issue, this required carefully planning timing due to resource constraints, usage patterns, and especially custom code needed for retry logic.  In the last case, having to verify success of previous jobs and/or steps prior to running the next.  Furthermore, time to results is important, but we were increasingly relying on buffers for processing, where things were effectively sitting idle and not processing, [...]
-
-##### How did Apache Airflow help to solve this problem?
-Relying on community built and existing hooks and operators to the majority of cloud services we use has allowed us to focus on business outcomes.
-
-##### What are the results?
-Airflow helps us manage many of our pain-points, letting us benefit from the overall ecosystem and
-community.  We are able to reduce time-to-end delivery of data products by being event-driven in our
-processing flows (in our first usage, for example, we were able to take out over 2 hours - on average - of various
-waiting between stages).  Furthermore, we are able to arrive at and iterate on products quicker as a result of
-not needing as much custom or roll-our-own solutions.  For Our code base is smaller and simpler, it is easier to
-follow, and to a large extent our DAGs serve as sufficient documentation for new contributors to understand
-what is going on.
diff --git a/landing-pages/site/content/en/case-studies/example-case7.md b/landing-pages/site/content/en/case-studies/example-case7.md
deleted file mode 100644
index 9100be7..0000000
--- a/landing-pages/site/content/en/case-studies/example-case7.md
+++ /dev/null
@@ -1,23 +0,0 @@
----
-title: "Example 7"
-linkTitle: "Example 7"
-quote:
-    text: "A great ecosystem and community that comes together to address about any batch data pipeline need."
-    author: "Austin Benett, CTO at Spotify"
-logo_path: "icons/dish-logo.svg"
----
-
-##### What was the problem?
-We faced increasing complexity managing lengthy crontabs with scheduling being an issue, this required carefully planning timing due to resource constraints, usage patterns, and especially custom code needed for retry logic.  In the last case, having to verify success of previous jobs and/or steps prior to running the next.  Furthermore, time to results is important, but we were increasingly relying on buffers for processing, where things were effectively sitting idle and not processing, [...]
-
-##### How did Apache Airflow help to solve this problem?
-Relying on community built and existing hooks and operators to the majority of cloud services we use has allowed us to focus on business outcomes.
-
-##### What are the results?
-Airflow helps us manage many of our pain-points, letting us benefit from the overall ecosystem and
-community.  We are able to reduce time-to-end delivery of data products by being event-driven in our
-processing flows (in our first usage, for example, we were able to take out over 2 hours - on average - of various
-waiting between stages).  Furthermore, we are able to arrive at and iterate on products quicker as a result of
-not needing as much custom or roll-our-own solutions.  For Our code base is smaller and simpler, it is easier to
-follow, and to a large extent our DAGs serve as sufficient documentation for new contributors to understand
-what is going on.
diff --git a/landing-pages/site/content/en/case-studies/example-case8.md b/landing-pages/site/content/en/case-studies/example-case8.md
deleted file mode 100644
index 5b23c08..0000000
--- a/landing-pages/site/content/en/case-studies/example-case8.md
+++ /dev/null
@@ -1,23 +0,0 @@
----
-title: "Example 8"
-linkTitle: "Example 8"
-quote:
-    text: "A great ecosystem and community that comes together to address about any batch data pipeline need."
-    author: "Austin Benett, CTO at Spotify"
-logo_path: "icons/dish-logo.svg"
----
-
-##### What was the problem?
-We faced increasing complexity managing lengthy crontabs with scheduling being an issue, this required carefully planning timing due to resource constraints, usage patterns, and especially custom code needed for retry logic.  In the last case, having to verify success of previous jobs and/or steps prior to running the next.  Furthermore, time to results is important, but we were increasingly relying on buffers for processing, where things were effectively sitting idle and not processing, [...]
-
-##### How did Apache Airflow help to solve this problem?
-Relying on community built and existing hooks and operators to the majority of cloud services we use has allowed us to focus on business outcomes.
-
-##### What are the results?
-Airflow helps us manage many of our pain-points, letting us benefit from the overall ecosystem and
-community.  We are able to reduce time-to-end delivery of data products by being event-driven in our
-processing flows (in our first usage, for example, we were able to take out over 2 hours - on average - of various
-waiting between stages).  Furthermore, we are able to arrive at and iterate on products quicker as a result of
-not needing as much custom or roll-our-own solutions.  For Our code base is smaller and simpler, it is easier to
-follow, and to a large extent our DAGs serve as sufficient documentation for new contributors to understand
-what is going on.
diff --git a/landing-pages/site/content/en/case-studies/example-case9.md b/landing-pages/site/content/en/case-studies/example-case9.md
deleted file mode 100644
index 6ea82d7..0000000
--- a/landing-pages/site/content/en/case-studies/example-case9.md
+++ /dev/null
@@ -1,23 +0,0 @@
----
-title: "Example 9"
-linkTitle: "Example 9"
-quote:
-    text: "A great ecosystem and community that comes together to address about any batch data pipeline need."
-    author: "Austin Benett, CTO at Spotify"
-logo_path: "icons/dish-logo.svg"
----
-
-##### What was the problem?
-We faced increasing complexity managing lengthy crontabs with scheduling being an issue, this required carefully planning timing due to resource constraints, usage patterns, and especially custom code needed for retry logic.  In the last case, having to verify success of previous jobs and/or steps prior to running the next.  Furthermore, time to results is important, but we were increasingly relying on buffers for processing, where things were effectively sitting idle and not processing, [...]
-
-##### How did Apache Airflow help to solve this problem?
-Relying on community built and existing hooks and operators to the majority of cloud services we use has allowed us to focus on business outcomes.
-
-##### What are the results?
-Airflow helps us manage many of our pain-points, letting us benefit from the overall ecosystem and
-community.  We are able to reduce time-to-end delivery of data products by being event-driven in our
-processing flows (in our first usage, for example, we were able to take out over 2 hours - on average - of various
-waiting between stages).  Furthermore, we are able to arrive at and iterate on products quicker as a result of
-not needing as much custom or roll-our-own solutions.  For Our code base is smaller and simpler, it is easier to
-follow, and to a large extent our DAGs serve as sufficient documentation for new contributors to understand
-what is going on.
diff --git a/landing-pages/site/content/en/use-cases/_index.html b/landing-pages/site/content/en/use-cases/_index.html
new file mode 100644
index 0000000..733b668
--- /dev/null
+++ b/landing-pages/site/content/en/use-cases/_index.html
@@ -0,0 +1,7 @@
+---
+title: "Use cases"
+linkTitle: "Use cases"
+menu:
+    main:
+        weight: 20
+---
diff --git a/landing-pages/site/content/en/use-cases/adobe.md b/landing-pages/site/content/en/use-cases/adobe.md
new file mode 100644
index 0000000..5d3ce43
--- /dev/null
+++ b/landing-pages/site/content/en/use-cases/adobe.md
@@ -0,0 +1,17 @@
+---
+title: "Adobe"
+linkTitle: "Adobe"
+quote:
+    text: "Apache Aiflow is highly extensible and its plugin interface can be used to meet a variety of use cases. It supports variety of deployment models and has a very active community to scale innovation."
+    author: "Raman Gupta"
+logo: "adobe-logo.svg"
+---
+
+##### What was the problem?
+Modern big data platforms need sophisticated data pipelines that connect to many backend services and enable complex workflows. These workflows need to be deployed, monitored, and run either on regular schedules or when triggered by external events. Adobe Experience Platform component services architected and built an orchestration service to enable their users to author, schedule, and monitor complex hierarchical (including sequential and parallel) workflows for Apache Spark (TM) and non-Spark jobs.
+
+##### How did Apache Airflow help to solve this problem?
+Adobe Experience Platform built an orchestration service to meet our user and customer requirements. It is architected based on guiding principles to leverage an off-the-shelf, open-source orchestration engine that is abstracted to other services through an API and extendible to any application through a pluggable framework. Adobe Experience Platform orchestration service leverages Apache Airflow execution engine for scheduling and executing various workflows. Apache Airflow is highly ex [...]
+
+##### What are the results?
+Adobe Experience Platform is using Apache Airflow's plugin interface to write custom operators that meet our use cases. With the Kubernetes Executor, we were able to scale it to run thousands of concurrent workflows. Adobe and Adobe Experience Platform teams can focus on business use cases because all scheduling, dependency management, and retry logic is offloaded to Apache Airflow.
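
The custom-operator pattern Adobe describes is not part of this commit; as an illustration only, a minimal sketch of the usual approach, subclassing BaseOperator and implementing execute() (the SparkJobSubmitOperator name and job configuration are assumptions, not Adobe's code):

```python
# Illustrative sketch only; not part of this commit or Adobe's codebase.
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults


class SparkJobSubmitOperator(BaseOperator):
    """Hypothetical operator that submits a Spark job to a backend service."""

    @apply_defaults
    def __init__(self, job_conf, *args, **kwargs):
        super(SparkJobSubmitOperator, self).__init__(*args, **kwargs)
        self.job_conf = job_conf

    def execute(self, context):
        # A real implementation would call the orchestration backend here.
        self.log.info("Submitting Spark job with conf %s", self.job_conf)
```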
diff --git a/landing-pages/site/content/en/use-cases/big-fish-games.md b/landing-pages/site/content/en/use-cases/big-fish-games.md
new file mode 100644
index 0000000..1d551b2
--- /dev/null
+++ b/landing-pages/site/content/en/use-cases/big-fish-games.md
@@ -0,0 +1,17 @@
+---
+title: "Big Fish Games"
+linkTitle: "Big Fish Games"
+quote:
+    text: "Apache Airflow is a great open-source workflow orchestration tool supported by an active community. It provides all the features needed for scheduling workflows out-of-the-box. Additionally, DAGs can be easily programmed in Python. Backfilling historical data and retrying failed jobs based on configuration helps mitigate any upstream issues and better handles the late arrival of data."
+    author: "Suganya Varadarajan"
+logo: "big-fish-games-logo.svg"
+---
+
+##### What was the problem?
+The main challenge is the lack of standardized  ETL workflow orchestration tools. PowerShell and Python-based ETL frameworks built in-house are currently used for scheduling and running analytical workloads. However, there is no web UI through which we can monitor these workflows and it requires additional effort to maintain this framework. These scheduled jobs based on external dependencies are not well suited to modern Big Data platforms and their complex workflows. Although we experim [...]
+
+##### How did Apache Airflow help to solve this problem?
+Apache Airflow helps us programmatically control our workflows in Python by setting task dependencies and monitoring tasks within each DAG in a Web UI. Airflow allows us to view detailed logs for each task in these complex workflows. It has built-in connectors for Hive, MySQL, Google Cloud APIs and others. It also lends us flexibility to create our own custom connectors (i.e. for a Netezza database) using JDBCHook and JDBCOperator or extend the existing operator such as Hive Operator. Fo [...]
+
+##### What are the results?
+We seek to run concurrent tasks within DAGs, and concurrent DAGs, using Apache Airflow, in the hope of running our entire ETL workload faster. Airflow helps our analysts and developers focus on the analyses, rather than laboring over building an ETL framework to schedule and monitor our applications. Airflow facilitates a seamless ETL migration to the Google Cloud Platform (GCP), as GCP maintains Cloud Composer, a managed Apache Airflow service.
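
The JDBCHook/JDBCOperator approach mentioned above is not shown in this commit; a minimal sketch, assuming an Airflow connection named "netezza_default" has been configured to point at the Netezza database via JDBC:

```python
# Illustrative sketch only; the connection id and SQL are assumptions.
from datetime import datetime

from airflow import DAG
from airflow.operators.jdbc_operator import JdbcOperator

with DAG(dag_id="netezza_example",
         start_date=datetime(2019, 1, 1),
         schedule_interval="@daily") as dag:
    JdbcOperator(
        task_id="load_daily_facts",
        jdbc_conn_id="netezza_default",  # assumed JDBC connection to Netezza
        sql="INSERT INTO facts SELECT * FROM staging_facts",
    )
```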
diff --git a/landing-pages/site/content/en/use-cases/dish.md b/landing-pages/site/content/en/use-cases/dish.md
new file mode 100644
index 0000000..86b0c61
--- /dev/null
+++ b/landing-pages/site/content/en/use-cases/dish.md
@@ -0,0 +1,17 @@
+---
+title: "Dish"
+linkTitle: "Dish"
+quote:
+    text: "Airflow is Batteries-Included. A great ecosystem and community that comes together to address about any (batch) data pipeline needs."
+    author: "Austin Benett"
+logo: "dish-logo.svg"
+---
+
+##### What was the problem?
+We faced increasing complexity managing lengthy crontabs with scheduling being an issue, this required carefully planning timing due to resource constraints, usage patterns, and especially custom code needed for retry logic.  In the last case, having to verify success of previous jobs and/or steps prior to running the next.  Furthermore, time to results is important, but we were increasingly relying on buffers for processing, where things were effectively sitting idle and not processing, [...]
+
+##### How did Apache Airflow help to solve this problem?
+Relying on community-built and existing hooks and operators for the majority of cloud services we use has allowed us to focus on business outcomes rather than operations.
+
+##### What are the results?
+Airflow helps us manage many of our pain-points, letting us benefit from the overall ecosystem and community.  We are able to reduce time-to-end delivery of data products by being event-driven in our processing flows (in our first usage, for example, we were able to take out over 2 hours - on average - of various waiting between stages).  Furthermore, we are able to arrive at and iterate on products quicker as a result of not needing as much custom or roll-our-own solutions.  For Our cod [...]
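
None of Dish's pipelines appear in this commit; as a point of reference only, the hand-rolled retry code and "verify the previous step" checks described in the problem statement map onto Airflow's built-in retries and task dependencies. A minimal sketch with assumed task names and script paths:

```python
# Illustrative sketch only; task names, schedule and script paths are assumptions.
from datetime import datetime, timedelta

from airflow import DAG
from airflow.operators.bash_operator import BashOperator

with DAG(dag_id="crontab_replacement",
         start_date=datetime(2019, 1, 1),
         schedule_interval="0 2 * * *") as dag:
    extract = BashOperator(
        task_id="extract",
        bash_command="python /opt/etl/extract.py",
        retries=3,                          # replaces hand-rolled retry code
        retry_delay=timedelta(minutes=10),
    )
    transform = BashOperator(
        task_id="transform",
        bash_command="python /opt/etl/transform.py",
    )

    # transform runs only after extract succeeds, so no custom
    # "check that the previous job finished" logic is needed.
    extract >> transform
```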
diff --git a/landing-pages/site/content/en/use-cases/experity.md b/landing-pages/site/content/en/use-cases/experity.md
new file mode 100644
index 0000000..7371ebd
--- /dev/null
+++ b/landing-pages/site/content/en/use-cases/experity.md
@@ -0,0 +1,17 @@
+---
+title: "Experity"
+linkTitle: "Experity"
+quote:
+    text: "Airflow can be an enterprise scheduling tool if used properly. Its ability to run \"any command, on any node\" is amazing. Handling complex, mixed-mode tasks was easy and scaling out with celery workers is huge. The open source community is great and we can help diagnose and debug our own problems as well as contribute those back to the greater good."
+    author: "Luke Bodeen"
+logo: "experity-logo.jpg"
+---
+
+##### What was the problem?
+We had to deploy our complex, flagship app to multiple nodes in multiple ways. This required tasks to communicate across Windows nodes and coordinate timing perfectly. We did not want to buy an expensive enterprise scheduling tool and needed ultimate flexibility.
+
+##### How did Apache Airflow help to solve this problem?
+Ultimately we decided that flexible, multi-node, DAG-capable tooling was key, and Airflow was one of the few tools that fit the bill. Its foundation on open source and Python was a large factor that upheld our core principles. At the time, Airflow was missing a Windows hook and operator, so we contributed the WinRM hook and operator back to the community. Given its flexibility, we also use DAG generators to have our metadata drive our DAGs and keep maintenance costs down.
+
+##### What are the results?
+We have a very flexible deployment framework that allows us to be as nimble as possible. The reliability is something we have grown to trust as long as we use the tool correctly. The scalability has also allowed us to decrease the time it takes to operate on our fleet of servers.
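
The metadata-driven DAG generation and WinRM usage described above are not shown in this commit; a minimal sketch, assuming a hard-coded target list standing in for real metadata and pre-configured WinRM connections:

```python
# Illustrative sketch only; targets, connection ids and the command are assumptions.
from datetime import datetime

from airflow import DAG
from airflow.contrib.operators.winrm_operator import WinRMOperator

DEPLOY_TARGETS = ["app-node-1", "app-node-2"]  # would come from deployment metadata

for target in DEPLOY_TARGETS:
    dag = DAG(dag_id="deploy_{}".format(target),
              start_date=datetime(2019, 1, 1),
              schedule_interval=None)  # triggered on demand

    WinRMOperator(
        task_id="run_deploy_script",
        ssh_conn_id="winrm_{}".format(target),  # assumed Airflow connection id
        command="powershell.exe -File C:\\deploy\\deploy.ps1",
        dag=dag,
    )

    # Expose each generated DAG at module level so the scheduler discovers it.
    globals()[dag.dag_id] = dag
```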
diff --git a/landing-pages/site/content/en/use-cases/onefootball.md b/landing-pages/site/content/en/use-cases/onefootball.md
new file mode 100644
index 0000000..fa5f7de
--- /dev/null
+++ b/landing-pages/site/content/en/use-cases/onefootball.md
@@ -0,0 +1,21 @@
+---
+title: "Onefootball"
+linkTitle: "Onefootball"
+quote:
+    text: "Airflow is extensible enough for any business to define the custom operators they need. Airflow can help you in your DataOps journey: viewing analytics as code, monitoring, reusing components, being a catalyst of team interactions."
+    author: "Louis Guitton"
+logo: "onefootball-logo.svg"
+---
+
+##### What was the problem?
+With millions of daily active users, managing the complexity of data engineering at Onefootball is a constant challenge. Lengthy crontabs, multiplication of custom API clients, erosion of confidence in the analytics served, increasing heroism ("only one person can solve this issue"). Those are the challenges that most teams face unless they consciously invest in their tools and processes.
+
+On top of that, new data tools appear each month: third-party data sources, cloud provider solutions, different storage technologies... Managing all those integrations is costly and brittle, especially for small data engineering teams that are trying to do more with less.
+
+##### How did Apache Airflow help to solve this problem?
+Airflow had been on our radar for a while until one day we took the leap. We used the DAG paradigm to migrate the pipelines running on crontabs. We benefited from the community Hooks and Operators to remove parts of our code and to refactor the API clients specific to our business. We use the alerts, SLAs and the web UI to regain confidence in our analytics. We use our internal Airflow PRs as catalysts for team discussion and to challenge our technical designs.
+
+We have DAGs orchestrating SQL transformations in our data warehouse, but also DAGs orchestrating functions run against our Kubernetes cluster, both for training Machine Learning models and for sending daily analytics emails.
+
+##### What are the results?
+The learning curve was steep, but in about 100 days we were able to use Airflow efficiently to manage the complexity of our data engineering. We currently have 17 DAGs (adding on average one per week), have made 2 contributions to apache/airflow, and maintain 7 internal hooks and operators, with more planned as our migration efforts continue.
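
The DAGs mentioned above are not part of this commit; as an illustration of the alerting and SLA features referenced, a minimal sketch with assumed owner, e-mail address, connection id, and SQL file:

```python
# Illustrative sketch only; owner, e-mail, connection id and SQL path are assumptions.
from datetime import datetime, timedelta

from airflow import DAG
from airflow.operators.postgres_operator import PostgresOperator

default_args = {
    "owner": "data-engineering",
    "email": ["data-alerts@example.com"],
    "email_on_failure": True,            # alert on task failure
    "retries": 1,
    "retry_delay": timedelta(minutes=5),
    "sla": timedelta(hours=1),           # missed-SLA notifications
}

with DAG(dag_id="warehouse_transformations",
         default_args=default_args,
         start_date=datetime(2019, 1, 1),
         schedule_interval="0 6 * * *") as dag:
    PostgresOperator(
        task_id="refresh_daily_aggregates",
        postgres_conn_id="warehouse",            # assumed connection id
        sql="sql/refresh_daily_aggregates.sql",  # assumed SQL file in the DAG folder
    )
```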
diff --git a/landing-pages/site/layouts/partials/boxes/case-study.html b/landing-pages/site/layouts/partials/boxes/case-study.html
index 413d916..f5e73e3 100644
--- a/landing-pages/site/layouts/partials/boxes/case-study.html
+++ b/landing-pages/site/layouts/partials/boxes/case-study.html
@@ -17,15 +17,16 @@
  under the License.
 */}}
 
+{{ $title := .title }}
 <div class="card">
     <div class="box-event box-event__case-study hoverable-icon">
         <div class="box-event__case-study--logo">
-            {{ with resources.Get .logo_path }}
-                {{ .Content | safeHTML }}
+            {{ with .logo }}
+                <img src="/icons/{{ . }}" alt="{{ $title }} logo" />
             {{ end }}
         </div>
         <p class="box-event__case-study--quote"
-           >{{ .quote.text }}</p>
+           >{{ .quote.text | truncate 120 }}</p>
         {{ partial "buttons/button-hollow" (dict "text" "Learn more")}}
     </div>
 </div>
diff --git a/landing-pages/site/layouts/partials/quote.html b/landing-pages/site/layouts/partials/quote.html
index 64ec9d3..26529b8 100644
--- a/landing-pages/site/layouts/partials/quote.html
+++ b/landing-pages/site/layouts/partials/quote.html
@@ -17,12 +17,11 @@
  under the License.
 */}}
 
+{{ $title := .title }}
 <div class="quote">
     <p class="quote--text">{{ .quote.text }}</p>
     <p class="quote--author">{{ .quote.author }}</p>
-    {{ with resources.Get .logo_path }}
-        <div class="quote--logo">
-            {{ .Content | safeHTML }}
-        </div>
+    {{ with .logo }}
+        <img src="/icons/{{ . }}" alt="{{ $title }} logo" class="quote--logo" />
     {{ end }}
 </div>
diff --git a/landing-pages/site/layouts/case-studies/baseof.html b/landing-pages/site/layouts/use-cases/baseof.html
similarity index 100%
rename from landing-pages/site/layouts/case-studies/baseof.html
rename to landing-pages/site/layouts/use-cases/baseof.html
diff --git a/landing-pages/site/layouts/case-studies/content.html b/landing-pages/site/layouts/use-cases/content.html
similarity index 100%
rename from landing-pages/site/layouts/case-studies/content.html
rename to landing-pages/site/layouts/use-cases/content.html
diff --git a/landing-pages/site/layouts/case-studies/list.html b/landing-pages/site/layouts/use-cases/list.html
similarity index 96%
rename from landing-pages/site/layouts/case-studies/list.html
rename to landing-pages/site/layouts/use-cases/list.html
index 2c3c597..61881a3 100644
--- a/landing-pages/site/layouts/case-studies/list.html
+++ b/landing-pages/site/layouts/use-cases/list.html
@@ -19,7 +19,7 @@
 
 {{ define "main" }}
     <div>
-        <h2 class="page-header">Case studies</h2>
+        <h2 class="page-header">Use cases</h2>
         <h5 class="page-subtitle">
             Find out how Apache Airflow helped businesses reach their goals
         </h5>
diff --git a/landing-pages/site/layouts/case-studies/single.html b/landing-pages/site/layouts/use-cases/single.html
similarity index 100%
rename from landing-pages/site/layouts/case-studies/single.html
rename to landing-pages/site/layouts/use-cases/single.html
diff --git a/landing-pages/site/static/icons/adobe-logo.svg b/landing-pages/site/static/icons/adobe-logo.svg
new file mode 100644
index 0000000..292815a
--- /dev/null
+++ b/landing-pages/site/static/icons/adobe-logo.svg
@@ -0,0 +1,5 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="183.122" height="48.105">
+    <path fill="#FFF" d="M3.701 4.24h45.573v40.331H3.701z"/>
+    <path fill="#E20025" d="M32.428 4.24l16.846 40.331V4.24H32.428zm-28.727 0v40.331L20.56 4.24H3.701zm15.427 32.219h7.855l3.21 8.112h7.034L26.495 19.103l-7.367 17.356z"/>
+    <path d="M73.332 4.134l-9.12 40.438h5.52l2.4-11.159h8.4l2.34 11.159h5.58l-8.58-40.438h-6.54zm-.54 24.778l1.92-9.659c.54-2.58 1.14-6.3 1.56-9.239h.24c.42 2.88.96 6.479 1.5 9.239l1.86 9.659h-7.08zm36.729-25.858h-5.64v15.179h-.12c-1.02-2.22-2.88-3.3-5.22-3.3-4.619 0-9 4.858-9 15.179 0 9.06 3.3 14.939 8.7 14.939 2.938 0 5.159-1.98 6.119-4.021h.181l.359 3.54h4.859c-.12-2.04-.24-5.279-.24-7.499l.002-34.017zm-5.64 31.259c0 .659-.061 1.38-.18 1.858-.96 3.479-2.641 4.141-3.9 4.141-3 0-4.56-4. [...]
+</svg>
diff --git a/landing-pages/site/static/icons/big-fish-games-logo.svg b/landing-pages/site/static/icons/big-fish-games-logo.svg
new file mode 100644
index 0000000..cabda75
--- /dev/null
+++ b/landing-pages/site/static/icons/big-fish-games-logo.svg
@@ -0,0 +1,3 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="423.286" height="331.153" viewBox="0 0 111.994 87.618">
+    <path fill="#00548e" d="M109.72 25.233H94.674c-.98 0-1.774.794-1.774 1.775v7.412c-4.858 0-9.303-3.803-8.767-10.122.402-4.716 4.202-8.015 8.887-8.015 2.832 0 5.185 1.016 7.299 2.738.73.592 1.813.434 2.363-.332l6.7-9.33a1.77 1.77 0 00-.283-2.386C105.261 3.742 98.858.5 91.284.5 78.697.5 68.802 7.7 65.785 17.955l-2.974-8.893a11.429 11.429 0 00-10.84-7.807h-8.7c1.602 3.926 1.584 5.983 2.515 12.718.837 6.018 3.888 11.236 3.888 11.236s-3.051 5.217-3.888 11.236c-.931 6.73-.913 8.79-2.515 12. [...]
+</svg>
diff --git a/landing-pages/site/assets/icons/dish-logo.svg b/landing-pages/site/static/icons/dish-logo.svg
similarity index 100%
rename from landing-pages/site/assets/icons/dish-logo.svg
rename to landing-pages/site/static/icons/dish-logo.svg
diff --git a/landing-pages/site/static/icons/experity-logo.jpg b/landing-pages/site/static/icons/experity-logo.jpg
new file mode 100644
index 0000000..24adc4b
Binary files /dev/null and b/landing-pages/site/static/icons/experity-logo.jpg differ
diff --git a/landing-pages/site/static/icons/onefootball-logo.svg b/landing-pages/site/static/icons/onefootball-logo.svg
new file mode 100644
index 0000000..6a45ace
--- /dev/null
+++ b/landing-pages/site/static/icons/onefootball-logo.svg
@@ -0,0 +1,3 @@
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 353.2 68">
+    <path fill="#32B846" d="M38.9 17.2c0 2-1.6 3.6-3.6 3.6s-3.6-1.6-3.6-3.6 1.6-3.6 3.6-3.6 3.6 1.7 3.6 3.6zm-9 38.6c0 2-1.6 3.6-3.6 3.6s-3.6-1.6-3.6-3.6 1.6-3.6 3.6-3.6 3.6 1.6 3.6 3.6zM41.6 38c-.2.2-.7.2-.9 0L39 36.4c-.3-.2-.3-.7 0-.9l3.6-3.6-3.8-3.7-9.2 9.1c-.2.2-.7.2-.9 0l-5.4-5.4c-.2-.2-.2-.7 0-.9l4.8-4.8H12.3c-.5 0-.7-.6-.4-.9l2-2c.3-.2.9-.7 1.5-.7h22.3c.2 0 .7.1 1 .4l8.5 8.5c.2.2.2.7 0 .9L41.6 38zm-20.3-5c.2-.2.7-.2.9 0l1.6 1.6c.2.2.2.7 0 .9L14.3 45c-.2.2-.7.2-.9 0l-1.6-1.6c-.2-.2 [...]
+</svg>