Posted to commits@superset.apache.org by jo...@apache.org on 2018/05/04 01:14:42 UTC

[incubator-superset] branch master updated: [druid] Updating Druid refresh metadata tests (#4887)

This is an automated email from the ASF dual-hosted git repository.

johnbodley pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-superset.git


The following commit(s) were added to refs/heads/master by this push:
     new 440fb77  [druid] Updating Druid refresh metadata tests (#4887)
440fb77 is described below

commit 440fb77df5b7d53624cf2762a6fa100c69cfb3e7
Author: John Bodley <45...@users.noreply.github.com>
AuthorDate: Thu May 3 18:14:40 2018 -0700

    [druid] Updating Druid refresh metadata tests (#4887)
---
 tests/druid_tests.py | 114 +++++++++++++++++++++++++++------------------------
 1 file changed, 61 insertions(+), 53 deletions(-)

diff --git a/tests/druid_tests.py b/tests/druid_tests.py
index 6947a6c..0759afb 100644
--- a/tests/druid_tests.py
+++ b/tests/druid_tests.py
@@ -346,38 +346,38 @@ class DruidTests(SupersetTestCase):
         self.login(username='admin')
         cluster = self.get_cluster(PyDruid)
         cluster.refresh_datasources()
+        datasource = cluster.datasources[0]
 
-        for i, datasource in enumerate(cluster.datasources):
-            cols = (
-                db.session.query(DruidColumn)
-                .filter(DruidColumn.datasource_id == datasource.id)
+        cols = (
+            db.session.query(DruidColumn)
+            .filter(DruidColumn.datasource_id == datasource.id)
+        )
+
+        for col in cols:
+            self.assertIn(
+                col.column_name,
+                SEGMENT_METADATA[0]['columns'].keys(),
             )
 
-            for col in cols:
-                self.assertIn(
-                    col.column_name,
-                    SEGMENT_METADATA[i]['columns'].keys(),
-                )
+        metrics = (
+            db.session.query(DruidMetric)
+            .filter(DruidMetric.datasource_id == datasource.id)
+            .filter(DruidMetric.metric_name.like('%__metric1'))
+        )
 
-            metrics = (
-                db.session.query(DruidMetric)
-                .filter(DruidMetric.datasource_id == datasource.id)
-                .filter(DruidMetric.metric_name.like('%__metric1'))
-            )
+        self.assertEqual(
+            {metric.metric_name for metric in metrics},
+            {'max__metric1', 'min__metric1', 'sum__metric1'},
+        )
+
+        for metric in metrics:
+            agg, _ = metric.metric_name.split('__')
 
             self.assertEqual(
-                {metric.metric_name for metric in metrics},
-                {'max__metric1', 'min__metric1', 'sum__metric1'},
+                json.loads(metric.json)['type'],
+                'double{}'.format(agg.capitalize()),
             )
 
-            for metric in metrics:
-                agg, _ = metric.metric_name.split('__')
-
-                self.assertEqual(
-                    json.loads(metric.json)['type'],
-                    'double{}'.format(agg.capitalize()),
-                )
-
     @patch('superset.connectors.druid.models.PyDruid')
     def test_refresh_metadata_augment_type(self, PyDruid):
         self.login(username='admin')
@@ -389,52 +389,60 @@ class DruidTests(SupersetTestCase):
         instance = PyDruid.return_value
         instance.segment_metadata.return_value = metadata
         cluster.refresh_datasources()
+        datasource = cluster.datasources[0]
 
-        for i, datasource in enumerate(cluster.datasources):
-            metrics = (
-                db.session.query(DruidMetric)
-                .filter(DruidMetric.datasource_id == datasource.id)
-                .filter(DruidMetric.metric_name.like('%__metric1'))
-            )
+        column = (
+            db.session.query(DruidColumn)
+            .filter(DruidColumn.datasource_id == datasource.id)
+            .filter(DruidColumn.column_name == 'metric1')
+        ).one()
 
-            for metric in metrics:
-                agg, _ = metric.metric_name.split('__')
+        self.assertEqual(column.type, 'LONG')
 
-                self.assertEqual(
-                    metric.json_obj['type'],
-                    'long{}'.format(agg.capitalize()),
-                )
+        metrics = (
+            db.session.query(DruidMetric)
+            .filter(DruidMetric.datasource_id == datasource.id)
+            .filter(DruidMetric.metric_name.like('%__metric1'))
+        )
+
+        for metric in metrics:
+            agg, _ = metric.metric_name.split('__')
+
+            self.assertEqual(
+                metric.json_obj['type'],
+                'long{}'.format(agg.capitalize()),
+            )
 
     @patch('superset.connectors.druid.models.PyDruid')
     def test_refresh_metadata_augment_verbose_name(self, PyDruid):
         self.login(username='admin')
         cluster = self.get_cluster(PyDruid)
         cluster.refresh_datasources()
+        datasource = cluster.datasources[0]
 
-        for i, datasource in enumerate(cluster.datasources):
-            metrics = (
-                db.session.query(DruidMetric)
-                .filter(DruidMetric.datasource_id == datasource.id)
-                .filter(DruidMetric.metric_name.like('%__metric1'))
-            )
+        metrics = (
+            db.session.query(DruidMetric)
+            .filter(DruidMetric.datasource_id == datasource.id)
+            .filter(DruidMetric.metric_name.like('%__metric1'))
+        )
 
-            for metric in metrics:
-                metric.verbose_name = metric.metric_name
+        for metric in metrics:
+            metric.verbose_name = metric.metric_name
 
-            db.session.commit()
+        db.session.commit()
 
         # The verbose name should not change during a refresh.
         cluster.refresh_datasources()
+        datasource = cluster.datasources[0]
 
-        for i, datasource in enumerate(cluster.datasources):
-            metrics = (
-                db.session.query(DruidMetric)
-                .filter(DruidMetric.datasource_id == datasource.id)
-                .filter(DruidMetric.metric_name.like('%__metric1'))
-            )
+        metrics = (
+            db.session.query(DruidMetric)
+            .filter(DruidMetric.datasource_id == datasource.id)
+            .filter(DruidMetric.metric_name.like('%__metric1'))
+        )
 
-            for metric in metrics:
-                self.assertEqual(metric.verbose_name, metric.metric_name)
+        for metric in metrics:
+            self.assertEqual(metric.verbose_name, metric.metric_name)
 
     def test_urls(self):
         cluster = self.get_test_cluster_obj()

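For readers skimming the diff above, here is a minimal standalone sketch (Python, not part of the commit) of the naming and typing convention the refactored assertions rely on: the refresh generates one metric per aggregation, named '<agg>__<column>', and the aggregator type in the metric JSON is 'double<Agg>' for the default column type, or 'long<Agg>' once the column type is augmented to LONG. The expected_metric_type helper below is hypothetical and used only for illustration; it is not a Superset API.

    # Hypothetical helper illustrating the '<agg>__<column>' -> aggregator-type
    # convention checked by test_refresh_metadata and
    # test_refresh_metadata_augment_type in the diff above.
    def expected_metric_type(metric_name, column_type='DOUBLE'):
        """Derive the expected Druid aggregator type from a generated metric name."""
        agg, _ = metric_name.split('__')              # e.g. 'sum__metric1' -> 'sum'
        prefix = 'long' if column_type == 'LONG' else 'double'
        return '{}{}'.format(prefix, agg.capitalize())

    for name in ('max__metric1', 'min__metric1', 'sum__metric1'):
        print(name, '->', expected_metric_type(name), '/', expected_metric_type(name, 'LONG'))
    # max__metric1 -> doubleMax / longMax
    # min__metric1 -> doubleMin / longMin
    # sum__metric1 -> doubleSum / longSum
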
-- 
To stop receiving notification emails like this one, please contact
johnbodley@apache.org.