Posted to commits@beam.apache.org by ro...@apache.org on 2016/07/20 20:06:48 UTC

[2/5] incubator-beam git commit: Reduce the number of elements in the pvalue caching test.

Reduce the number of elements in the pvalue caching test.

It seems this test is causing Travis CI to time out,
as the non-compiled version got slightly slower.

100,000 elements should be sufficient to see the effects
of not caching.
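
For readers following along: the test detects unwanted caching by comparing
the number of live Python objects against a baseline taken before the
pipeline runs. The diff below only shows the gc.collect()/count_threshold
setup and the Map('oom:check', check_memory, count_threshold) call, not the
body of check_memory, so the following is only a rough sketch of what such a
check might look like; the exact exception type and message are assumptions,
not code from the Beam test itself.

    import gc

    def check_memory(value, count_threshold):
        # Hypothetical reconstruction: fail if the number of objects tracked
        # by the garbage collector has grown past the baseline-plus-slack
        # threshold, which is what happens when intermediate PValues are
        # unnecessarily cached in memory.
        if len(gc.get_objects()) > count_threshold:
            raise RuntimeError(
                'tracked object count exceeded %d' % count_threshold)
        return value

    # Baseline captured before the pipeline is built, as in the test below.
    gc.collect()
    count_threshold = len(gc.get_objects()) + 10000

Shrinking the element count to 100,000 preserves the ratios the counter
assertions check (3 * num_elements through the flatten and reify_windows
stages, num_elements elsewhere) while keeping the run short enough to stay
under the Travis CI time limit.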


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/27689819
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/27689819
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/27689819

Branch: refs/heads/python-sdk
Commit: 276898195819329a1b88002a41a109df61530a26
Parents: 8efc231
Author: Robert Bradshaw <ro...@google.com>
Authored: Wed Jul 20 01:37:49 2016 -0700
Committer: Robert Bradshaw <ro...@google.com>
Committed: Wed Jul 20 13:06:21 2016 -0700

----------------------------------------------------------------------
 sdks/python/apache_beam/pipeline_test.py | 18 ++++++++++--------
 1 file changed, 10 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/27689819/sdks/python/apache_beam/pipeline_test.py
----------------------------------------------------------------------
diff --git a/sdks/python/apache_beam/pipeline_test.py b/sdks/python/apache_beam/pipeline_test.py
index c1db5cb..86ae45f 100644
--- a/sdks/python/apache_beam/pipeline_test.py
+++ b/sdks/python/apache_beam/pipeline_test.py
@@ -207,11 +207,13 @@ class PipelineTest(unittest.TestCase):
       yield o
       yield SideOutputValue('side', o)
 
+    num_elements = 100000
+
     pipeline = Pipeline('DirectPipelineRunner')
 
     gc.collect()
     count_threshold = len(gc.get_objects()) + 10000
-    biglist = pipeline | Create('oom:create', ['x'] * 1000000)
+    biglist = pipeline | Create('oom:create', ['x'] * num_elements)
     dupes = (
         biglist
         | Map('oom:addone', lambda x: (x, 1))
@@ -223,17 +225,17 @@ class PipelineTest(unittest.TestCase):
         | CombinePerKey('oom:combine', sum)
         | Map('oom:check', check_memory, count_threshold))
 
-    assert_that(result, equal_to([('x', 3000000)]))
+    assert_that(result, equal_to([('x', 3 * num_elements)]))
     pipeline.run()
     self.assertEqual(
         pipeline.runner.debug_counters['element_counts'],
         {
-            'oom:flatten': 3000000,
-            ('oom:combine/GroupByKey/reify_windows', None): 3000000,
-            ('oom:dupes/oom:dupes', 'side'): 1000000,
-            ('oom:dupes/oom:dupes', None): 1000000,
-            'oom:create': 1000000,
-            ('oom:addone', None): 1000000,
+            'oom:flatten': 3 * num_elements,
+            ('oom:combine/GroupByKey/reify_windows', None): 3 * num_elements,
+            ('oom:dupes/oom:dupes', 'side'): num_elements,
+            ('oom:dupes/oom:dupes', None): num_elements,
+            'oom:create': num_elements,
+            ('oom:addone', None): num_elements,
             'oom:combine/GroupByKey/group_by_key': 1,
             ('oom:check', None): 1,
             'assert_that/singleton': 1,