From: brondsem@apache.org
To: commits@allura.apache.org
Reply-To: dev@allura.apache.org
Date: Thu, 22 Sep 2016 14:16:18 -0000
Message-Id: <1ba58127cc40486da9927428d6efba04@git.apache.org>
In-Reply-To: <1c77d3aafb4c44958b861ff2f2165ba0@git.apache.org>
Subject: [2/3] allura git commit: Split up pylint test into chunks that can be run with nose multiprocess; move pyflakes chunks into parallelized pattern

Split up pylint test into chunks that can be run with nose multiprocess; move pyflakes chunks into parallelized pattern

Project: http://git-wip-us.apache.org/repos/asf/allura/repo
Commit: http://git-wip-us.apache.org/repos/asf/allura/commit/46dc9f9b
Tree: http://git-wip-us.apache.org/repos/asf/allura/tree/46dc9f9b
Diff: http://git-wip-us.apache.org/repos/asf/allura/diff/46dc9f9b

Branch: refs/heads/db/parallel_test_improvements
Commit: 46dc9f9bc0a026e0b555add0dbd5e90cea4a7462
Parents: 1969390
Author: Dave Brondsema
Authored: Wed Sep 21 14:32:35 2016 -0400
Committer: Dave Brondsema
Committed: Wed Sep 21 14:32:35 2016 -0400

----------------------------------------------------------------------
 AlluraTest/alluratest/test_syntax.py | 89 +++++++++++++++++--------------
 1 file changed, 50 insertions(+), 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/allura/blob/46dc9f9b/AlluraTest/alluratest/test_syntax.py
----------------------------------------------------------------------
diff --git a/AlluraTest/alluratest/test_syntax.py b/AlluraTest/alluratest/test_syntax.py
index d29b00b..1db63c9 100644
--- a/AlluraTest/alluratest/test_syntax.py
+++ b/AlluraTest/alluratest/test_syntax.py
@@ -18,6 +18,7 @@
 import os.path
 from subprocess import Popen, PIPE
 import sys
+from itertools import izip_longest
 
 toplevel_dir = os.path.abspath(os.path.dirname(__file__) + "/../..")
 
@@ -30,52 +31,17 @@ def run(cmd):
     sys.stderr.write(stderr)
     return proc.returncode
 
-find_py = "find Allura Forge* -name '*.py'"
-# a recepe from itertools doc
-from itertools import izip_longest
+find_py = "find Allura Forge* -name '*.py'"
+# a recipe from itertools doc
 
 
 def grouper(n, iterable, fillvalue=None):
     "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
     args = [iter(iterable)] * n
     return izip_longest(fillvalue=fillvalue, *args)
 
 
-def test_pyflakes():
-    # skip some that aren't critical errors
-    skips = [
-        'imported but unused',
-        'redefinition of unused',
-        'assigned to but never used',
-        '__version__',
-    ]
-    proc = Popen(find_py, shell=True, cwd=toplevel_dir,
-                 stdout=PIPE, stderr=PIPE)
-    (find_stdout, stderr) = proc.communicate()
-    sys.stderr.write(stderr)
-    assert proc.returncode == 0, proc.returncode
-
-    # run pyflakes in batches, so it doesn't take tons of memory
-    error = False
-    all_files = [f for f in find_stdout.split('\n')
-                 if '/migrations/' not in f and f.strip()]
-    for files in grouper(20, all_files, fillvalue=''):
-        cmd = "pyflakes " + \
-            ' '.join(files) + " | grep -v '" + \
-            "' | grep -v '".join(skips) + "'"
-        # print 'Command was: %s' % cmd
-        retval = run(cmd)
-        if retval != 1:
-            print
-            # print 'Command was: %s' % cmd
-            print 'Returned %s' % retval
-            error = True
-
-    if error:
-        raise Exception('pyflakes failure, see stdout')
-
-
 def test_no_local_tz_functions():
     if run(find_py + " | xargs grep '\.now(' ") not in [1, 123]:
         raise Exception("These should use .utcnow()")
@@ -104,6 +70,51 @@ def test_no_tabs():
     if run(find_py + " | xargs grep ' ' ") not in [1, 123]:
         raise Exception('These should not use tab chars')
 
-def test_linters():
-    if run(find_py + ' | xargs pylint -E --disable=all --enable=exposed-api-needs-kwargs --load-plugins alluratest.pylint_checkers') != 0:
+
+def run_linter(files):
+    if run('pylint -E --disable=all --enable=exposed-api-needs-kwargs --load-plugins alluratest.pylint_checkers {}'.format(' '.join(files))) != 0:
         raise Exception('Custom Allura pylint errors found.')
+
+
+def run_pyflakes(files):
+    # skip some that aren't critical errors
+    skips = [
+        'imported but unused',
+        'redefinition of unused',
+        'assigned to but never used',
+        '__version__',
+    ]
+    files = [f for f in files if '/migrations/' not in f]
+    cmd = "pyflakes " + ' '.join(files) + " | grep -v '" + "' | grep -v '".join(skips) + "'"
+    if run(cmd) != 1:
+        # print 'Command was: %s' % cmd
+        raise Exception('pyflakes failure, see stdout')
+
+
+class TestLinters(object):
+    # this will get populated dynamically with test methods, see below
+    pass
+
+
+# Dynamically generate many test methods, to run pylint & pyflakes commands in separate batches
+# Can't use http://nose.readthedocs.io/en/latest/writing_tests.html#test-generators because nose doesn't run
+# those in parallel
+def create_many_lint_methods():
+    proc = Popen(find_py, shell=True, cwd=toplevel_dir, stdout=PIPE, stderr=PIPE)
+    (find_stdout, stderr) = proc.communicate()
+    sys.stderr.write(stderr)
+    assert proc.returncode == 0, proc.returncode
+    py_files = find_stdout.split('\n')
+
+    for i, files in enumerate(grouper(40, py_files)):
+        files = filter(None, files)
+
+        lint_test_method = lambda self, these_files=files: run_linter(these_files)
+        lint_test_method.__name__ = 'test_pylint_{}'.format(i)
+        setattr(TestLinters, 'test_pylint_{}'.format(i), lint_test_method)
+
+        pyflake_test_method = lambda self, these_files=files: run_pyflakes(these_files)
+        pyflake_test_method.__name__ = 'test_pyflakes_{}'.format(i)
+        setattr(TestLinters, 'test_pyflakes_{}'.format(i), pyflake_test_method)
+
+create_many_lint_methods()