[SPARK-25238][PYTHON] lint-python: Upgrade pycodestyle to v2.4.0
author cclauss <cclauss@bluewin.ch>
Sat, 15 Sep 2018 01:13:07 +0000 (20:13 -0500)
committer Sean Owen <sean.owen@databricks.com>
Sat, 15 Sep 2018 01:13:07 +0000 (20:13 -0500)
See https://pycodestyle.readthedocs.io/en/latest/developer.html#changes for changes made in this release.

## What changes were proposed in this pull request?

Upgrade pycodestyle to v2.4.0

## How was this patch tested?

__pycodestyle__

Please review http://spark.apache.org/contributing.html before opening a pull request.

Closes #22231 from cclauss/patch-1.

Authored-by: cclauss <cclauss@bluewin.ch>
Signed-off-by: Sean Owen <sean.owen@databricks.com>
dev/lint-python
dev/run-tests-jenkins.py
dev/tox.ini
python/pyspark/sql/functions.py

dev/lint-python
index a98a251..e26bd4b 100755 (executable)
@@ -36,7 +36,7 @@ compile_status="${PIPESTATUS[0]}"
 # Get pycodestyle at runtime so that we don't rely on it being installed on the build server.
 # See: https://github.com/apache/spark/pull/1744#issuecomment-50982162
 # Updated to the latest official version of pep8. pep8 is formally renamed to pycodestyle.
-PYCODESTYLE_VERSION="2.3.1"
+PYCODESTYLE_VERSION="2.4.0"
 PYCODESTYLE_SCRIPT_PATH="$SPARK_ROOT_DIR/dev/pycodestyle-$PYCODESTYLE_VERSION.py"
 PYCODESTYLE_SCRIPT_REMOTE_PATH="https://raw.githubusercontent.com/PyCQA/pycodestyle/$PYCODESTYLE_VERSION/pycodestyle.py"
 
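The comment in this hunk explains the design: dev/lint-python pins PYCODESTYLE_VERSION and fetches that exact pycodestyle.py from GitHub at run time, so nothing has to be preinstalled on the build server and this upgrade is just a one-line version bump. A rough Python sketch of that pin-and-fetch idea (illustrative only; the real script is Bash, and its caching and download-fallback details differ):

```python
# Hypothetical standalone sketch of the pin-and-fetch approach used by
# dev/lint-python; paths mirror PYCODESTYLE_SCRIPT_PATH in the diff above.
import os
import urllib.request

PYCODESTYLE_VERSION = "2.4.0"
SCRIPT_PATH = "dev/pycodestyle-%s.py" % PYCODESTYLE_VERSION
REMOTE_PATH = ("https://raw.githubusercontent.com/PyCQA/pycodestyle/"
               "%s/pycodestyle.py" % PYCODESTYLE_VERSION)

# Download the pinned checker once and cache it next to the dev scripts,
# so the build server never needs pycodestyle installed system-wide.
if not os.path.exists(SCRIPT_PATH):
    urllib.request.urlretrieve(REMOTE_PATH, SCRIPT_PATH)
```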
dev/run-tests-jenkins.py
index 6e94389..eca88f2 100755 (executable)
@@ -116,7 +116,7 @@ def run_tests(tests_timeout):
 
     failure_note_by_errcode = {
         # error to denote run-tests script failures:
-        1: 'executing the `dev/run-tests` script',  # noqa: W605
+        1: 'executing the `dev/run-tests` script',
         ERROR_CODES["BLOCK_GENERAL"]: 'some tests',
         ERROR_CODES["BLOCK_RAT"]: 'RAT tests',
         ERROR_CODES["BLOCK_SCALA_STYLE"]: 'Scala style tests',
@@ -131,7 +131,7 @@ def run_tests(tests_timeout):
         ERROR_CODES["BLOCK_PYSPARK_UNIT_TESTS"]: 'PySpark unit tests',
         ERROR_CODES["BLOCK_PYSPARK_PIP_TESTS"]: 'PySpark pip packaging tests',
         ERROR_CODES["BLOCK_SPARKR_UNIT_TESTS"]: 'SparkR unit tests',
-        ERROR_CODES["BLOCK_TIMEOUT"]: 'from timeout after a configured wait of \`%s\`' % (
+        ERROR_CODES["BLOCK_TIMEOUT"]: 'from timeout after a configured wait of `%s`' % (
             tests_timeout)
     }
 
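pycodestyle 2.4.0 adds the W605 check for invalid escape sequences in string literals. A backslash before a backtick is not a recognized Python escape, so the escaped backticks here are dropped and the now-redundant `# noqa: W605` comment goes with them. A small self-contained illustration (the tests_timeout value is made up):

```python
tests_timeout = "400m"  # hypothetical value, for illustration only

# Before: '\`' is not a recognized escape sequence, so pycodestyle 2.4.0
# flags it as W605 (CPython keeps the backslash and warns under -W default).
old_note = 'from timeout after a configured wait of \`%s\`' % tests_timeout

# After: backticks need no escaping inside a string literal.
new_note = 'from timeout after a configured wait of `%s`' % tests_timeout

print(old_note)  # from timeout after a configured wait of \`400m\`
print(new_note)  # from timeout after a configured wait of `400m`
```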
dev/tox.ini
index 28dad8f..6ec223b 100644 (file)
@@ -14,6 +14,6 @@
 # limitations under the License.
 
 [pycodestyle]
-ignore=E402,E731,E241,W503,E226,E722,E741,E305
+ignore=E226,E241,E305,E402,E722,E731,E741,W503,W504
 max-line-length=100
 exclude=cloudpickle.py,heapq3.py,shared.py,python/docs/conf.py,work/*/*.py,python/.eggs/*,dist/*
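The ignore list is re-sorted alphabetically and gains W504. W503 and W504 are complementary checks: W503 flags a line break before a binary operator, W504 a break after one, so projects usually suppress at least one of the pair; with both ignored here, either wrapping style passes. For example, under this config both of the following are accepted:

```python
first_value, second_value = 1, 2

# Break after the operator: W504 would flag this if it were enabled.
total_after = (first_value +
               second_value)

# Break before the operator: W503 would flag this if it were enabled.
total_before = (first_value
                + second_value)
```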
python/pyspark/sql/functions.py
index e288ec8..6da5237 100644 (file)
@@ -1711,7 +1711,7 @@ def regexp_extract(str, pattern, idx):
 @ignore_unicode_prefix
 @since(1.5)
 def regexp_replace(str, pattern, replacement):
-    """Replace all substrings of the specified string value that match regexp with rep.
+    r"""Replace all substrings of the specified string value that match regexp with rep.
 
     >>> df = spark.createDataFrame([('100-200',)], ['str'])
     >>> df.select(regexp_replace('str', r'(\d+)', '--').alias('d')).collect()
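The docstring becomes a raw string because its doctest contains `(\d+)`; in a normal docstring `\d` is an invalid escape sequence that the new W605 check would flag, while the `r` prefix keeps the backslash literal. A minimal illustration with a hypothetical helper:

```python
import re

def strip_digits(text):
    r"""Remove runs of digits from ``text``.

    The r prefix keeps \d as literal characters in this docstring, so
    pycodestyle 2.4.0 does not report W605 for the doctest below.

    >>> strip_digits('spark-2.4.0')
    'spark-..'
    """
    return re.sub(r'\d+', '', text)
```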