Mirror of https://github.com/tiny-pilot/tinypilot.git, synced 2023-10-01 22:58:29 +03:00
Initial commit
24  .circleci/config.yml  Normal file
@@ -0,0 +1,24 @@
version: 2.1
jobs:
  build:
    docker:
      - image: circleci/python:3.7.3
    steps:
      - checkout
      - run:
          name: Install requirements and run build script
          command: |
            mkdir -p ./venv
            virtualenv --python python3 ./venv
            . venv/bin/activate
            pip install --requirement requirements.txt
            pip install --requirement dev_requirements.txt
            ./build
      - persist_to_workspace:
          root: ./
          paths:
            - .coverage
workflows:
  test:
    jobs:
      - build
10  .dependabot/config.yml  Normal file
@@ -0,0 +1,10 @@
version: 1
update_configs:
  - package_manager: "python"
    directory: "/"
    update_schedule: "daily"
    ignored_updates:
      - match:
          # We need to use a specific pylint version that's compatible with
          # DocStringChecker.
          dependency_name: "pylint"
68  .gitignore  vendored  Normal file
@@ -0,0 +1,68 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover

# Translations
*.mo
*.pot

# Django stuff:
*.log

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# virtualenv
venv/

# Vim
*~
*.sw?

# Mac OS
*.DS_Store
*.xcworkspace
11  .pylintrc  Normal file
@@ -0,0 +1,11 @@
[MASTER]

# Load the DocStringChecker plugin.
load-plugins=lint

# We actually want to disable all standard pylint checkers, enable
# DocStringChecker, then disable specific checks in DocStringChecker, but there
# does not seem to be a way of doing that. Instead, we disable everything aside
# from the whitelist of DocStringChecker checks that we want.
disable=all
enable=docstring-trailing-whitespace,docstring-leading-whitespace,docstring-cuddled-quotes,docstring-section-newline,docstring-section-name,docstring-section-order,docstring-first-line,docstring-missing-args,docstring-misnamed-args,docstring-arg-spacing,docstring-too-many-newlines,docstring-second-line-blank,docstring-section-indent
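The checks whitelisted above come from the vendored DocStringChecker plugin added later in this commit (third_party/docstringchecker/lint.py). As a rough, illustrative sketch that is not part of the commit, a function docstring shaped the way the named checks expect has a one-line summary (docstring-first-line), a blank second line (docstring-second-line-blank), a blank line before each section header (docstring-section-newline), sections in the canonical order (docstring-section-order), every argument documented (docstring-missing-args), and the closing quotes on their own line (docstring-cuddled-quotes):

```python
# Illustrative example only; the function and its arguments are made up.
def fetch(url, timeout=None):
    """Fetch a URL and return its body.

    The summary sits alone on the first line, the second line is blank, and
    each section header is preceded by one blank line.

    Args:
      url: Address to fetch.
      timeout: Optional timeout in seconds.

    Returns:
      The response body as a string.
    """
    return 'stub body for %s (timeout=%r)' % (url, timeout)
```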
1  COPYRIGHT  Normal file
@@ -0,0 +1 @@
Copyright (c) 2020 Michael Lynch. All rights reserved.
7  LICENSE  Normal file
@@ -0,0 +1,7 @@
Copyright 2019 Michael Lynch

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
42  README.md  Normal file
@@ -0,0 +1,42 @@
# Python3 Seed

[](https://circleci.com/gh/mtlynch/python3_seed)

## Overview

A boilerplate Python 3 project set up for unit tests and continuous integration.

Specifically:

* Enforces Python style rules with [YAPF](https://github.com/google/yapf)
* Enforces style rules on docstrings using [DocStringChecker](https://chromium.googlesource.com/chromiumos/chromite/+/master/cli/cros/lint.py)
* Performs static analysis with [pyflakes](https://github.com/megies/pyflakes)
* Sorts imports with [isort](https://github.com/timothycrosley/isort)

## Installation

```bash
mkdir -p ./venv
virtualenv --python python3 ./venv
. venv/bin/activate
pip install --requirement requirements.txt
pip install --requirement dev_requirements.txt
hooks/enable_hooks
```

## Customization

To customize this for your project:

1. Change `COPYRIGHT` to your name.
1. Change `LICENSE` to [a license of your choosing](https://choosealicense.com/).
1. Change the CircleCI badge in `README.md` to your own Circle CI project badge.
1. Change the app name in `app/main.py` from `Python Seed` to your app's name.
1. Rename `app/dummy.py` and `tests/test_dummy.py` to the module names of your choosing.
1. Begin working.

## Run

```bash
./app/main.py
```
0  __init__.py  Normal file
5  app/__init__.py  Normal file
@@ -0,0 +1,5 @@
# For relative imports to work in Python 3.6
import os
import sys

sys.path.append(os.path.dirname(os.path.realpath(__file__)))
9  app/dummy.py  Normal file
@@ -0,0 +1,9 @@
"""Dummy module.

Dummy module to exercise unit test code. Replace this with actual application
logic.
"""


def dummy():
    return 'dummy'
32  app/main.py  Executable file
@@ -0,0 +1,32 @@
#!/usr/bin/env python3

import argparse
import logging

import dummy

logger = logging.getLogger(__name__)


def configure_logging():
    root_logger = logging.getLogger()
    handler = logging.StreamHandler()
    formatter = logging.Formatter(
        '%(asctime)s %(name)-15s %(levelname)-4s %(message)s',
        '%Y-%m-%d %H:%M:%S')
    handler.setFormatter(formatter)
    root_logger.addHandler(handler)
    root_logger.setLevel(logging.INFO)


def main(args):
    configure_logging()
    logger.info('Started running')
    print(dummy.dummy())


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        prog='Python Seed',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    main(parser.parse_args())
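main() currently ignores its parsed arguments; a hedged sketch of how the parser might be extended once real behavior is added (the --name flag below is hypothetical and not part of the commit):

```python
# Hypothetical extension of the seed's argument parser (illustrative only).
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        prog='Python Seed',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--name',
                        default='world',
                        help='Example flag; main() would then read args.name.')
    main(parser.parse_args())
```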
57  build  Executable file
@@ -0,0 +1,57 @@
#!/bin/bash

# Exit build script on first failure.
set -e

# Echo commands to stdout.
set -x

# Exit on unset variable.
set -u

# Location of app source files.
SOURCE_DIR=app

# Location of unit tests.
TEST_DIR=tests

# Location of virtualenv.
VIRTUALENV_DIR=venv

# Delete pyc files from previous builds.
find . \
  -name "*.pyc" \
  -type f \
  -not -path "./${VIRTUALENV_DIR}/*" \
  -delete

# Run unit tests and calculate code coverage.
coverage run \
  --source "$SOURCE_DIR" \
  -m unittest discover
coverage report

# Check that source has correct formatting.
yapf \
  --diff \
  --recursive \
  --style google \
  ./ \
  --exclude=third_party/* \
  --exclude=venv/*

# Check correct sorting for imports.
isort \
  --recursive \
  --force-single-line-imports \
  --diff \
  --check-only \
  --skip-glob=third_party/* \
  --skip-glob=venv/*

# Run static analysis for Python bugs/cruft.
pyflakes "${SOURCE_DIR}/" "${TEST_DIR}/"

# Check docstrings for style consistency.
PYTHONPATH="$(pwd)/third_party/docstringchecker" \
  pylint --reports=n --rcfile=.pylintrc "$SOURCE_DIR" "$TEST_DIR"
8  dev_requirements.txt  Normal file
@@ -0,0 +1,8 @@
coverage==5.1
isort[requirements]==4.3.21
pyflakes==2.2.0
# DocStringChecker currently only works with pylint 1.6.5. Upgrading causes it
# to stop producing errors on bad docstrings.
pylint==1.9.4
pylint-quotes==0.2.1
yapf==0.30.0
3  hooks/enable_hooks  Executable file
@@ -0,0 +1,3 @@
#!/bin/bash
# Run this from the repository root to enable all git hooks for this project.
rm -rf .git/hooks/ && ln -s -f ../hooks .git/hooks
1  hooks/pre-commit  Executable file
@@ -0,0 +1 @@
./build
0  requirements.txt  Normal file
0  tests/__init__.py  Normal file
10  tests/test_dummy.py  Normal file
@@ -0,0 +1,10 @@
import unittest

from app import dummy


class DummyTest(unittest.TestCase):
    """Replace this with a real unit test class."""

    def test_dummy(self):
        self.assertEqual('dummy', dummy.dummy())
27  third_party/docstringchecker/LICENSE  vendored  Normal file
@@ -0,0 +1,27 @@
// Copyright (c) 2006-2009 The Chromium OS Authors. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
4  third_party/docstringchecker/README.md  vendored  Normal file
@@ -0,0 +1,4 @@
DocStringChecker pylint plugin taken from the Chromium OS project.

To update to the latest version, run the `update.sh` script.
0  third_party/docstringchecker/chromite/__init__.py  vendored  Normal file
0  third_party/docstringchecker/chromite/utils/__init__.py  vendored  Normal file
107  third_party/docstringchecker/chromite/utils/memoize.py  vendored  Normal file
@@ -0,0 +1,107 @@
# -*- coding: utf-8 -*-
# Copyright 2018 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Functions for automatic caching of expensive function calls."""

from __future__ import print_function

import functools
import sys

import six


def MemoizedSingleCall(functor):
  """Decorator for simple functor targets, caching the results

  The functor must accept no arguments beyond either a class or self (depending
  on if this is used in a classmethod/instancemethod context). Results of the
  wrapped method will be written to the class/instance namespace in a specially
  named cached value. All future invocations will just reuse that value.

  Note that this cache is per-process, so sibling and parent processes won't
  notice updates to the cache.
  """
  # TODO(build): Should we rebase to snakeoil.klass.cached* functionality?
  # pylint: disable=protected-access
  @functools.wraps(functor)
  def wrapper(obj):
    key = wrapper._cache_key
    val = getattr(obj, key, None)
    if val is None:
      val = functor(obj)
      setattr(obj, key, val)
    return val

  # Use name mangling to store the cached value in a (hopefully) unique place.
  wrapper._cache_key = '_%s_cached' % (functor.__name__.lstrip('_'),)
  return wrapper


def Memoize(f):
  """Decorator for memoizing a function.

  Caches all calls to the function using a ._memo_cache dict mapping (args,
  kwargs) to the results of the first function call with those args and kwargs.

  If any of args or kwargs are not hashable, trying to store them in a dict will
  cause a ValueError.

  Note that this cache is per-process, so sibling and parent processes won't
  notice updates to the cache.
  """
  # pylint: disable=protected-access
  f._memo_cache = {}

  @functools.wraps(f)
  def wrapper(*args, **kwargs):
    # Make sure that the key is hashable... as long as the contents of args and
    # kwargs are hashable.
    # TODO(phobbs) we could add an option to use the id(...) of an object if
    # it's not hashable. Then "MemoizedSingleCall" would be obsolete.
    key = (tuple(args), tuple(sorted(kwargs.items())))
    if key in f._memo_cache:
      return f._memo_cache[key]

    result = f(*args, **kwargs)
    f._memo_cache[key] = result
    return result

  return wrapper


def SafeRun(functors, combine_exceptions=False):
  """Executes a list of functors, continuing on exceptions.

  Args:
    functors: An iterable of functors to call.
    combine_exceptions: If set, and multiple exceptions are encountered,
      SafeRun will raise a RuntimeError containing a list of all the exceptions.
      If only one exception is encountered, then the default behavior of
      re-raising the original exception with unmodified stack trace will be
      kept.

  Raises:
    The first exception encountered, with corresponding backtrace, unless
    |combine_exceptions| is specified and there is more than one exception
    encountered, in which case a RuntimeError containing a list of all the
    exceptions that were encountered is raised.
  """
  errors = []

  for f in functors:
    try:
      f()
    except Exception as e:
      # Append the exception object and the traceback.
      errors.append((e, sys.exc_info()[2]))

  if errors:
    if len(errors) == 1 or not combine_exceptions:
      # To preserve the traceback.
      inst, tb = errors[0]
      six.reraise(inst, None, tb)
    else:
      raise RuntimeError([e[0] for e in errors])
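These helpers are what lint.py (below) uses for its @memoize.MemoizedSingleCall cache. A minimal usage sketch, assuming third_party/docstringchecker is on PYTHONPATH (as the build script arranges when it runs pylint); the Catalog class and lookup function are made up for illustration and are not part of the commit:

```python
from chromite.utils import memoize


class Catalog(object):
    """Illustrative class: MemoizedSingleCall caches the result on the instance."""

    @memoize.MemoizedSingleCall
    def entries(self):
        # Runs once per instance; later calls reuse the value stored on self.
        print('scanning...')
        return ['a', 'b', 'c']


@memoize.Memoize
def lookup(name, strict=False):
    # Cached per (args, kwargs) combination; arguments must be hashable.
    print('computing %s' % name)
    return name.upper() if strict else name


catalog = Catalog()
catalog.entries()  # prints 'scanning...'
catalog.entries()  # served from the per-instance cache
lookup('x')        # prints 'computing x'
lookup('x')        # served from the function's cache
```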
784  third_party/docstringchecker/lint.py  vendored  Normal file
@@ -0,0 +1,784 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# This module is not automatically loaded by the `cros` helper. The filename
# would need a "cros_" prefix to make that happen. It lives here so that it
# is alongside the cros_lint.py file.
#
# For msg namespaces, the 9xxx should generally be reserved for our own use.

"""Additional lint modules loaded by pylint.

This is loaded by pylint directly via its pylintrc file:
  load-plugins=chromite.cli.cros.lint

Then pylint will import the register function and call it. So we can have
as many/few checkers as we want in this one module.
"""

from __future__ import print_function

import collections
import tokenize
import os
import re
import sys

import pylint.checkers
from pylint.config import ConfigurationMixIn
import pylint.interfaces

from chromite.utils import memoize


_THIRD_PARTY = os.path.join(
    os.path.dirname(os.path.realpath(__file__)), '..', '..', 'third_party')
_PYLINT_QUOTES = os.path.join(_THIRD_PARTY, 'pylint-quotes')
sys.path.insert(0, _PYLINT_QUOTES)
# pylint: disable=unused-import,wrong-import-position
from pylint_quotes.checker import StringQuoteChecker


# pylint: disable=too-few-public-methods


class DocStringSectionDetails(object):
  """Object to hold details about a docstring section.

  e.g. This holds the Args: or Returns: data.
  """

  def __init__(self, name=None, header=None, lines=None, lineno=None):
    """Initialize.

    Args:
      name: The name of this section, e.g. "Args".
      header: The raw header of this section, e.g. " Args:".
      lines: The raw lines making up the section.
      lineno: The first line of the section in the overall docstring.
        This counts from one and includes the section header line.
    """
    self.name = name
    self.header = header
    self.lines = [] if lines is None else lines
    self.lineno = lineno

  def __str__(self):
    """A human readable string for this object."""
    return 'DocStringSectionDetails(%r, %r)' % (self.name, self.lineno)

  def __repr__(self):
    """A string to quickly identify this object."""
    return 'DocStringSectionDetails(%r, %r, %r, %r)' % (
        self.name, self.header, self.lines, self.lineno,
    )

  def __eq__(self, other):
    """Test whether two DocStringSectionDetails objects are equivalent"""
    return (
        self.name == other.name and
        self.header == other.header and
        self.lines == other.lines and
        self.lineno == other.lineno
    )


def _PylintrcConfig(config_file, section, opts):
  """Read specific pylintrc settings.

  This is a bit hacky. The pylint framework doesn't allow us to access options
  outside of a Checker's own namespace (self.name), and multiple linters may not
  have the same name/options values (since they get globally registered). So we
  have to re-read the registered config file and pull out the value we want.

  The other option would be to force people to duplicate settings in the config
  files and that's worse. e.g.
    [format]
    indent-string = ' '
    [doc_string_checker]
    indent-string = ' '

  Args:
    config_file: Path to the pylintrc file to read.
    section: The section to read.
    opts: The specific settings to return.

  Returns:
    A pylint configuration object. Use option_value('...') to read.
  """
  class ConfigReader(ConfigurationMixIn):
    """Dynamic config file reader."""
    name = section
    options = opts

  cfg = ConfigReader(config_file=config_file)
  cfg.read_config_file()
  cfg.load_config_file()
  return cfg


class DocStringChecker(pylint.checkers.BaseChecker):
  """PyLint AST based checker to verify PEP 257 compliance

  See our style guide for more info:
  https://dev.chromium.org/chromium-os/python-style-guidelines#TOC-Describing-arguments-in-docstrings
  """

  # TODO: See about merging with the pep257 project:
  # https://github.com/GreenSteam/pep257

  __implements__ = pylint.interfaces.IAstroidChecker

  # pylint: disable=class-missing-docstring,multiple-statements
  class _MessageCP001(object): pass
  class _MessageCP002(object): pass
  class _MessageCP003(object): pass
  class _MessageCP004(object): pass
  class _MessageCP005(object): pass
  class _MessageCP006(object): pass
  class _MessageCP007(object): pass
  class _MessageCP008(object): pass
  class _MessageCP009(object): pass
  class _MessageCP010(object): pass
  class _MessageCP011(object): pass
  class _MessageCP012(object): pass
  class _MessageCP013(object): pass
  class _MessageCP014(object): pass
  class _MessageCP015(object): pass
  class _MessageCP016(object): pass
  class _MessageCP017(object): pass
  class _MessageCP018(object): pass
  # pylint: enable=class-missing-docstring,multiple-statements

  # All the sections we recognize (and in this order).
  VALID_FUNC_SECTIONS = ('Examples', 'Args', 'Returns', 'Yields', 'Raises')
  VALID_CLASS_SECTIONS = ('Examples', 'Attributes')
  ALL_VALID_SECTIONS = set(VALID_FUNC_SECTIONS + VALID_CLASS_SECTIONS)

  # This is the section name in the pylintrc file.
  name = 'doc_string_checker'
  # Any pylintrc config options we accept.
  options = ()
  priority = -1
  MSG_ARGS = 'offset:%(offset)i: {%(line)s}'
  msgs = {
      'C9001': ('Modules should have docstrings (even a one liner)',
                ('module-missing-docstring'), _MessageCP001),
      'C9002': ('Classes should have docstrings (even a one liner)',
                ('class-missing-docstring'), _MessageCP002),
      'C9003': ('Trailing whitespace in docstring: ' + MSG_ARGS,
                ('docstring-trailing-whitespace'), _MessageCP003),
      'C9004': ('Leading whitespace in docstring (excess or missing)'
                ': ' + MSG_ARGS,
                ('docstring-leading-whitespace'), _MessageCP004),
      'C9005': ('Closing triple quotes should not be cuddled',
                ('docstring-cuddled-quotes'), _MessageCP005),
      'C9006': ('Section names should be preceded by one blank line'
                ': ' + MSG_ARGS,
                ('docstring-section-newline'), _MessageCP006),
      'C9007': ('Section names should be "%(section)s": ' + MSG_ARGS,
                ('docstring-section-name'), _MessageCP007),
      'C9008': ('Sections should be in the order: %(sections)s',
                ('docstring-section-order'), _MessageCP008),
      'C9009': ('First line should be a short summary',
                ('docstring-first-line'), _MessageCP009),
      'C9010': ('Not all args mentioned in doc string: |%(arg)s|',
                ('docstring-missing-args'), _MessageCP010),
      'C9011': ('Variable args/keywords are named *args/**kwargs, not %(arg)s',
                ('docstring-misnamed-args'), _MessageCP011),
      'C9012': ('Incorrectly formatted Args section: %(arg)s',
                ('docstring-arg-spacing'), _MessageCP012),
      'C9013': ('Too many blank lines in a row: ' + MSG_ARGS,
                ('docstring-too-many-newlines'), _MessageCP013),
      'C9014': ('Second line should be blank',
                ('docstring-second-line-blank'), _MessageCP014),
      'C9015': ('Section indentation should be %(want_indent)s spaces, not '
                '%(curr_indent)s spaces: ' + MSG_ARGS,
                ('docstring-section-indent'), _MessageCP015),
      'C9016': ('Closing triple quotes should be indented with %(want_indent)s '
                'spaces, not %(curr_indent)s',
                ('docstring-trailing-quotes'), _MessageCP016),
      'C9017': ('Section %(section)s shows up more than once; previous at '
                '%(line_old)i',
                ('docstring-duplicate-section'), _MessageCP017),
      'C9018': ('Docstrings must start with exactly three quotes',
                ('docstring-extra-quotes'), _MessageCP018),
  }

  def __init__(self, *args, **kwargs):
    pylint.checkers.BaseChecker.__init__(self, *args, **kwargs)

    if self.linter is None:
      # Unit tests don't set this up.
      self._indent_string = ' '
    else:
      cfg = _PylintrcConfig(self.linter.config_file, 'format',
                            (('indent-string', {'default': ' ',
                                                'type': 'string'}),))
      self._indent_string = cfg.option_value('indent-string')
    self._indent_len = len(self._indent_string)

  def visit_functiondef(self, node):
    """Verify function docstrings"""
    if node.doc:
      lines = node.doc.split('\n')
      self._check_common(node, lines)
      sections = self._parse_docstring_sections(node, lines)
      self._check_section_lines(node, lines, sections, self.VALID_FUNC_SECTIONS)
      self._check_all_args_in_doc(node, lines, sections)
      self._check_func_signature(node)
    else:
      # This is what C0111 already does for us, so ignore.
      pass

  def visit_module(self, node):
    """Verify module docstrings"""
    if node.doc:
      self._check_common(node)
    else:
      # Ignore stub __init__.py files.
      if os.path.basename(node.file) == '__init__.py':
        return
      self.add_message('C9001', node=node)

  def visit_classdef(self, node):
    """Verify class docstrings"""
    if node.doc:
      lines = node.doc.split('\n')
      self._check_common(node, lines)
      sections = self._parse_docstring_sections(node, lines)
      self._check_section_lines(node, lines, sections,
                                self.VALID_CLASS_SECTIONS)
    else:
      self.add_message('C9002', node=node, line=node.fromlineno)

  def _docstring_indent(self, node):
    """How much a |node|'s docstring should be indented"""
    if node.display_type() == 'Module':
      return 0
    else:
      return node.col_offset + self._indent_len

  def _check_common(self, node, lines=None):
    """Common checks we enforce on all docstrings"""
    if lines is None:
      lines = node.doc.split('\n')

    funcs = (
        self._check_first_line,
        self._check_second_line_blank,
        self._check_whitespace,
        self._check_last_line,
    )
    for f in funcs:
      f(node, lines)

  def _check_first_line(self, node, lines):
    """Make sure first line is a short summary by itself"""
    if lines[0] == '':
      self.add_message('C9009', node=node, line=node.fromlineno)

    # We only check the first line for extra quotes because the grammar halts
    # when it sees the next set of triple quotes (which means extra trailing
    # quotes are not part of the docstring).
    if lines[0].startswith('"'):
      self.add_message('C9018', node=node, line=node.fromlineno)

  def _check_second_line_blank(self, node, lines):
    """Make sure the second line is blank"""
    if len(lines) > 1 and lines[1] != '':
      self.add_message('C9014', node=node, line=node.fromlineno)

  def _check_whitespace(self, node, lines):
    """Verify whitespace is sane"""
    # Make sure first line doesn't have leading whitespace.
    if lines[0].lstrip() != lines[0]:
      margs = {'offset': 0, 'line': lines[0]}
      self.add_message('C9004', node=node, line=node.fromlineno, args=margs)

    # Verify no trailing whitespace.
    # We skip the last line since it's supposed to be pure whitespace.
    #
    # Also check for multiple blank lines in a row.
    last_blank = False
    for i, l in enumerate(lines[:-1]):
      margs = {'offset': i, 'line': l}

      if l.rstrip() != l:
        self.add_message('C9003', node=node, line=node.fromlineno, args=margs)

      curr_blank = l == ''
      if last_blank and curr_blank:
        self.add_message('C9013', node=node, line=node.fromlineno, args=margs)
      last_blank = curr_blank

    # Now specially handle the last line.
    l = lines[-1]
    if l.strip() != '' and l.rstrip() != l:
      margs = {'offset': len(lines), 'line': l}
      self.add_message('C9003', node=node, line=node.fromlineno, args=margs)

  def _check_last_line(self, node, lines):
    """Make sure last line is all by itself"""
    if len(lines) > 1:
      indent = self._docstring_indent(node)

      if lines[-1].strip() != '':
        self.add_message('C9005', node=node, line=node.fromlineno)
      elif lines[-1] != ' ' * indent:
        # The -1 line holds the """ itself and that should be indented.
        margs = {
            'offset': len(lines) - 1,
            'line': lines[-1],
            'want_indent': indent,
            'curr_indent': len(lines[-1]),
        }
        self.add_message('C9016', node=node, line=node.fromlineno, args=margs)

      # The last line should not be blank.
      if lines[-2] == '':
        margs = {'offset': len(lines) - 2, 'line': lines[-2]}
        self.add_message('C9003', node=node, line=node.fromlineno, args=margs)

  @memoize.MemoizedSingleCall
  def _invalid_sections_map(self):
    """Get a mapping from common invalid section names to the valid name."""
    invalid_sections_sets = {
        # Handle common misnamings.
        'Examples': {'example', 'exaple', 'exaples', 'usage', 'example usage'},
        'Attributes': {
            'attr', 'attrs', 'attribute',
            'prop', 'props', 'properties',
            'member', 'members',
        },
        'Args': {'arg', 'argument', 'arguments'},
        'Returns': {
            'ret', 'rets', 'return', 'retrun', 'retruns', 'result', 'results',
        },
        'Yields': {'yield', 'yeild', 'yeilds'},
        'Raises': {'raise', 'riase', 'riases', 'throw', 'throws'},
    }

    invalid_sections_map = {}
    for key, value in invalid_sections_sets.items():
      invalid_sections_map.update((v, key) for v in value)
    return invalid_sections_map

  def _parse_docstring_sections(self, node, lines):
    """Find all the sections and return them

    Args:
      node: The python object we're checking.
      lines: Parsed docstring lines.

    Returns:
      An ordered dict of sections and their (start, end) line numbers.
      The start line does not include the section header itself.
      {'Args': [start_line_number, end_line_number], ...}
    """
    sections = collections.OrderedDict()
    invalid_sections_map = self._invalid_sections_map()
    indent_len = self._docstring_indent(node)

    in_args_section = False
    last_section = None
    for lineno, line in enumerate(lines[1:], start=2):
      line_indent_len = len(line) - len(line.lstrip(' '))
      margs = {
          'offset': lineno,
          'line': line,
      }
      l = line.strip()

      # Catch semi-common javadoc style.
      if l.startswith('@param'):
        margs['section'] = 'Args'
        self.add_message('C9007', node=node, line=node.fromlineno, args=margs)
      if l.startswith('@return'):
        margs['section'] = 'Returns'
        self.add_message('C9007', node=node, line=node.fromlineno, args=margs)

      # See if we can detect incorrect behavior.
      section = l.split(':', 1)[0]

      # Remember whether we're currently in the Args: section so we don't treat
      # named arguments as sections (e.g. a function has a "returns" arg). Use
      # the indentation level to detect the start of the next section.
      if in_args_section:
        in_args_section = (indent_len < line_indent_len)

      if not in_args_section:
        # We only parse known invalid & valid sections here. This avoids
        # picking up things that look like sections but aren't (e.g. "Note:"
        # lines), and avoids running checks on sections we don't yet support.
        if section.lower() in invalid_sections_map:
          margs['section'] = invalid_sections_map[section.lower()]
          self.add_message('C9007', node=node, line=node.fromlineno, args=margs)
        elif section in self.ALL_VALID_SECTIONS:
          if section in sections:
            # We got the same section more than once?
            margs_copy = margs.copy()
            margs_copy.update({
                'line_old': sections[section].lineno,
                'section': section,
            })
            self.add_message('C9017', node=node, line=node.fromlineno,
                             args=margs_copy)
          else:
            # Gather the order of the sections.
            sections[section] = last_section = DocStringSectionDetails(
                name=section, header=line, lineno=lineno)

        # Detect whether we're in the Args section once we've processed the Args
        # section itself.
        in_args_section = (section in ('Args', 'Attributes'))

      if l == '' and last_section:
        last_section.lines = lines[last_section.lineno:lineno - 1]
        last_section = None

    return sections

  def _check_section_lines(self, node, lines, sections, valid_sections):
    """Verify each section (e.g. Args/Returns/etc...) is sane"""
    indent_len = self._docstring_indent(node)

    # Make sure the sections are in the right order.
    found_sections = [x for x in valid_sections if x in sections]
    if found_sections != list(sections):
      margs = {'sections': ', '.join(valid_sections)}
      self.add_message('C9008', node=node, line=node.fromlineno, args=margs)

    for section in sections.values():
      # We're going to check the section line itself.
      lineno = section.lineno
      line = section.header
      want_indent = indent_len + self._indent_len
      line_indent_len = len(line) - len(line.lstrip(' '))
      margs = {
          'offset': lineno,
          'line': line,
          'want_indent': want_indent,
          'curr_indent': line_indent_len,
      }

      # Make sure it has some number of leading whitespace.
      if not line.startswith(' '):
        self.add_message('C9004', node=node, line=node.fromlineno, args=margs)

      # Make sure it has a single trailing colon.
      if line.strip() != '%s:' % section.name:
        margs['section'] = section.name
        self.add_message('C9007', node=node, line=node.fromlineno, args=margs)

      # Verify blank line before it. We use -2 because lineno counts from one,
      # but lines is a zero-based list.
      if lines[lineno - 2] != '':
        self.add_message('C9006', node=node, line=node.fromlineno, args=margs)

      # Check the indentation level on the section header (e.g. Args:).
      if line_indent_len != indent_len:
        self.add_message('C9015', node=node, line=node.fromlineno, args=margs)

      # Now check the indentation of subtext in each section.
      saw_exact = False
      for i, line in enumerate(section.lines, start=1):
        # Every line should be indented at least the minimum.
        # Always update margs so that if we drop through below, it has
        # reasonable values when generating the message.
        line_indent_len = len(line) - len(line.lstrip(' '))
        margs.update({
            'line': line,
            'offset': lineno + i,
            'curr_indent': line_indent_len,
        })
        if line_indent_len == want_indent:
          saw_exact = True
        elif line_indent_len < want_indent:
          self.add_message('C9015', node=node, line=node.fromlineno, args=margs)

      # If none of the lines were indented at the exact level, then something
      # is amiss like they're all incorrectly offset.
      if not saw_exact:
        self.add_message('C9015', node=node, line=node.fromlineno, args=margs)

  def _check_all_args_in_doc(self, node, _lines, sections):
    """All function arguments are mentioned in doc"""
    if not hasattr(node, 'argnames'):
      return

    # If they don't have an Args section, then give it a pass.
    section = sections.get('Args')
    if section is None:
      return

    # Now verify all args exist.
    # TODO: Should we verify arg order matches doc order ?
    # TODO: Should we check indentation of wrapped docs ?
    missing_args = []
    for arg in node.args.args:
      # Ignore class related args.
      if arg.name in ('cls', 'self'):
        continue
      # Ignore ignored args.
      if arg.name.startswith('_'):
        continue

      # Valid arguments may look like `<arg>:` or `<arg> (<type>):`.
      arg_re = re.compile(r'%s( \([^)]+\))?:' % re.escape(arg.name))
      for l in section.lines:
        aline = l.lstrip()
        m = arg_re.match(aline)
        if m:
          amsg = aline[m.end():]
          if amsg and len(amsg) - len(amsg.lstrip()) != 1:
            margs = {'arg': l}
            self.add_message('C9012', node=node, line=node.fromlineno,
                             args=margs)
          break
      else:
        missing_args.append(arg.name)

    if missing_args:
      margs = {'arg': '|, |'.join(missing_args)}
      self.add_message('C9010', node=node, line=node.fromlineno, args=margs)

  def _check_func_signature(self, node):
    """Require *args to be named args, and **kwargs kwargs"""
    vararg = node.args.vararg
    if vararg and vararg != 'args' and vararg != '_args':
      margs = {'arg': vararg}
      self.add_message('C9011', node=node, line=node.fromlineno, args=margs)

    kwarg = node.args.kwarg
    if kwarg and kwarg != 'kwargs' and kwarg != '_kwargs':
      margs = {'arg': kwarg}
      self.add_message('C9011', node=node, line=node.fromlineno, args=margs)


class Py3kCompatChecker(pylint.checkers.BaseChecker):
  """Make sure we enforce py3k compatible features"""

  __implements__ = pylint.interfaces.IAstroidChecker

  # pylint: disable=class-missing-docstring,multiple-statements
  class _MessageR9100(object): pass
  # pylint: enable=class-missing-docstring,multiple-statements

  name = 'py3k_compat_checker'
  priority = -1
  MSG_ARGS = 'offset:%(offset)i: {%(line)s}'
  msgs = {
      'R9100': ('Missing "from __future__ import print_function" line',
                ('missing-print-function'), _MessageR9100),
  }
  options = ()

  def __init__(self, *args, **kwargs):
    super(Py3kCompatChecker, self).__init__(*args, **kwargs)
    self.seen_print_func = False
    self.saw_imports = False

  def close(self):
    """Called when done processing module"""
    if not self.seen_print_func:
      # Do not warn if moduler doesn't import anything at all (like
      # empty __init__.py files).
      if self.saw_imports:
        self.add_message('R9100')

  def _check_print_function(self, node):
    """Verify print_function is imported"""
    if node.modname == '__future__':
      for name, _ in node.names:
        if name == 'print_function':
          self.seen_print_func = True

  def visit_importfrom(self, node):
    """Process 'from' statements"""
    self.saw_imports = True
    self._check_print_function(node)

  def visit_import(self, _node):
    """Process 'import' statements"""
    self.saw_imports = True


class SourceChecker(pylint.checkers.BaseChecker):
  """Make sure we enforce rules on the source."""

  __implements__ = pylint.interfaces.IAstroidChecker

  # pylint: disable=class-missing-docstring,multiple-statements
  class _MessageR9200(object): pass
  class _MessageR9201(object): pass
  class _MessageR9202(object): pass
  class _MessageR9203(object): pass
  class _MessageR9204(object): pass
  class _MessageR9205(object): pass
  # pylint: enable=class-missing-docstring,multiple-statements

  name = 'source_checker'
  priority = -1
  MSG_ARGS = 'offset:%(offset)i: {%(line)s}'
  msgs = {
      'R9200': ('Shebang should be #!/usr/bin/env python2 or '
                '#!/usr/bin/env python3',
                ('bad-shebang'), _MessageR9200),
      'R9201': ('Shebang is missing, but file is executable (chmod -x to fix)',
                ('missing-shebang'), _MessageR9201),
      'R9202': ('Shebang is set, but file is not executable (chmod +x to fix)',
                ('spurious-shebang'), _MessageR9202),
      'R9203': ('Unittest not named xxx_unittest.py',
                ('unittest-misnamed'), _MessageR9203),
      'R9204': ('File encoding missing (the first line after the shebang'
                ' should be "# -*- coding: utf-8 -*-")',
                ('missing-file-encoding'), _MessageR9204),
      'R9205': ('File encoding should be "utf-8"',
                ('bad-file-encoding'), _MessageR9205),
  }
  options = ()

  # Taken from PEP-263.
  _ENCODING_RE = re.compile(br'^[ \t\v]*#.*?coding[:=][ \t]*([-_.a-zA-Z0-9]+)')

  def visit_module(self, node):
    """Called when the whole file has been read"""
    with node.stream() as stream:
      st = os.fstat(stream.fileno())
      self._check_shebang(node, stream, st)
      self._check_encoding(node, stream, st)
    self._check_module_name(node)

  def _check_shebang(self, _node, stream, st):
    """Verify the shebang is version specific"""
    stream.seek(0)

    mode = st.st_mode
    executable = bool(mode & 0o0111)

    shebang = stream.readline()
    if shebang[0:2] != b'#!':
      if executable:
        self.add_message('R9201')
      return
    elif not executable:
      self.add_message('R9202')

    if shebang.strip() not in (
        b'#!/usr/bin/env python2', b'#!/usr/bin/env python3'):
      self.add_message('R9200')

  def _check_encoding(self, _node, stream, st):
    """Verify the file has an encoding set

    See PEP-263 for more details.
    https://www.python.org/dev/peps/pep-0263/
    """
    # Only allow empty files to have no encoding (e.g. __init__.py).
    if not st.st_size:
      return

    stream.seek(0)
    encoding = stream.readline()

    # If the first line is the shebang, then the encoding is the second line.
    if encoding[0:2] == b'#!':
      encoding = stream.readline()

    # See if the encoding matches the standard.
    m = self._ENCODING_RE.match(encoding)
    if m:
      if m.group(1) != b'utf-8':
        self.add_message('R9205')
    else:
      self.add_message('R9204')

  def _check_module_name(self, node):
    """Make sure the module name is sane"""
    # Catch various typos.
    name = node.name.rsplit('.', 2)[-1]
    if name.rsplit('_', 2)[-1] in ('unittests',):
      self.add_message('R9203')


class CommentChecker(pylint.checkers.BaseTokenChecker):
  """Enforce our arbitrary rules on comments."""

  __implements__ = pylint.interfaces.ITokenChecker

  # pylint: disable=class-missing-docstring,multiple-statements
  class _MessageR9250(object): pass
  # pylint: enable=class-missing-docstring,multiple-statements

  name = 'comment_checker'
  priority = -1
  MSG_ARGS = 'offset:%(offset)i: {%(line)s}'
  msgs = {
      'R9250': ('One space needed at start of comment: %(comment)s',
                ('comment-missing-leading-space'), _MessageR9250),
  }
  options = ()

  def _visit_comment(self, lineno, comment):
    """Process |comment| at |lineno|."""
    if comment == '#':
      # Ignore standalone comments for spacing.
      return

    if lineno == 1 and comment.startswith('#!'):
      # Ignore shebangs.
      return

    # We remove multiple leading # to support runs like ###.
    if not comment.lstrip('#').startswith(' '):
      self.add_message('R9250', line=lineno, args={'comment': comment})

  def process_tokens(self, tokens):
    """Process tokens and look for comments."""
    for (tok_type, token, (start_row, _), _, _) in tokens:
      if tok_type == tokenize.COMMENT:
        self._visit_comment(start_row, token)


class ChromiteLoggingChecker(pylint.checkers.BaseChecker):
  """Make sure we enforce rules on importing logging."""

  __implements__ = pylint.interfaces.IAstroidChecker

  # pylint: disable=class-missing-docstring,multiple-statements
  class _MessageR9301(object): pass
  # pylint: enable=class-missing-docstring,multiple-statements

  name = 'chromite_logging_checker'
  priority = -1
  MSG_ARGS = 'offset:%(offset)i: {%(line)s}'
  msgs = {
      'R9301': ('logging is deprecated. Use "from chromite.lib import '
                'cros_logging as logging" to import chromite/lib/cros_logging',
                ('cros-logging-import'), _MessageR9301),
  }
  options = ()
  # This checker is disabled by default because we only want to disallow "import
  # logging" in chromite and not in other places cros lint is used. To enable
  # this checker, modify the pylintrc file.
  enabled = False

  def visit_import(self, node):
    """Called when node is an import statement."""
    for name, _ in node.names:
      if name == 'logging':
        self.add_message('R9301', line=node.lineno)


def register(linter):
  """pylint will call this func to register all our checkers"""
  # Walk all the classes in this module and register ours.
  this_module = sys.modules[__name__]
  for member in dir(this_module):
    if not member.endswith('Checker'):
      continue
    cls = getattr(this_module, member)
    linter.register_checker(cls(linter))
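One detail worth calling out from _check_all_args_in_doc above: the argument regex r'%s( \([^)]+\))?:' accepts an Args line either bare or with a parenthesized type, and C9012 (docstring-arg-spacing) fires when the text after the colon is not separated by exactly one space. An illustrative sketch, not part of the commit:

```python
# Illustrative only: Args lines in the two accepted forms, one space after ':'.
def resize(width, height):
    """Resize the canvas.

    Args:
      width: New width in pixels.
      height (int): New height in pixels.
    """
    return (width, height)
```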
29  third_party/docstringchecker/update.sh  vendored  Executable file
@@ -0,0 +1,29 @@
#!/bin/bash
# Downloads the latest copy of the DocStringChecker plugin.

# Exit build script on first failure.
set -e

# Echo commands to stdout.
set -x

# Treat undefined variables as errors.
set -u

LINT_OUTPUT_DIR=$(dirname "$0")
LINT_OUTPUT_FILE="${LINT_OUTPUT_DIR}/lint.py"

wget \
  https://chromium.googlesource.com/chromiumos/chromite/+/master/cli/cros/lint.py?format=TEXT \
  -O - | \
  base64 --decode \
  > "$LINT_OUTPUT_FILE"

MEMOIZE_OUTPUT_DIR=$(dirname "$0")/chromite/utils
MEMOIZE_OUTPUT_FILE="${MEMOIZE_OUTPUT_DIR}/memoize.py"

wget \
  https://chromium.googlesource.com/chromiumos/chromite/+/master/utils/memoize.py?format=TEXT \
  -O - | \
  base64 --decode \
  > "$MEMOIZE_OUTPUT_FILE"