
Commit 2667deb

max-sixty authored and shoyer committed

Flake fixed (#2629)

* add ignores
* test_combine
* isort
* fixes
* odd interaction between pytest fixture loop and flake
* fix
1 parent d8d87d2 commit 2667deb

36 files changed: 179 additions and 177 deletions

setup.cfg

Lines changed: 4 additions & 1 deletion
@@ -8,7 +8,10 @@ testpaths=xarray/tests
 [flake8]
 max-line-length=79
 ignore=
-    W503
+    E402 # module level import not at top of file
+    E731 # do not assign a lambda expression, use a def
+    W503 # line break before binary operator
+    W504 # line break after binary operator
 exclude=
     doc/
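For orientation, a minimal hypothetical module (not from the xarray codebase) showing the kind of line each newly ignored code flags; the one-line descriptions match the comments added above:

"""Hypothetical module illustrating the flake8 codes ignored above."""

import sys

print(sys.version)        # any executable statement before an import ...

import os                 # ... makes this line E402 (import not at top of file)

square = lambda x: x * x  # E731: assigning a lambda instead of using def

total = (1
         + 2)             # W503: line break before the binary operator

other = (3 +
         4)               # W504: line break after the binary operator

print(os.sep, square(3), total, other)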

versioneer.py

Lines changed: 1 addition & 0 deletions
@@ -1,3 +1,4 @@
+# flake8: noqa
 
 # Version: 0.18
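A note on the change above: a bare "# flake8: noqa" comment is flake8's file-level directive, so the vendored versioneer.py is skipped entirely rather than annotated line by line. A minimal sketch of that behaviour (hypothetical file, not part of this commit):

# flake8: noqa
# With the bare directive above, flake8 skips every check in this file, so
# even an otherwise-flagged line like this lambda assignment (E731) passes;
# that is why the directive suits a vendored file such as versioneer.py.
identity = lambda x: x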

xarray/backends/api.py

Lines changed: 17 additions & 17 deletions
@@ -1,33 +1,32 @@
 from __future__ import absolute_import, division, print_function
 
 import os.path
+import warnings
 from glob import glob
 from io import BytesIO
 from numbers import Number
-import warnings
 
 import numpy as np
 
 from .. import Dataset, backends, conventions
 from ..core import indexing
-from ..core.combine import _infer_concat_order_from_positions, _auto_combine
+from ..core.combine import _auto_combine, _infer_concat_order_from_positions
 from ..core.pycompat import basestring, path_type
-from ..core.utils import close_on_error, is_remote_uri, is_grib_path
+from ..core.utils import close_on_error, is_grib_path, is_remote_uri
 from .common import ArrayWriter
 from .locks import _get_scheduler
 
-
 DATAARRAY_NAME = '__xarray_dataarray_name__'
 DATAARRAY_VARIABLE = '__xarray_dataarray_variable__'
 
 
 def _get_default_engine_remote_uri():
     try:
-        import netCDF4
+        import netCDF4  # noqa
         engine = 'netcdf4'
     except ImportError:  # pragma: no cover
         try:
-            import pydap  # flake8: noqa
+            import pydap  # noqa
             engine = 'pydap'
         except ImportError:
             raise ValueError('netCDF4 or pydap is required for accessing '
@@ -38,12 +37,12 @@ def _get_default_engine_remote_uri():
 def _get_default_engine_grib():
     msgs = []
     try:
-        import Nio  # flake8: noqa
+        import Nio  # noqa
         msgs += ["set engine='pynio' to access GRIB files with PyNIO"]
     except ImportError:  # pragma: no cover
         pass
     try:
-        import cfgrib  # flake8: noqa
+        import cfgrib  # noqa
         msgs += ["set engine='cfgrib' to access GRIB files with cfgrib"]
     except ImportError:  # pragma: no cover
         pass
@@ -56,7 +55,7 @@ def _get_default_engine_grib():
 
 def _get_default_engine_gz():
     try:
-        import scipy  # flake8: noqa
+        import scipy  # noqa
         engine = 'scipy'
     except ImportError:  # pragma: no cover
         raise ValueError('scipy is required for accessing .gz files')
@@ -65,11 +64,11 @@ def _get_default_engine_gz():
 
 def _get_default_engine_netcdf():
     try:
-        import netCDF4  # flake8: noqa
+        import netCDF4  # noqa
         engine = 'netcdf4'
     except ImportError:  # pragma: no cover
         try:
-            import scipy.io.netcdf  # flake8: noqa
+            import scipy.io.netcdf  # noqa
             engine = 'scipy'
         except ImportError:
             raise ValueError('cannot read or write netCDF files without '
@@ -579,7 +578,7 @@ def open_mfdataset(paths, chunks=None, concat_dim=_CONCAT_DIM_DEFAULT,
 
     .. [1] http://xarray.pydata.org/en/stable/dask.html
     .. [2] http://xarray.pydata.org/en/stable/dask.html#chunking-and-performance
-    """
+    """  # noqa
     if isinstance(paths, basestring):
         if is_remote_uri(paths):
             raise ValueError(
@@ -642,11 +641,12 @@ def open_mfdataset(paths, chunks=None, concat_dim=_CONCAT_DIM_DEFAULT,
             # Discard ordering because it should be redone from coordinates
             ids = False
 
-        combined = _auto_combine(datasets, concat_dims=concat_dims,
-                                 compat=compat,
-                                 data_vars=data_vars, coords=coords,
-                                 infer_order_from_coords=infer_order_from_coords,
-                                 ids=ids)
+        combined = _auto_combine(
+            datasets, concat_dims=concat_dims,
+            compat=compat,
+            data_vars=data_vars, coords=coords,
+            infer_order_from_coords=infer_order_from_coords,
+            ids=ids)
     except ValueError:
         for ds in datasets:
             ds.close()
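The repeated edit in this file swaps the file-level directive for the line-level one: a bare "# flake8: noqa" anywhere in a file makes flake8 skip the whole file, while "# noqa" suppresses warnings only on its own line (here the F401 "imported but unused" report on availability-probe imports). A minimal sketch of the intended pattern, using a hypothetical helper name that is not in the diff:

def _has_netcdf4():
    # "# noqa" silences flake8 for the unused import on this line only;
    # the rest of the module is still checked.
    try:
        import netCDF4  # noqa
        return True
    except ImportError:
        return False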

xarray/backends/cfgrib_.py

Lines changed: 1 addition & 1 deletion
@@ -6,7 +6,7 @@
 from ..core import indexing
 from ..core.utils import Frozen, FrozenOrderedDict
 from .common import AbstractDataStore, BackendArray
-from .locks import ensure_lock, SerializableLock
+from .locks import SerializableLock, ensure_lock
 
 # FIXME: Add a dedicated lock, even if ecCodes is supposed to be thread-safe
 # in most circumstances. See:
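The import reshuffles in this and the following files come from the commit's isort pass; within each from-import the resulting order matches a plain case-sensitive sort, which is why SerializableLock now precedes ensure_lock. A small, purely illustrative check of that ordering in Python:

# ASCII ordering puts uppercase letters before lowercase ones, so a
# case-sensitive sort lists constant- and class-style names ahead of
# function-style names.
names = ['ensure_lock', 'combine_locks', 'SerializableLock',
         'NETCDFC_LOCK', 'HDF5_LOCK']
print(sorted(names))
# ['HDF5_LOCK', 'NETCDFC_LOCK', 'SerializableLock', 'combine_locks', 'ensure_lock']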

xarray/backends/file_manager.py

Lines changed: 0 additions & 1 deletion
@@ -7,7 +7,6 @@
 from .locks import acquire
 from .lru_cache import LRUCache
 
-
 # Global cache for storing open files.
 FILE_CACHE = LRUCache(
     OPTIONS['file_cache_maxsize'], on_evict=lambda k, v: v.close())

xarray/backends/netCDF4_.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,9 +14,9 @@
1414
from ..core.utils import FrozenOrderedDict, close_on_error, is_remote_uri
1515
from .common import (
1616
BackendArray, WritableCFDataStore, find_root, robust_getitem)
17-
from .locks import (NETCDFC_LOCK, HDF5_LOCK,
18-
combine_locks, ensure_lock, get_write_lock)
1917
from .file_manager import CachingFileManager, DummyFileManager
18+
from .locks import (
19+
HDF5_LOCK, NETCDFC_LOCK, combine_locks, ensure_lock, get_write_lock)
2020
from .netcdf3 import encode_nc3_attr_value, encode_nc3_variable
2121

2222
# This lookup table maps from dtype.byteorder to a readable endian

xarray/backends/netcdf3.py

Lines changed: 1 addition & 1 deletion
@@ -9,7 +9,7 @@
 
 # Special characters that are permitted in netCDF names except in the
 # 0th position of the string
-_specialchars = '_.@+- !"#$%&\()*,:;<=>?[]^`{|}~'
+_specialchars = '_.@+- !"#$%&\\()*,:;<=>?[]^`{|}~'
 
 # The following are reserved names in CDL and may not be used as names of
 # variables, dimension, attributes
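The only change here doubles a backslash: in a non-raw string "\(" is not a valid escape sequence, so Python passes the backslash through but warns about it (a DeprecationWarning since 3.6, reported by pycodestyle as W605), and "\\(" spells the identical value explicitly. A short illustrative check, not part of the diff:

# All three literals denote the same two characters: a backslash and "(".
implicit = '\('   # relies on the invalid-escape passthrough (flagged as W605)
explicit = '\\('  # same value, spelled explicitly
raw = r'\('       # same value again, as a raw string
assert implicit == explicit == raw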

xarray/backends/pseudonetcdf_.py

Lines changed: 0 additions & 1 deletion
@@ -10,7 +10,6 @@
 from .file_manager import CachingFileManager
 from .locks import HDF5_LOCK, NETCDFC_LOCK, combine_locks, ensure_lock
 
-
 # psuedonetcdf can invoke netCDF libraries internally
 PNETCDF_LOCK = combine_locks([HDF5_LOCK, NETCDFC_LOCK])

xarray/backends/pynio_.py

Lines changed: 1 addition & 2 deletions
@@ -8,8 +8,7 @@
 from .common import AbstractDataStore, BackendArray
 from .file_manager import CachingFileManager
 from .locks import (
-    HDF5_LOCK, NETCDFC_LOCK, combine_locks, ensure_lock, SerializableLock)
-
+    HDF5_LOCK, NETCDFC_LOCK, SerializableLock, combine_locks, ensure_lock)
 
 # PyNIO can invoke netCDF libraries internally
 # Add a dedicated lock just in case NCL as well isn't thread-safe.

xarray/backends/rasterio_.py

Lines changed: 0 additions & 1 deletion
@@ -11,7 +11,6 @@
 from .file_manager import CachingFileManager
 from .locks import SerializableLock
 
-
 # TODO: should this be GDAL_LOCK instead?
 RASTERIO_LOCK = SerializableLock()
1716
