Commit 7f59718

Author: Matt Bertrand

    Flake8 fixes

1 parent af38cd7 commit 7f59718

19 files changed (+113, -97)

.travis.yml (+5, -4)

@@ -21,9 +21,10 @@ install:
 - pip install -e .
 - git clone -b 2.4.x https://github.com/GeoNode/geonode.git
 - cp local_settings.py geonode/geonode/.
-- pushd geonode
-- pip install -e .
+- pip install -e geonode

 script:
-
-- python manage.py test dataqs
+- flake8 --config flake8.cfg dataqs
+- pushd geonode
+- python manage.py test dataqs
+- popd
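
The flake8.cfg passed via --config is not included in this diff, so its contents are an assumption here. A minimal sketch of what such a config file typically looks like (the [flake8] section name is fixed by the tool; the option values are illustrative only):

    # flake8.cfg -- hypothetical contents, not shown in this commit
    [flake8]
    max-line-length = 79
    exclude = .git,__pycache__,migrations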

dataqs/airnow/airnow.py (+4, -3)

@@ -7,7 +7,7 @@
 import re
 import shutil
 from django.conf import settings
-from dataqs.processor_base import GeoDataMosaicProcessor, DEFAULT_WORKSPACE
+from dataqs.processor_base import GeoDataMosaicProcessor
 from dataqs.helpers import warp_image, style_exists

 logger = logging.getLogger("dataqs.processors")
@@ -27,8 +27,9 @@ class AirNowGRIB2HourlyProcessor(GeoDataMosaicProcessor):
     """
     prefix = "airnow"
     base_url = "ftp.airnowapi.org"
-    layer_names = ["airnow_aqi_ozone", "airnow_aqi_pm25",
-                   "airnow_aqi_combined"]
+    layer_names = ["airnow_aqi_ozone",
+                   "airnow_aqi_pm25",
+                   "airnow_aqi_combined"]
     img_patterns = ["", "_pm25", "_combined"]
     layer_titles = ["Ozone", "PM25", "Combined Ozone & PM25"]


dataqs/aqicn/aqicn.py (+12, -12)

@@ -13,8 +13,8 @@
 import traceback
 from dateutil.parser import parse
 from dateutil.tz import tzutc
-from dataqs.helpers import postgres_query, layer_exists, table_exists, style_exists, \
-    asciier
+from dataqs.helpers import postgres_query, layer_exists, table_exists, \
+    style_exists, asciier
 from dataqs.processor_base import GeoDataProcessor, DEFAULT_WORKSPACE
 from geonode.geoserver.helpers import ogc_server_settings

@@ -23,11 +23,11 @@
 script_dir = os.path.dirname(os.path.realpath(__file__))

 REQ_HEADER = {
-    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
-    'User-Agent':
-        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) \
-        AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 '
-        'Safari/537.36'
+    'Accept': 'text/html,application/xhtml+xml,application/xml;'
+              'q=0.9,image/webp,*/*;q=0.8',
+    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) '
+                  'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 '
+                  'Safari/537.36'
 }

 AQICN_SQL = u"""
@@ -79,6 +79,7 @@
 CREATE INDEX {table}_the_geom ON {table} USING gist (the_geom);
 """

+
 def thread_parse(table, cities):
     """
     Thread worker for a list of cities
@@ -135,8 +136,7 @@ def handle_city(self, i, city):
             return
         city['dateTime'] = self.get_time(city)
         city["data"] = {}
-
-        cur_list = soup.find_all("td", {"id" : re.compile('^cur_')})
+        cur_list = soup.find_all("td", {"id": re.compile('^cur_')})
         # Go on to the next city if we don't find anything
         if not cur_list:
             logger.debug("Nothing found for %s" % city['city'])
@@ -160,7 +160,7 @@ def handle_city(self, i, city):

         except KeyboardInterrupt:
             sys.exit()
-        except Exception as e:
+        except Exception:
             logger.error('Error with city {}'.format(city['url']))
             logger.error(traceback.format_exc())

@@ -221,8 +221,8 @@ def run(self):
 class AQICNProcessor(GeoDataProcessor):
     prefix = 'aqicn'
     directory = 'output'
-    cities=None
-    countries=None
+    cities = None
+    countries = None
     pool_size = 6
     base_url = 'http://aqicn.org/city/all/'
     layers = {
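
Many of the long-line fixes in this commit, such as the REQ_HEADER rewrite above, rely on Python's implicit concatenation of adjacent string literals rather than a trailing backslash inside the string. A minimal sketch of the equivalence (the variable name is illustrative):

    # Adjacent literals inside parentheses are joined by the parser,
    # so the wrapped form builds exactly the same string object.
    accept = ('text/html,application/xhtml+xml,application/xml;'
              'q=0.9,image/webp,*/*;q=0.8')
    assert accept.endswith('*/*;q=0.8')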

dataqs/aqicn/tests.py (+2)

@@ -30,12 +30,14 @@ def mock_saveData(self, city):
     with open(tmpfile, 'w') as outfile:
         outfile.write(json.dumps(city))

+
 def mock_worker_init(self, table, cities):
     self.cities = cities
     self.prefix = table
     self.archive = self.prefix + "_archive"
     self.max_wait = 5

+
 class AQICNTest(TestCase):
     """
     Tests the dataqs.aqicn module. Since each processor is highly

dataqs/csv_helpers.py (+1, -1)

@@ -62,4 +62,4 @@ def writerow(self, row):

     def writerows(self, rows):
         for row in rows:
-            self.writerow(row)
+            self.writerow(row)

dataqs/forecastio/tests.py (-1)

@@ -110,4 +110,3 @@ def test_cleanup(self):
         self.processor.cleanup()
         self.assertEquals([], glob.glob(os.path.join(
             self.processor.tmp_dir, self.processor.prefix + '*')))
-

dataqs/gdacs/tests.py (+1, -2)

@@ -1,5 +1,3 @@
-import glob
-import json
 import os
 import datetime
 from django.test import TestCase
@@ -9,6 +7,7 @@

 script_dir = os.path.dirname(os.path.realpath(__file__))

+
 class GdacsTest(TestCase):
     """
     Tests the dataqs.gdacs module. Since each processor is highly

dataqs/gfms/tests.py (+1, -1)

@@ -91,7 +91,7 @@ def test_cleanup(self):
         httpretty.register_uri(httpretty.GET, current_url,
                                body=get_mock_image())
         imgfile = self.processor.download(current_url)
-        tif_file = self.processor.convert(imgfile)
+        self.processor.convert(imgfile)
         self.assertNotEqual([], glob.glob(os.path.join(
             self.processor.tmp_dir, self.processor.prefix + '*')))
         self.processor.cleanup()

dataqs/helpers.py (+1, -3)

@@ -3,8 +3,6 @@
 import traceback
 import os
 import subprocess
-
-import functools
 import requests
 from geoserver.catalog import Catalog, FailedRequestError
 import psycopg2
@@ -340,4 +338,4 @@ def __init__(self, content):
         self.status_code = 200

     def raise_for_status(self):
-        pass
+        pass

dataqs/nasa_gpm/tests.py (+7, -6)

@@ -27,7 +27,8 @@ def mock_nlst(self, *args):
     current_date = datetime.datetime.utcnow()
     start_date = current_date - datetime.timedelta(days=7)
     while start_date < current_date:
-        file = '3B-HHR-E.MS.MRG.3IMERG.{}-S120000-E175959.1050.V03E.1day.tif'.format(
+        f = '3B-HHR-E.MS.MRG.3IMERG.{}-S120000-E175959.1050.V03E.1day.tif'
+        file = f.format(
             current_date.strftime('%Y%m%d'))
         file_list.append(file)
         current_date = current_date - datetime.timedelta(days=1)
@@ -73,10 +74,10 @@ def test_parse_name(self):
         Layer title should contain date of image
         :return:
         """
-        imgfile = '3B-HHR-E.MS.MRG.3IMERG.20151027-S133000-E135959.0810.V03E.1day.tif'
-        title = self.processor.parse_name(imgfile)[0]
+        f = '3B-HHR-E.MS.MRG.3IMERG.20151027-S133000-E135959.0810.V03E.1day.tif'
+        title = self.processor.parse_name(f)[0]
         self.assertTrue('NASA Global Precipitation Estimate (1day) - 2015-10-27'
-                            in title)
+                        in title)

     @patch('ftplib.FTP', autospec=True)
     @patch('ftplib.FTP.retrbinary', mock_retrbinary)
@@ -106,9 +107,9 @@ def test_cleanup(self, mock_ftp):
         :return:
         """
         dl_tif = self.processor.download()[0]
-        convert_tif = self.processor.convert(dl_tif)
+        self.processor.convert(dl_tif)
         self.assertNotEqual([], glob.glob(os.path.join(
             self.processor.tmp_dir, self.processor.prefix + '*')))
         self.processor.cleanup()
         self.assertEquals([], glob.glob(os.path.join(
-            self.processor.tmp_dir, self.processor.prefix + '*')))
+            self.processor.tmp_dir, self.processor.prefix + '*')))

dataqs/processor_base.py (+21, -19)

@@ -71,8 +71,9 @@ class GeoDataProcessor(object):
     for import into GeoNode/GeoServer
     """

-    gs_url = "http://{}:8080/geoserver/rest/workspaces/{}/coveragestores/{}/file.geotiff"
-    gs_vec_url = "http://{}:8080/geoserver/rest/workspaces/{}/datastores/{}/featuretypes"
+    base_url = "http://{}:8080/geoserver/rest/workspaces/"
+    gs_url = base_url + "{}/coveragestores/{}/file.geotiff"
+    gs_vec_url = base_url + "{}/datastores/{}/featuretypes"
     gs_style_url = "http://{}:8080/geoserver/rest/styles/"

     def __init__(self, workspace=DEFAULT_WORKSPACE, tmp_dir=GS_TMP_DIR,
@@ -111,17 +112,17 @@ def truncate_gs_cache(self, layer_name):
             ws=self.workspace,
             layer=layer_name
         )
-        truncate_json = json.dumps({'seedRequest':
-                                    {'name': 'geonode:{}'.format(
-                                        layer_name),
-                                     'srs': {'number': 900913},
-                                     'zoomStart': 0,
-                                     'zoomStop': 19,
-                                     'format': 'image/png',
-                                     'type': 'truncate',
-                                     'threadCount': 4
-                                     }
-                                    })
+        truncate_json = json.dumps({
+            'seedRequest': {
+                'name': 'geonode:{}'.format(layer_name),
+                'srs': {'number': 900913},
+                'zoomStart': 0,
+                'zoomStop': 19,
+                'format': 'image/png',
+                'type': 'truncate',
+                'threadCount': 4
+            }
+        })
         res = requests.post(url=gwc_url, data=truncate_json,
                             auth=(_user, _password),
                             headers={"Content-type": "application/json"})
@@ -249,8 +250,8 @@ def set_default_style(self, layer_name, sld_name, sld_content):
         gs_url = self.gs_style_url.format(ogc_server_settings.hostname)

         # Create the style
-        data = "<style><name>{name}</name><filename>{name}.sld</filename></style>".format(
-            name=sld_name)
+        s = "<style><name>{name}</name><filename>{name}.sld</filename></style>"
+        data = s.format(name=sld_name)
         _user, _password = ogc_server_settings.credentials
         res = requests.post(url=gs_url,
                             data=data,
@@ -274,7 +275,8 @@ def set_default_style(self, layer_name, sld_name, sld_content):

         # Assign to the layer
         layer_typename = "{}%3A{}".format(DEFAULT_WORKSPACE, layer_name)
-        data = '<layer><defaultStyle><name>{}</name></defaultStyle></layer>'.format(
+        s = '<layer><defaultStyle><name>{}</name></defaultStyle></layer>'
+        data = s.format(
             sld_name)
         url = urljoin(gs_url.replace("styles", "layers"), layer_typename)
         logger.debug(url)
@@ -303,11 +305,11 @@ def run(self):

 class GeoDataMosaicProcessor(GeoDataProcessor):
     """
     Processor for handling raster mosaic data stores
-    http://docs.geoserver.org/latest/en/user/tutorials/imagemosaic_timeseries/imagemosaic_timeseries.html
+    http://bit.ly/1oMPIE7
     http://geoserver.geo-solutions.it/multidim/en/rest/index.html
     """
-
-    gs_url = "http://{}:8080/geoserver/rest/workspaces/{}/coveragestores/{}/external.imagemosaic"
+    gs_url = "http://{}:8080/geoserver/rest/workspaces/{}/" \
+             "coveragestores/{}/external.imagemosaic"
     mosaic_url = gs_url.replace('external.imagemosaic',
                                 'coverages/{}/index/granules')
     create_url = gs_url.replace('external.imagemosaic', 'file.imagemosaic')
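
The GeoDataProcessor hunk above derives gs_url and gs_vec_url from a shared base_url attribute. This works because a class body executes top to bottom at definition time, so earlier class attributes are visible to later ones. A standalone sketch of the pattern (the class and values are illustrative, not the module's actual code):

    class Endpoints(object):
        # Evaluated once, while the class body runs.
        base_url = "http://{}:8080/geoserver/rest/workspaces/"
        gs_url = base_url + "{}/coveragestores/{}/file.geotiff"

    # base_url was substituted before the class object even existed.
    print(Endpoints.gs_url.format("localhost", "geonode", "demo"))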

dataqs/spei/spei.py (+1, -1)

@@ -2,7 +2,7 @@

 import logging
 import os
-from dataqs.processor_base import GeoDataProcessor, DEFAULT_WORKSPACE
+from dataqs.processor_base import GeoDataProcessor
 from dataqs.helpers import get_band_count, gdal_translate, cdo_invert, \
     nc_convert, style_exists


dataqs/spei/tests.py (+1, -1)

@@ -52,7 +52,7 @@ def test_cleanup(self):
         imgurl = "{}spei03.nc".format(self.processor.base_url)
         httpretty.register_uri(httpretty.GET, imgurl,
                                body=get_mock_image())
-        dl_tif = self.processor.download(imgurl, 'spei03.tif')
+        self.processor.download(imgurl, 'spei03.tif')
         self.assertNotEqual([], glob.glob(os.path.join(
             self.processor.tmp_dir, self.processor.prefix + '*')))
         self.processor.cleanup()

dataqs/usgs_quakes/usgs_quakes.py (+9, -7)

@@ -13,6 +13,7 @@
 logger = logging.getLogger("dataqs.processors")
 script_dir = os.path.dirname(os.path.realpath(__file__))

+
 class USGSQuakeProcessor(GeoDataProcessor):
     """
     Class for retrieving and processing the latest earthquake data from USGS.
@@ -21,9 +22,11 @@ class USGSQuakeProcessor(GeoDataProcessor):
     are removed.
     """
     prefix = 'usgs_quakes'
-    tables = ("quakes_weekly", "quakes_monthly", "quakes_yearly", "quakes_archive")
+    tables = ("quakes_weekly", "quakes_monthly",
+              "quakes_yearly", "quakes_archive")
     titles = ("Last 7 Days", "Last 30 Days", "Last 365 Days", "Archive")
-    base_url = "http://earthquake.usgs.gov/fdsnws/event/1/query?format=geojson&starttime={}&endtime={}"
+    base_url = "http://earthquake.usgs.gov/fdsnws/event/1/query?" \
+               "format=geojson&starttime={}&endtime={}"
     params = {}

     def __init__(self, *args, **kwargs):
@@ -64,7 +67,7 @@ def run(self, rss_file=None):
         """
         if not rss_file:
             rss = self.download(self.base_url.format(self.params['sdate'],
-                                self.params['edate']),
+                                                     self.params['edate']),
                                 filename=self.prefix + '.rss')
             rss_file = os.path.join(self.tmp_dir, rss)

@@ -93,11 +96,10 @@ def run(self, rss_file=None):
         datastore = ogc_server_settings.server.get('DATASTORE')
         if not layer_exists(table, datastore, DEFAULT_WORKSPACE):
             c = connections[datastore].cursor()
+            q = 'ALTER TABLE {tb} ADD CONSTRAINT {tb}_ids UNIQUE (ids);'
             try:
-                c.execute(
-                    'ALTER TABLE {tb} ADD CONSTRAINT {tb}_ids UNIQUE (ids);'.
-                    format(tb=table))
-            except:
+                c.execute(q.format(tb=table))
+            except Exception:
                 c.close()
             self.post_geoserver_vector(table)
             if not style_exists(table):
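
The except changes here and in aqicn.py tighten exception handling: a bare except: also intercepts system-exiting exceptions such as KeyboardInterrupt, and an except Exception as e whose e is never used is flagged by pyflakes (F841). A minimal sketch of the corrected shape (the function name is illustrative):

    import logging
    import traceback

    logger = logging.getLogger(__name__)


    def run_guarded(task):
        try:
            task()
        except KeyboardInterrupt:
            raise  # let Ctrl-C propagate; a bare except would swallow it
        except Exception:
            # no "as e" binding needed when only the traceback is logged
            logger.error(traceback.format_exc())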

dataqs/wqp/tests.py (+2, -1)

@@ -39,7 +39,8 @@ def test_download(self):
         """
         for qtype in ('Result', 'Station'):
             url = ('http://www.waterqualitydata.us/{}/search?'.format(qtype) +
-                   'countrycode=US&startDateLo=12-27-2015&startDateHi=01-26-2016' +
+                   'countrycode=US&startDateLo=12-27-2015' +
+                   '&startDateHi=01-26-2016' +
                    '&characteristicName=pH')
             httpretty.register_uri(httpretty.GET, url,
                                    body=get_mock_response(

dataqs/wqp/wqp.py (+12, -11)

@@ -5,7 +5,7 @@
 import datetime
 import re
 import requests
-from dataqs.helpers import gdal_translate, postgres_query, ogr2ogr_exec, \
+from dataqs.helpers import postgres_query, ogr2ogr_exec, \
     table_exists, purge_old_data, layer_exists, style_exists
 from dataqs.processor_base import GeoDataProcessor, DEFAULT_WORKSPACE
 import unicodecsv as csv
@@ -55,21 +55,22 @@ def update_station_table(self, csvfile):
         :return: None
         """
         vrt_content = (
-            """<OGRVRTDataSource>
-                <OGRVRTLayer name="{name}">
-                    <SrcDataSource>{csv}</SrcDataSource>
-                    <GeometryType>wkbPoint</GeometryType>
-                    <LayerSRS>WGS84</LayerSRS>
-                    <GeometryField encoding="PointFromColumns" x="LongitudeMeasure" y="LatitudeMeasure"/>
-                </OGRVRTLayer>
-            </OGRVRTDataSource>
-            """)
+            """<OGRVRTDataSource>
+                <OGRVRTLayer name="{name}">
+                    <SrcDataSource>{csv}</SrcDataSource>
+                    <GeometryType>wkbPoint</GeometryType>
+                    <LayerSRS>WGS84</LayerSRS>
+                    <GeometryField encoding="PointFromColumns"
+                        x="LongitudeMeasure" y="LatitudeMeasure"/>
+                </OGRVRTLayer>
+            </OGRVRTDataSource>
+            """)
         station_table = self.station_table
         needs_index = not table_exists(station_table)

         db = ogc_server_settings.datastore_db
         vrt_file = os.path.join(self.tmp_dir, csvfile.replace('.csv', '.vrt'))
-        csv_name = os.path.basename(csvfile).replace(".csv","")
+        csv_name = os.path.basename(csvfile).replace(".csv", "")
         if not os.path.exists(vrt_file):
             with open(vrt_file, 'w') as vrt:
                 vrt.write(vrt_content.format(
