
Commit

Merge branch 'cleaner-exceptions' of https://github.com/valgur/sentinelsat into valgur-cleaner-exceptions
willemarcel committed May 29, 2017
2 parents e8e32d8 + 8b738d9 commit 55e9344
Showing 3 changed files with 73 additions and 29 deletions.
57 changes: 39 additions & 18 deletions sentinelsat/sentinel.py
@@ -74,7 +74,7 @@ def query(self, area=None, initial_date='NOW-1DAY', end_date='NOW', **keywords):
Parameters
----------
area : str
The area of interest formatted as a Well-Known Text string.
initial_date : str or datetime
Beginning of the time interval for sensing time. Defaults to 'NOW-1DAY'.
Either a Python datetime or a string in one of the following formats:
@@ -89,18 +89,18 @@ def query(self, area=None, initial_date='NOW-1DAY', end_date='NOW', **keywords):
end_date : str or datetime
End of the time interval for sensing time. Defaults to 'NOW'.
See initial_date for allowed format.
Other Parameters
----------------
Additional keywords can be used to specify other query parameters, e.g. orbitnumber=70.
See https://scihub.copernicus.eu/twiki/do/view/SciHubUserGuide/3FullTextSearch
for a full list of accepted parameters.
Returns
-------
dict[string, dict]
Products returned by the query as a dictionary with the product ID as the key and
the product's attributes (a dictionary) as the value.
"""
query = self.format_query(area, initial_date, end_date, **keywords)
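For reference, the docstring above describes the OpenSearch query interface that these whitespace fixes touch. A minimal usage sketch, assuming placeholder credentials and a hypothetical GeoJSON file (neither is part of this commit):

from sentinelsat.sentinel import SentinelAPI, geojson_to_wkt, read_geojson

# Placeholder credentials and an illustrative GeoJSON file name.
api = SentinelAPI('username', 'password', 'https://scihub.copernicus.eu/apihub/')
footprint = geojson_to_wkt(read_geojson('search_polygon.geojson'))

# Returns {<product id>: {<property>: <value>}}, as the docstring describes.
products = api.query(footprint,
                     initial_date='NOW-1DAY',
                     end_date='NOW',
                     orbitnumber=70)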
@@ -138,7 +138,7 @@ def query_raw(self, query):
Returns
-------
dict[string, dict]
Products returned by the query as a dictionary with the product ID as the key and
the product's attributes (a dictionary) as the value.
"""
response = self._load_query(query)
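query_raw takes a ready-made full-text search string instead of separate arguments; a short sketch reusing the api object from the previous example (the query string itself is only illustrative):

# Same {<product id>: {<property>: <value>}} structure as query().
products = api.query_raw('producttype:GRD AND orbitdirection:ASCENDING')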
@@ -230,10 +230,10 @@ def to_geodataframe(products):

def get_product_odata(self, id, full=False):
"""Access SciHub OData API to get info about a product.
Returns a dict containing the id, title, size, md5sum, date, footprint and download url
of the product. The date field corresponds to the Start ContentDate value.
If ``full`` is set to True, then the full, detailed metadata of the product is returned
in addition to the above. For a mapping between the OpenSearch (Solr) and OData
attribute names see the following definition files:
@@ -343,8 +343,8 @@ def download(self, id, directory_path='.', checksum=False, check_existing=False,
def download_all(self, products, directory_path='.', max_attempts=10, checksum=False,
check_existing=False, **kwargs):
"""Download a list of products.
Takes a list of product IDs as input. This means that the return value of query() can be
passed directly to this method.
File names on the server are used for the downloaded files, e.g.
@@ -412,15 +412,13 @@ class SentinelAPIError(Exception):
"""Invalid responses from SciHub.
"""

-    def __init__(self, http_status=None, code=None, msg=None, response_body=None):
-        self.http_status = http_status
-        self.code = code
+    def __init__(self, msg=None, response=None):
         self.msg = msg
-        self.response_body = response_body
+        self.response = response

     def __str__(self):
-        return '(HTTP status: {0}, code: {1}) {2}'.format(
-            self.http_status, self.code,
+        return 'HTTP status {0} {1}: {2}'.format(
+            self.response.status_code, self.response.reason,
             ('\n' if '\n' in self.msg else '') + self.msg)
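The reworked exception keeps the underlying requests.Response object instead of copying individual fields. A hedged sketch of how calling code can inspect a failure after this change (the credentials and product id are placeholders):

from sentinelsat.sentinel import SentinelAPI, SentinelAPIError

api = SentinelAPI('username', 'password')  # placeholder credentials
try:
    api.get_product_odata('invalid-xyz')
except SentinelAPIError as e:
    # msg carries the server's explanation, response is the full requests.Response.
    print(e.msg)
    print(e.response.status_code, e.response.reason)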


@@ -436,8 +434,8 @@ def read_geojson(geojson_file):


def geojson_to_wkt(geojson_obj, feature_number=0):
"""Convert a GeoJSON object to Well-Known Text. Intended for use with OpenSearch queries.
In case of FeatureCollection, only one of the features is used (the first by default).
3D points are converted to 2D.
@@ -567,10 +565,33 @@ def _parse_odata_timestamp(in_date):
ms = timestamp % 1000
return datetime.utcfromtimestamp(seconds) + timedelta(milliseconds=ms)

def _check_scihub_response(response):
    """Check that the response from server has status code 2xx and that the response is valid JSON."""
    try:
        response.raise_for_status()
        response.json()
    except (requests.HTTPError, ValueError) as e:
        msg = "API response not valid. JSON decoding failed."
        try:
            msg = response.headers['cause-message']
        except:
            if not response.text.strip().startswith('{'):
                try:
                    h = html2text.HTML2Text()
                    h.ignore_images = True
                    h.ignore_anchors = True
                    msg = h.handle(response.text).strip()
                except:
                    pass
        api_error = SentinelAPIError(msg, response)
        # Suppress "During handling of the above exception..." message
        # See PEP 409
        api_error.__cause__ = None
        raise api_error

def _parse_opensearch_response(products):
"""Convert a query response to a dictionary.
The resulting dictionary structure is {<product id>: {<property>: <value>}}.
The property values are converted to their respective Python types unless `parse_values` is set to `False`.
"""
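The new _check_scihub_response helper above centralises error handling: it raises SentinelAPIError with the server's cause-message header (or an html2text rendering of an HTML error page) when the response is not 2xx or not valid JSON. A sketch of how a caller might use it; the URL and the standalone requests call are illustrative only, since inside the library the query and OData methods invoke the check themselves:

import requests

from sentinelsat.sentinel import _check_scihub_response

# Illustrative request with placeholder credentials.
response = requests.get(
    "https://scihub.copernicus.eu/apihub/search?q=*&rows=1&format=json",
    auth=('username', 'password'))
_check_scihub_response(response)  # raises SentinelAPIError on bad status or non-JSON body
results = response.json()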
22 changes: 11 additions & 11 deletions tests/test_mod.py
@@ -103,7 +103,7 @@ def test_SentinelAPI_wrong_credentials():
     )
     with pytest.raises(SentinelAPIError) as excinfo:
         api.query(**_small_query)
-    assert excinfo.value.http_status == 401
+    assert excinfo.value.response.status_code == 401


@my_vcr.use_cassette
@@ -345,6 +345,15 @@ def test_get_product_odata_full():
assert ret[k] == expected[k]


@my_vcr.use_cassette
@pytest.mark.scihub
def test_get_product_info_bad_key():
    api = SentinelAPI(**_api_auth)

    with pytest.raises(SentinelAPIError) as excinfo:
        api.get_product_odata('invalid-xyz')
    assert excinfo.value.msg == "InvalidKeyException : Invalid key (invalid-xyz) to access Products"

@pytest.mark.mock_api
def test_get_product_odata_scihub_down():
api = SentinelAPI("mock_user", "mock_password")
@@ -358,16 +367,7 @@ def test_get_product_odata_scihub_down():
             api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')

-        rqst.get(
-            "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')?$format=json",
-            text='{"error":{"code":null,"message":{"lang":"en","value":'
-                 '"No Products found with key \'8df46c9e-a20c-43db-a19a-4240c2ed3b8b\' "}}}', status_code=500
-        )
-        with pytest.raises(SentinelAPIError) as excinfo:
-            api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')
-        assert excinfo.value.msg == "No Products found with key \'8df46c9e-a20c-43db-a19a-4240c2ed3b8b\' "
-
         rqst.get(
-            "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')?$format=json",
+            "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')/?$format=json",
             text="Mock SciHub is Down", status_code=200
         )
         with pytest.raises(SentinelAPIError) as excinfo:
23 changes: 23 additions & 0 deletions tests/vcr_cassettes/test_get_product_info_bad_key.yaml
@@ -0,0 +1,23 @@
interactions:
- request:
    body: null
    headers:
      Accept: ['*/*']
      Accept-Encoding: ['gzip, deflate']
      Connection: [keep-alive]
      User-Agent: [sentinelsat/0.9.1]
    method: GET
    uri: https://scihub.copernicus.eu/apihub/odata/v1/Products('invalid-xyz')/?$format=json
  response:
    body: {string: '{"error":{"code":null,"message":{"lang":"en","value":"Invalid
        key (invalid-xyz) to access Products"}}}'}
    headers:
      Content-Length: ['102']
      Content-Type: [application/json]
      DataServiceVersion: ['1.0']
      Date: ['Sun, 09 Apr 2017 22:44:50 GMT']
      Pragma: [no-cache]
      Server: [Apache-Coyote/1.1]
      cause-message: ['InvalidKeyException : Invalid key (invalid-xyz) to access Products']
    status: {code: 500, message: Internal Server Error}
version: 1
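This cassette is recorded and replayed by vcrpy through the my_vcr decorator used in the new test. A rough sketch of the decorator setup, assuming a conventional configuration (the project's actual my_vcr instance is defined in tests/test_mod.py and may use different options):

import vcr

# Assumed configuration -- cassette directory and suffix match the file above.
my_vcr = vcr.VCR(
    cassette_library_dir='tests/vcr_cassettes',
    path_transformer=vcr.VCR.ensure_suffix('.yaml'),
)

@my_vcr.use_cassette
def test_get_product_info_bad_key():
    ...  # HTTP traffic is served from the recorded cassette instead of hitting SciHub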
