## Search/Download - sandbox
from cmr import CollectionQuery, GranuleQuery
import json
import requests
import urlparse
import os
import sys

api = GranuleQuery()
fire = GranuleQuery()

#MOD14: MODIS fire data; bounding polygon = Colorado (GPS coords as lon,lat pairs, counterclockwise)
MOD14granules = api.parameters(
                        short_name="MOD14",
                        platform="Terra",
                        downloadable=True,
                        polygon=[(-109.0507527,40.99898), (-109.0698568,37.0124375), (-102.0868788,36.9799819), (-102.0560592,40.999126), (-109.0507527,40.99898)],
                        temporal=("2017-01-01T00:00:00Z", "2017-01-07T00:00:00Z") #time start, time end
                        )
print "MOD14 gets %s hits in this range" % MOD14granules.hits()
MOD14granules = api.get(10)
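
# A hedged alternative sketch (not used below; the chained-setter style and the MOD14bbox
# name are assumptions): python-cmr also exposes setter methods, so the same Colorado
# query could likely be expressed with a simple lon/lat bounding box instead of the
# explicit polygon above, e.g.:
#MOD14bbox = GranuleQuery() \
#    .short_name("MOD14") \
#    .platform("Terra") \
#    .downloadable() \
#    .bounding_box(-109.0698568, 36.9799819, -102.0560592, 40.999126) \
#    .temporal("2017-01-01T00:00:00Z", "2017-01-07T00:00:00Z")
#print "MOD14 bbox query gets %s hits" % MOD14bbox.hits()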

#MOD03: geolocation data for MOD14
MOD03granules = api.parameters(
                        short_name="MOD03",
                        platform="Terra",
                        downloadable=True,
                        polygon=[(-109.0507527,40.99898), (-109.0698568,37.0124375), (-102.0868788,36.9799819), (-102.0560592,40.999126), (-109.0507527,40.99898)],
                        temporal=("2017-01-01T00:00:00Z", "2017-01-07T00:00:00Z") #time start, time end
                        )
print "MOD03 gets %s hits in this range" % MOD03granules.hits()
MOD03granules = api.get(10)

#VNP14: VIIRS fire data, resolution 750m
VNP14granules = fire.parameters(
                        short_name="VNP14",
                        downloadable=True,
                        polygon=[(-109.0507527,40.99898), (-109.0698568,37.0124375), (-102.0868788,36.9799819), (-102.0560592,40.999126), (-109.0507527,40.99898)],
                        temporal=("2017-01-01T00:00:00Z", "2017-01-07T00:00:00Z") #time start, time end
                        )
print "VNP14 gets %s hits in this range" % VNP14granules.hits()
VNP14granules = fire.get(10)

#VNP14IMGTDL_NRT: near-real-time fire granules with resolution 375m
VNP14hiresgranules = fire.parameters(
                        short_name="VNP14IMGTDL_NRT",
                        downloadable=True,
                        polygon=[(-109.0507527,40.99898), (-109.0698568,37.0124375), (-102.0868788,36.9799819), (-102.0560592,40.999126), (-109.0507527,40.99898)],
                        temporal=("2017-01-01T00:00:00Z", "2017-01-07T00:00:00Z") #time start, time end
                        )
print "VNP14(hi-res) gets %s hits in this range" % VNP14hiresgranules.hits()
VNP14hiresgranules = fire.get(10)

#VNP03MODLL: geolocation data for VNP14
VNP03granules = fire.parameters(
                        short_name="VNP03MODLL",
                        downloadable=True,
                        polygon=[(-109.0507527,40.99898), (-109.0698568,37.0124375), (-102.0868788,36.9799819), (-102.0560592,40.999126), (-109.0507527,40.99898)],
                        temporal=("2017-01-01T00:00:00Z", "2017-01-07T00:00:00Z") #time start, time end
                        )
print "VNP03 gets %s hits in this range" % VNP03granules.hits()
VNP03granules = fire.get(10)

data = []

def download(granules):
    for granule in granules:
        url = granule['links'][0]['href']
        filename = os.path.basename(urlparse.urlsplit(url).path)

        # to store as an object in memory (maybe not completely downloaded until accessed?)
        #with requests.Session() as s:
        #    data.append(s.get(url))

        # download - minimal code without the various error checking and corrective actions,
        # see wrfxpy/src/ingest/downloader.py
        try:
            chunk_size = 1024*1024
            s = 0
            r = requests.get(url, stream=True)
            if r.status_code == 200:
                content_size = int(r.headers['Content-Length'])
                print 'downloading %s as %s size %sB' % (url, filename, content_size)
                # stream the file to disk in 1MB chunks, reporting progress after each chunk
                with open(filename, 'wb') as f:
                    for chunk in r.iter_content(chunk_size):
                        f.write(chunk)
                        s = s + len(chunk)
                        print 'downloaded %sB of %sB' % (s, content_size)
            else:
                print 'cannot connect to %s' % url
                print 'web request status code %s' % r.status_code
                print 'Make sure you have the file ~/.netrc with permission 600 and the content'
                print 'machine urs.earthdata.nasa.gov\nlogin yourusername\npassword yourpassword'
                sys.exit(1)
        except Exception as e:
            print 'download failed with error %s' % e
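
# A hedged helper sketch (not part of the original script; the list_urls name is an
# assumption): list the data URLs of the selected granules without fetching anything,
# as a dry run before the large downloads below. Like download() above, it assumes
# each granule's first link is the data URL.
def list_urls(granules):
    for granule in granules:
        url = granule['links'][0]['href']
        print '%s -> %s' % (url, os.path.basename(urlparse.urlsplit(url).path))

#list_urls(MOD03granules)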

'''
BE CAREFUL!! - the script below triggers automatic download of VERY
LARGE .hdf files!
'''

#MOD14 = download(MOD14granules)
MOD03 = download(MOD03granules)
VNP14 = download(VNP14granules)
VNP14hires = download(VNP14hiresgranules)
VNP03 = download(VNP03granules)
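
# A hedged sanity check (an assumption, not part of the original workflow): since
# api.get(10)/fire.get(10) return plain lists, authentication and connectivity can be
# tested on a single granule before committing to the full transfers above, e.g.:
#download(MOD03granules[:1])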