Bump lodash from 4.17.10 to 4.17.21 in /Website #16

Open · wants to merge 3 commits into base: master
25 changes: 23 additions & 2 deletions .gitignore
@@ -1,2 +1,23 @@
-DS_Store
-*/.DS_Store
+East and Horn of Africa, .docx
+Finding-the-Nexus/.DS_Store
+Finding-the-Nexus/Bruno_parser/
+Finding-the-Nexus/FInding the nexus between climate change, conflict and forced displacement - Workshop Agenda 2019.pdf
+Finding-the-Nexus/FindTheNexusDeepLearning/Project Jetson - U. of Essex.pdf
+Finding-the-Nexus/computer-vision/Gupta_Creating_xBD_A_Dataset_for_Assessing_Building_Damage_from_Satellite_CVPRW_2019_paper.pdf
+Finding-the-Nexus/computer-vision/NDVI-Index-for-Somalia-2002-2019.pdf
+Finding-the-Nexus/computer-vision/Nachmany_Detecting_Roads_from_Satellite_Imagery_in_the_Developing_World_CVPRW_2019_paper.pdf
+Finding-the-Nexus/computer-vision/Nazir_Tiny-Inception-ResNet-v2_Using_Deep_Learning_for_Eliminating_Bonded_Labors_of_Brick_CVPRW_2019_paper.pdf
+Finding-the-Nexus/computer-vision/PERS_2002_SVI.pdf
+Finding-the-Nexus/computer-vision/related-work/UNOSAT_UNGP_machine learning satelite.pdf
+Intro to PA in forced displacement + Project Jetson.pdf
+KEN_Rivers/
+Sahel Survey Results.xlsx
+Slides_KatherineHoffman.pdf
+SomeInnovationProjects.pdf
+UNGP_ Modeling Framework - Predictive Analytics.pdf
+Website/node_modules/.DS_Store
+dataviz-somalia-prmn-master/
+remote_sensing/.DS_Store
+wikiimages/Demo_Overview.png
+DS_Store
+*/.DS_Store
Binary file added East and Horn of Africa, .docx
Binary file not shown.
Binary file added Finding-the-Nexus/.DS_Store
Binary file not shown.
202 changes: 202 additions & 0 deletions Finding-the-Nexus/Bruno_parser/example_m2m.py
@@ -0,0 +1,202 @@
# =============================================================================
# USGS/EROS Inventory Service Example
# Python - JSON API
#
# Script Last Modified: 6/17/2020
# Note: This example does not include any error handling!
#       Any request can throw an error, which can be found in the errorCode property of
#       the response (errorCode, errorMessage, and data properties are included in all responses).
#       These types of checks could be done by writing a wrapper similar to the sendRequest function below.
# Usage: python example_m2m.py -u username -p password
# =============================================================================
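# For reference, an illustrative failure envelope (hypothetical values, not
# captured from a live call) would look like:
#   {"errorCode": "AUTH_INVALID", "errorMessage": "Invalid credentials", "data": null}
# sendRequest() below exits on any non-null errorCode and otherwise returns
# only the "data" field.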

import json
import requests
import sys
import time
import argparse

# Send an HTTP POST request to an M2M endpoint and return the 'data' payload
def sendRequest(url, data, apiKey = None):
    json_data = json.dumps(data)

    if apiKey is None:
        response = requests.post(url, json_data)
    else:
        headers = {'X-Auth-Token': apiKey}
        response = requests.post(url, json_data, headers = headers)

    try:
        if response is None:
            print("No output from service")
            sys.exit()
        httpStatusCode = response.status_code
        output = json.loads(response.text)
        if output['errorCode'] is not None:
            print(output['errorCode'], "- ", output['errorMessage'])
            sys.exit()
        if httpStatusCode == 404:
            print("404 Not Found")
            sys.exit()
        elif httpStatusCode == 401:
            print("401 Unauthorized")
            sys.exit()
        elif httpStatusCode == 400:
            print("Error Code", httpStatusCode)
            sys.exit()
    except Exception as e:
        response.close()
        print(e)
        sys.exit()
    response.close()

    return output['data']
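
# The shape of the returned 'data' payload is endpoint-specific: "login" below
# yields the API key as a plain string, while the search endpoints return the
# lists and dicts that the main block indexes into.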


if __name__ == '__main__':
    # NOTE :: Passing credentials over a command line argument is not considered secure
    #         and is done here only for the purposes of this example - credential parameters
    #         should be gathered in a more secure way for production usage
    # Define the command line arguments

    # user input
    parser = argparse.ArgumentParser()
    parser.add_argument('-u', '--username', required=True, help='Username')
    parser.add_argument('-p', '--password', required=True, help='Password')

    args = parser.parse_args()

    username = args.username
    password = args.password

    print("\nRunning Scripts...\n")

    serviceUrl = "https://m2m.cr.usgs.gov/api/api/json/stable/"

    # login
    payload = {'username' : username, 'password' : password}

    apiKey = sendRequest(serviceUrl + "login", payload)

    print("API Key: " + apiKey + "\n")

    datasetName = "gls_all"

    # "mbr" = minimum bounding rectangle, defined by its lower-left and
    # upper-right corners in latitude/longitude
    spatialFilter = {'filterType' : "mbr",
                     'lowerLeft' : {'latitude' : 30, 'longitude' : -120},
                     'upperRight' : {'latitude' : 40, 'longitude' : -140}}

    temporalFilter = {'start' : '2000-12-10', 'end' : '2005-12-10'}

    payload = {'datasetName' : datasetName,
               'spatialFilter' : spatialFilter,
               'temporalFilter' : temporalFilter}
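
    # dataset-search can match more datasets than the one named above, which is
    # why the loop below checks each result's alias and skips everything but gls_all.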

print("Searching datasets...\n")
datasets = sendRequest(serviceUrl + "dataset-search", payload, apiKey)

print("Found ", len(datasets), " datasets\n")

# download datasets
for dataset in datasets:

# Because I've ran this before I know that I want GLS_ALL, I don't want to download anything I don't
# want so we will skip any other datasets that might be found, logging it incase I want to look into
# downloading that data in the future.
if dataset['datasetAlias'] != datasetName:
print("Found dataset " + dataset['collectionName'] + " but skipping it.\n")
continue

# I don't want to limit my results, but using the dataset-filters request, you can
# find additional filters

acquisitionFilter = {"end": "2005-12-10",
"start": "2000-12-10" }

payload = {'datasetName' : dataset['datasetAlias'],
'maxResults' : 2,
'startingNumber' : 1,
'sceneFilter' : {
'spatialFilter' : spatialFilter,
'acquisitionFilter' : acquisitionFilter}}

# Now I need to run a scene search to find data to download
print("Searching scenes...\n\n")

scenes = sendRequest(serviceUrl + "scene-search", payload, apiKey)
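
        # 'recordsReturned' counts the scenes in this response (capped by
        # maxResults above), and each result's 'entityId' is the identifier
        # the download endpoints below expect.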

        # Did we find anything?
        if scenes['recordsReturned'] > 0:
            # Aggregate a list of scene ids
            sceneIds = []
            for result in scenes['results']:
                # Add this scene to the list I would like to download
                sceneIds.append(result['entityId'])

            # Find the download options for these scenes
            # NOTE :: Remember the scene list cannot exceed 50,000 items!
            payload = {'datasetName' : dataset['datasetAlias'], 'entityIds' : sceneIds}

            downloadOptions = sendRequest(serviceUrl + "download-options", payload, apiKey)
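
            # With maxResults set to 2 above, the scene list stays tiny, but if
            # it could exceed the 50,000-item limit noted above, the IDs would
            # need to be batched - an illustrative sketch, not part of this script:
            #   for i in range(0, len(sceneIds), 50000):
            #       batch = sceneIds[i:i + 50000]
            #       # ...call download-options once per batch and merge the results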

            # Aggregate a list of available products
            downloads = []
            for product in downloadOptions:
                # Make sure the product is available for this scene
                if product['available']:
                    downloads.append({'entityId' : product['entityId'],
                                      'productId' : product['id']})

            # Did we find products?
            if downloads:
                requestedDownloadsCount = len(downloads)
                # set a label for the download request
                label = "download-sample"
                payload = {'downloads' : downloads,
                           'label' : label}
                # Call download-request to get the direct download URLs
                requestResults = sendRequest(serviceUrl + "download-request", payload, apiKey)

                # preparingDownloads entries have a valid link that can be used, but the data may not be immediately available
                # Call the download-retrieve method to get the downloads that are available for immediate download
                if requestResults['preparingDownloads'] is not None and len(requestResults['preparingDownloads']) > 0:
                    payload = {'label' : label}
                    moreDownloadUrls = sendRequest(serviceUrl + "download-retrieve", payload, apiKey)

                    downloadIds = []

                    for download in moreDownloadUrls['available']:
                        downloadIds.append(download['downloadId'])
                        print("DOWNLOAD: " + download['url'])

                    for download in moreDownloadUrls['requested']:
                        downloadIds.append(download['downloadId'])
                        print("DOWNLOAD: " + download['url'])

                    # Didn't get all of the requested downloads; call the download-retrieve method again, probably after 30 seconds
                    while len(downloadIds) < requestedDownloadsCount:
                        preparingDownloads = requestedDownloadsCount - len(downloadIds)
                        print("\n", preparingDownloads, "downloads are not available. Waiting for 30 seconds.\n")
                        time.sleep(30)
                        print("Trying to retrieve data\n")
                        moreDownloadUrls = sendRequest(serviceUrl + "download-retrieve", payload, apiKey)
                        for download in moreDownloadUrls['available']:
                            if download['downloadId'] not in downloadIds:
                                downloadIds.append(download['downloadId'])
                                print("DOWNLOAD: " + download['url'])

                else:
                    # Get all available downloads
                    for download in requestResults['availableDownloads']:
                        # TODO :: Implement a downloading routine
                        print("DOWNLOAD: " + download['url'])
                print("\nAll downloads are available to download.\n")
            else:
                print("Search found no results.\n")

    # Logout so the API Key cannot be used anymore
    endpoint = "logout"
    if sendRequest(serviceUrl + endpoint, None, apiKey) is None:
        print("Logged Out\n\n")
    else:
        print("Logout Failed\n\n")