
Commit

initial commit
gokulprathin8 authored Jun 13, 2023
0 parents commit 68c2f95
Showing 15 changed files with 695 additions and 0 deletions.
69 changes: 69 additions & 0 deletions code/plot_global_avg_sea_level.py
@@ -0,0 +1,69 @@
import os
import numpy as np
from pyproj import Geod
from matplotlib import pyplot as plt
import earthaccess
import xarray as xr

auth = earthaccess.login(strategy="netrc", persist=True)

granules = []

# grab one granule from the May window of each year in the record
for year in range(1990, 2019):
    granule = earthaccess.granule_query().short_name("SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205").temporal(f"{year}-05", f"{year}-06").get(1)
    if len(granule) > 0:
        granules.append(granule[0])
print(f"Total granules: {len(granules)}")

def ssl_area(lats):
    """Calculate the area (m^2) of a 1/6 by 1/6 degree box centered at each latitude in `lats`."""
    # Define WGS84 as CRS:
    geod = Geod(ellps='WGS84')
    dx = 1 / 12.0
    # signed area of the 1/6-degree cell centered on each latitude
    c_area = lambda lat: geod.polygon_area_perimeter(np.r_[-dx, dx, dx, -dx], lat + np.r_[-dx, -dx, dx, dx])[0]
    out = []
    for lat in lats:
        out.append(c_area(lat))
    return np.array(out)

ds = xr.open_mfdataset(earthaccess.open(granules), chunks={})

ssh_area = ssl_area(ds.Latitude.data).reshape(-1,1)
print(ssh_area.shape)

plt.rcParams["figure.figsize"] = (16,4)

# Create a blank image with the desired dimensions
blank_img = np.ones((100, 100, 3)) # Adjust the dimensions as needed

fig, axs = plt.subplots()
plt.grid(True)

def global_mean(SLA, **kwargs):
    # area-weighted mean: SLA/SLA is 1 over valid (non-NaN) cells, so the denominator
    # only counts observed ocean area; the factor of 1000 converts meters to millimeters
    dout = ((SLA * ssh_area).sum() / (SLA / SLA * ssh_area).sum()) * 1000
    return dout

result = ds.SLA.groupby('Time').apply(global_mean)

plt.xlabel('Time (year)', fontsize=16)
plt.ylabel('Global Mean SLA (mm)', fontsize=12)
# axs.imshow(img, aspect='auto')
plt.grid(True)



#historic_ts=xr.open_dataset('https://opendap.jpl.nasa.gov/opendap/allData/homage/L4/gmsl/global_timeseries_measures.nc')

result.plot(ax=axs, color="orange", marker="o", label='satellite record')

#historic_ts['global_average_sea_level_change'].plot(ax=axs, label='Historical in-situ record', color="lightblue")

# Use the blank image as the background
#x0, x1 = axs.get_xlim()
#y0, y1 = axs.get_ylim()
# axs.imshow(blank_img, extent=[x0, x1, y0, y1], aspect='auto')

plt.legend()
home_dir = os.path.expanduser('~')
fig.figure.savefig(os.path.join(home_dir, 'global_sea_level_avg.png'))
print('end of workflow run')
exit(0)
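The area weighting in this script is easy to sanity-check in isolation. Below is a minimal sketch with synthetic values (the latitudes and anomaly field are made up, not taken from the SEA_SURFACE_HEIGHT granules) confirming that the 1/6-degree cell area shrinks toward the poles and that NaN cells drop out of both the numerator and the denominator of the weighted mean:

import numpy as np
from pyproj import Geod

def ssl_area(lats):
    # same construction as in the script above: signed area of a 1/6 x 1/6 degree box
    geod = Geod(ellps='WGS84')
    dx = 1 / 12.0
    return np.array([geod.polygon_area_perimeter(np.r_[-dx, dx, dx, -dx],
                                                 lat + np.r_[-dx, -dx, dx, dx])[0]
                     for lat in lats])

lats = np.array([0.0, 30.0, 60.0, 80.0])            # made-up latitudes
areas = ssl_area(lats)
print(areas / areas[0])                             # cell area shrinks roughly like cos(latitude)

sla = np.array([[0.01], [0.02], [np.nan], [0.03]])  # synthetic anomalies in meters
weights = areas.reshape(-1, 1)
valid_area = np.where(np.isnan(sla), 0.0, weights)  # only count area where SLA is observed
gmsl_mm = np.nansum(sla * weights) / valid_area.sum() * 1000
print(f"area-weighted global mean: {gmsl_mm:.2f} mm")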

25 changes: 25 additions & 0 deletions code/process.json
@@ -0,0 +1,25 @@
[{
"id" : "bs1l0r",
"name" : "sea_level_utils",
"description" : null,
"code" : "import earthaccess\nfrom pprint import pprint\n\nauth = earthaccess.login(strategy=\"netrc\", persist=True)\n\n# We'll get 4 collections that match with our keywords\ncollections = earthaccess.collection_query().keyword(\"SEA SURFACE HEIGHT\").cloud_hosted(True).get(4)\n\n# Let's print 2 collections\nfor collection in collections[0:2]:\n # pprint(collection.summary())\n print(pprint(collection.summary()), collection.abstract(), \"\\n\", collection[\"umm\"][\"DOI\"], \"\\n\\n\")\n \n\ngranules = earthaccess.granule_query().short_name(\"SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205\").temporal(\"2017-01\",\"2018-01\").get()\nprint(len(granules))\n\n# the collection is cloud hosted, but we can access it out of AWS with the regular HTTPS URL\ngranules[0].data_links(access=\"external\")\ngranules[0].data_links(access=\"direct\")",
"lang" : "python",
"owner" : "111111",
"confidential" : "FALSE"
},{
"id" : "qeb9yn",
"name" : "xarray_earth_access",
"description" : null,
"code" : "import os\nimport xarray as xr\nimport matplotlib.pyplot as plt\nimport earthaccess\n\ngranules = []\nhome_dir = os.path.expanduser('~')\n\nauth = earthaccess.login(strategy=\"netrc\", persist=True)\n\n# we just grab 1 granule from May for each year of the dataset\nfor year in range(1990, 2019):\n granule = earthaccess.granule_query().short_name(\"SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205\").temporal(f\"{year}-05\", f\"{year}-06\").get(1)\n if len(granule)>0:\n granules.append(granule[0])\nprint(f\"Total granules: {len(granules)}\")\n\nds = xr.open_mfdataset(earthaccess.open(granules), chunks={})\nplot = ds.SLA.where((ds.SLA>=0) & (ds.SLA < 10)).std('Time').plot(figsize=(14,6), x='Longitude', y='Latitude')\nhome_dir = os.path.expanduser('~')\nplot.figure.savefig(os.path.join(home_dir, 'sla_plot.png'))\n",
"lang" : "python",
"owner" : "111111",
"confidential" : "FALSE"
},{
"id" : "x7coy2",
"name" : "plot_global_avg_sea_level",
"description" : null,
"code" : "import os\nimport numpy as np\nfrom pyproj import Geod\nfrom matplotlib import pyplot as plt\nimport earthaccess\nimport xarray as xr\n\nauth = earthaccess.login(strategy=\"netrc\", persist=True)\n\ngranules = []\n\nfor year in range(1990, 2019):\n granule = earthaccess.granule_query().short_name(\"SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205\").temporal(f\"{year}-05\", f\"{year}-06\").get(1)\n if len(granule)>0:\n granules.append(granule[0])\nprint(f\"Total granules: {len(granules)}\")\n\ndef ssl_area(lats):\n # Define WGS84 as CRS:\n geod = Geod(ellps='WGS84')\n dx=1/12.0\n c_area=lambda lat: geod.polygon_area_perimeter(np.r_[-dx,dx,dx,-dx], lat+np.r_[-dx,-dx,dx,dx])[0]\n out=[]\n for lat in lats:\n out.append(c_area(lat))\n return np.array(out)\nds = xr.open_mfdataset(earthaccess.open(granules), chunks={})\n\nssh_area = ssl_area(ds.Latitude.data).reshape(-1,1)\nprint(ssh_area.shape)\n\nplt.rcParams[\"figure.figsize\"] = (16,4)\n\n# Create a blank image with the desired dimensions\nblank_img = np.ones((100, 100, 3)) # Adjust the dimensions as needed\n\nfig, axs = plt.subplots()\nplt.grid(True)\n\ndef global_mean(SLA, **kwargs):\n dout=((SLA*ssh_area).sum()/(SLA/SLA*ssh_area).sum())*1000\n return dout\n\nresult = ds.SLA.groupby('Time').apply(global_mean)\n\nplt.xlabel('Time (year)',fontsize=16)\nplt.ylabel('Global Mean SLA (meter)',fontsize=12)\n# axs.imshow(img, aspect='auto')\nplt.grid(True)\n\n\n\n#historic_ts=xr.open_dataset('https://opendap.jpl.nasa.gov/opendap/allData/homage/L4/gmsl/global_timeseries_measures.nc')\n\nresult.plot(ax=axs, color=\"orange\", marker=\"o\", label='satellite record')\n\n#historic_ts['global_average_sea_level_change'].plot(ax=axs, label='Historical in-situ record', color=\"lightblue\")\n\n# Use the blank image as the background\n#x0, x1 = axs.get_xlim()\n#y0, y1 = axs.get_ylim()\n# axs.imshow(blank_img, extent=[x0, x1, y0, y1], aspect='auto')\n\nplt.legend()\nhome_dir = os.path.expanduser('~')\nfig.figure.savefig(os.path.join(home_dir, 'global_sea_level_avg.png'))\nexit(0)\nprint('end of workflow run')\n",
"lang" : "python",
"owner" : "111111",
"confidential" : "FALSE"
}]
20 changes: 20 additions & 0 deletions code/sea_level_utils.py
@@ -0,0 +1,20 @@
import earthaccess
from pprint import pprint

auth = earthaccess.login(strategy="netrc", persist=True)

# We'll get 4 collections that match with our keywords
collections = earthaccess.collection_query().keyword("SEA SURFACE HEIGHT").cloud_hosted(True).get(4)

# Let's print 2 collections
for collection in collections[0:2]:
    # pprint prints the summary itself (and returns None), so call it separately
    pprint(collection.summary())
    print(collection.abstract(), "\n", collection["umm"]["DOI"], "\n\n")


granules = earthaccess.granule_query().short_name("SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205").temporal("2017-01","2018-01").get()
print(len(granules))

# the collection is cloud hosted, but we can access it outside AWS with the regular HTTPS URL
print(granules[0].data_links(access="external"))
print(granules[0].data_links(access="direct"))
22 changes: 22 additions & 0 deletions code/xarray_earth_access.py
@@ -0,0 +1,22 @@
import os
import xarray as xr
import matplotlib.pyplot as plt
import earthaccess

granules = []
home_dir = os.path.expanduser('~')

auth = earthaccess.login(strategy="netrc", persist=True)

# we just grab 1 granule from May for each year of the dataset
for year in range(1990, 2019):
    granule = earthaccess.granule_query().short_name("SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205").temporal(f"{year}-05", f"{year}-06").get(1)
    if len(granule) > 0:
        granules.append(granule[0])
print(f"Total granules: {len(granules)}")

ds = xr.open_mfdataset(earthaccess.open(granules), chunks={})
plot = ds.SLA.where((ds.SLA>=0) & (ds.SLA < 10)).std('Time').plot(figsize=(14,6), x='Longitude', y='Latitude')
plot.figure.savefig(os.path.join(home_dir, 'sla_plot.png'))
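An optional regional cut, assuming it is appended to this script so that ds and home_dir already exist; the bounding box, the 0-360 longitude convention, and the output filename are illustrative assumptions, not part of the repository:

# rough North Atlantic box; slice() assumes Latitude ascends and Longitude runs 0-360
subset = ds.SLA.sel(Latitude=slice(20, 60), Longitude=slice(280, 340))
regional_plot = subset.mean('Time').plot(figsize=(10, 5), x='Longitude', y='Latitude')
regional_plot.figure.savefig(os.path.join(home_dir, 'sla_north_atlantic.png'))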

31 changes: 31 additions & 0 deletions history/7eQhagsEQ4Kh3IhUEy.json

Large diffs are not rendered by default.

61 changes: 61 additions & 0 deletions history/96tqhttgag3b6wbi1fgb.json
@@ -0,0 +1,61 @@
[{
"history_id" : "MUTApn76ZZng4WEhJX",
"history_input" : "bs1l0r-A8HlU;qeb9yn-I560u;x7coy2-FZrV7;",
"history_output" : "58nbhrn6sen;i607mz6qh1a;ghhj2iygcqz;",
"history_begin_time" : 1686645385612,
"history_end_time" : 1686645438009,
"history_notes" : null,
"history_process" : "96tqhttgag3b6wbi1fgb",
"host_id" : "100001;",
"indicator" : "Done"
},{
"history_id" : "N7VHW5pjtB7SoIS8vt",
"history_input" : "bs1l0r-A8HlU;qeb9yn-I560u;x7coy2-FZrV7;",
"history_output" : "ruilg4dza9x;a5hp21gwurw;7ww57jgdzhh;",
"history_begin_time" : 1686645222900,
"history_end_time" : 1686645275078,
"history_notes" : null,
"history_process" : "96tqhttgag3b6wbi1fgb",
"host_id" : "100001;",
"indicator" : "Failed"
},{
"history_id" : "7eQhagsEQ4Kh3IhUEy",
"history_input" : "bs1l0r-A8HlU;qeb9yn-I560u;x7coy2-u0gwX;",
"history_output" : "1jl1jynpq7f;f0mvyirp48r;zc0ksz22c14;",
"history_begin_time" : 1686641676846,
"history_end_time" : 1686641725403,
"history_notes" : null,
"history_process" : "96tqhttgag3b6wbi1fgb",
"host_id" : "100001;",
"indicator" : "Done"
},{
"history_id" : "bTSiZpDI4OAG58L4uE",
"history_input" : "bs1l0r-A8HlU;qeb9yn-I560u;x7coy2-u0gwX;",
"history_output" : "as43k46i7l1;0mzf8tvid4b;k2ivcq8whph;",
"history_begin_time" : 1686637742040,
"history_end_time" : 1686637803323,
"history_notes" : null,
"history_process" : "96tqhttgag3b6wbi1fgb",
"host_id" : "100001;",
"indicator" : "Failed"
},{
"history_id" : "PNfWleHNQ5iEgcnExT",
"history_input" : "bs1l0r-A8HlU;qeb9yn-I560u;x7coy2-u0gwX;",
"history_output" : "yuk599baysa;107lcfedv97;wlqgqbknv3c;",
"history_begin_time" : 1686637661075,
"history_end_time" : 1686637675376,
"history_notes" : null,
"history_process" : "96tqhttgag3b6wbi1fgb",
"host_id" : "100001;",
"indicator" : "Failed"
},{
"history_id" : "dRxz3L2r1yNzr3LXN1",
"history_input" : "bs1l0r-A8HlU;qeb9yn-I560u;",
"history_output" : "hht4iw5pb4v;nctm6mld8g8;",
"history_begin_time" : 1686637223483,
"history_end_time" : 1686637223999,
"history_notes" : null,
"history_process" : "96tqhttgag3b6wbi1fgb",
"host_id" : "100001;",
"indicator" : "Done"
}]
31 changes: 31 additions & 0 deletions history/MUTApn76ZZng4WEhJX.json

Large diffs are not rendered by default.

31 changes: 31 additions & 0 deletions history/N7VHW5pjtB7SoIS8vt.json

Large diffs are not rendered by default.

31 changes: 31 additions & 0 deletions history/PNfWleHNQ5iEgcnExT.json
@@ -0,0 +1,31 @@
[{
"history_id" : "yuk599baysa",
"history_input" : "import earthaccess\nfrom pprint import pprint\n\nauth = earthaccess.login(strategy=\"netrc\", persist=True)\n\n# We'll get 4 collections that match with our keywords\ncollections = earthaccess.collection_query().keyword(\"SEA SURFACE HEIGHT\").cloud_hosted(True).get(4)\n\n# Let's print 2 collections\nfor collection in collections[0:2]:\n # pprint(collection.summary())\n print(pprint(collection.summary()), collection.abstract(), \"\\n\", collection[\"umm\"][\"DOI\"], \"\\n\\n\")\n \n\ngranules = earthaccess.granule_query().short_name(\"SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205\").temporal(\"2017-01\",\"2018-01\").get()\nprint(len(granules))\n\n# the collection is cloud hosted, but we can access it out of AWS with the regular HTTPS URL\ngranules[0].data_links(access=\"external\")\ngranules[0].data_links(access=\"direct\")",
"history_output" : "You're now authenticated with NASA Earthdata Login\nUsing token with expiration date: 08/07/2023\nUsing .netrc file for EDL\n{'cloud-info': {'Region': 'us-west-2',\n 'S3BucketAndObjectPrefixNames': ['podaac-ops-cumulus-public/SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205/',\n 'podaac-ops-cumulus-protected/SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205/'],\n 'S3CredentialsAPIDocumentationURL': 'https://archive.podaac.earthdata.nasa.gov/s3credentialsREADME',\n 'S3CredentialsAPIEndpoint': 'https://archive.podaac.earthdata.nasa.gov/s3credentials'},\n 'concept-id': 'C2270392799-POCLOUD',\n 'file-type': \"[{'Format': 'netCDF-4', 'FormatType': 'Native', \"\n \"'AverageFileSize': 9.7, 'AverageFileSizeUnit': 'MB'}]\",\n 'get-data': ['https://cmr.earthdata.nasa.gov/virtual-directory/collections/C2270392799-POCLOUD',\n 'https://search.earthdata.nasa.gov/search/granules?p=C2270392799-POCLOUD'],\n 'short-name': 'SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205',\n 'version': '2205'}\nNone This dataset provides gridded Sea Surface Height Anomalies (SSHA) above a mean sea surface, on a 1/6th degree grid every 5 days. It contains the fully corrected heights, with a delay of up to 3 months. The gridded data are derived from the along-track SSHA data of TOPEX/Poseidon, Jason-1, Jason-2, Jason-3 and Jason-CS (Sentinel-6) as reference data from the level 2 along-track data found at https://podaac.jpl.nasa.gov/dataset/MERGED_TP_J1_OSTM_OST_CYCLES_V51, plus ERS-1, ERS-2, Envisat, SARAL-AltiKa, CryoSat-2, Sentinel-3A, Sentinel-3B, depending on the date, from the RADS database. The date given in the grid files is the center of the 5-day window. The grids were produced from altimeter data using Kriging interpolation, which gives best linear prediction based upon prior knowledge of covariance. \n {'DOI': '10.5067/SLREF-CDRV3', 'Authority': 'https://doi.org'} \n{'cloud-info': {'Region': 'us-west-2',\n 'S3BucketAndObjectPrefixNames': ['nsidc-cumulus-prod-protected/ATLAS/ATL21/002',\n 'nsidc-cumulus-prod-public/ATLAS/ATL21/002'],\n 'S3CredentialsAPIDocumentationURL': 'https://data.nsidc.earthdatacloud.nasa.gov/s3credentialsREADME',\n 'S3CredentialsAPIEndpoint': 'https://data.nsidc.earthdatacloud.nasa.gov/s3credentials'},\n 'concept-id': 'C2153577500-NSIDC_CPRD',\n 'file-type': \"[{'FormatType': 'Native', 'Format': 'HDF5', \"\n \"'FormatDescription': 'HTTPS'}]\",\n 'get-data': ['https://search.earthdata.nasa.gov/search?q=ATL21+V002'],\n 'short-name': 'ATL21',\n 'version': '002'}\nNone This data set contains daily and monthly gridded polar sea surface height (SSH) anomalies, derived from the along-track ATLAS/ICESat-2 L3A Sea Ice Height product (ATL10, V5). The ATL10 product identifies leads in sea ice and establishes a reference sea surface used to estimate SSH in 10 km along-track segments. ATL21 aggregates the ATL10 along-track SSH estimates and computes daily and monthly gridded SSH anomaly in NSIDC Polar Stereographic Northern and Southern Hemisphere 25 km grids. \n {'DOI': '10.5067/ATLAS/ATL21.002'} \n73\n",
"history_begin_time" : 1686637661720,
"history_end_time" : 1686637670371,
"history_notes" : null,
"history_process" : "bs1l0r",
"host_id" : "100001",
"indicator" : "Done"
},{
"history_id" : "107lcfedv97",
"history_input" : "import os\nimport xarray as xr\nimport matplotlib.pyplot as plt\n\ngranules = []\nhome_dir = os.path.expanduser('~')\n\n# we just grab 1 granule from May for each year of the dataset\nfor year in range(1990, 2019):\n granule = earthaccess.granule_query().short_name(\"SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205\").temporal(f\"{year}-05\", f\"{year}-06\").get(1)\n if len(granule)>0:\n granules.append(granule[0])\nprint(f\"Total granules: {len(granules)}\")\n\nds = xr.open_mfdataset(earthaccess.open(granules), chunks={})\nplot = ds.SLA.where((ds.SLA>=0) & (ds.SLA < 10)).std('Time').plot(figsize=(14,6), x='Longitude', y='Latitude')\nhome_dir = os.path.expanduser('~')\nplot.figure.savefig(os.path.join(home_dir, 'sla_plot.png'))\n",
"history_output" : "Traceback (most recent call last):\n File \"/home/jovyan/gw-workspace/107lcfedv97/xarray_earth_access.py\", line 10, in <module>\n granule = earthaccess.granule_query().short_name(\"SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205\").temporal(f\"{year}-05\", f\"{year}-06\").get(1)\nNameError: name 'earthaccess' is not defined\n",
"history_begin_time" : 1686637671770,
"history_end_time" : 1686637673468,
"history_notes" : null,
"history_process" : "qeb9yn",
"host_id" : "100001",
"indicator" : "Failed"
},{
"history_id" : "wlqgqbknv3c",
"history_input" : "import os\nimport numpy as np\nfrom pyproj import Geod\nfrom matplotlib import pyplot as plt\n\ndef ssl_area(lats):\n \"\"\"\n Calculate the area associated with a 1/6 by 1/6 degree box at latitude specified in 'lats'.\n \n Parameter\n ==========\n lats: a list or numpy array of size N the latitudes of interest. \n \n Return\n =======\n out: Array (N) area values (unit: m^2)\n \"\"\"\n # Define WGS84 as CRS:\n geod = Geod(ellps='WGS84')\n dx=1/12.0\n # TODO: explain this\n c_area=lambda lat: geod.polygon_area_perimeter(np.r_[-dx,dx,dx,-dx], lat+np.r_[-dx,-dx,dx,dx])[0]\n out=[]\n for lat in lats:\n out.append(c_area(lat))\n return np.array(out)\n# note: they rotated the data in the last release, this operation used to be (1,-1)\nssh_area = ssl_area(ds.Latitude.data).reshape(-1,1)\nprint(ssh_area.shape)\n\nplt.rcParams[\"figure.figsize\"] = (16,4)\n\nimg = plt.imread(\"underwater.jpg\")\n\nfig, axs = plt.subplots()\nplt.grid(True)\n\ndef global_mean(SLA, **kwargs):\n dout=((SLA*ssh_area).sum()/(SLA/SLA*ssh_area).sum())*1000\n return dout\n\nresult = ds.SLA.groupby('Time').apply(global_mean)\n\nplt.xlabel('Time (year)',fontsize=16)\nplt.ylabel('Global Mean SLA (meter)',fontsize=12)\n# axs.imshow(img, aspect='auto')\nplt.grid(True)\n\nhistoric_ts=xr.open_dataset('https://opendap.jpl.nasa.gov/opendap/allData/homage/L4/gmsl/global_timeseries_measures.nc')\n\nresult.plot(ax=axs, color=\"orange\", marker=\"o\", label='satellite record')\n\nhistoric_ts['global_average_sea_level_change'].plot(ax=axs, label='Historical in-situ record', color=\"lightblue\")\n\n\nx0,x1 = axs.get_xlim()\ny0,y1 = axs.get_ylim()\naxs.imshow(img, extent=[x0, x1, y0, y1], aspect='auto')\n\n\nplt.legend()\nhome_dir = os.path.expanduser('~')\nplt.figure.savefig(os.path.join(home_dir, 'global_sea_level_avg.png'))",
"history_output" : "Traceback (most recent call last):\n File \"/home/jovyan/gw-workspace/wlqgqbknv3c/plot_global_avg_sea_level.py\", line 28, in <module>\n ssh_area = ssl_area(ds.Latitude.data).reshape(-1,1)\nNameError: name 'ds' is not defined\n",
"history_begin_time" : 1686637675376,
"history_end_time" : 1686637676741,
"history_notes" : null,
"history_process" : "x7coy2",
"host_id" : "100001",
"indicator" : "Failed"
}]