```python
# Import standard Python packages
import sys
import subprocess
from IPython.display import display, JSON, HTML
import json
import pandas as pd
from pathlib import Path
import seaborn as sns

# import polars as pl

# Ask GRASS where its Python packages are and add them to the path
sys.path.append(
    subprocess.check_output(["grass", "--config", "python_path"], text=True).strip()
)

# Import the GRASS Python packages we need
import grass.script as gs
import grass.jupyter as gj
```

In this tutorial we will learn about the t.stac suite of tools for GRASS:
- `t.stac.catalog`: a tool for exploring SpatioTemporal Asset Catalog (STAC) metadata from a STAC API.
- `t.stac.collection`: a tool for exploring STAC collection metadata.
- `t.stac.item`: a tool for exploring and importing STAC item metadata and assets into GRASS.
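Each of these is a regular GRASS tool, so once a session is running (set up in Getting Started below) you can print a tool's interface description with the standard `--help` flag. A minimal sketch:

```python
!t.stac.catalog --help
```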
## Introduction
SpatioTemporal Asset Catalog (STAC)…
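For orientation, a STAC Item is a GeoJSON Feature whose `properties` carry the acquisition metadata and whose `assets` point to the actual data files. A trimmed sketch of the shape (real items carry many more fields; the URLs and values below are hypothetical):

```python
stac_item = {
    "type": "Feature",
    "id": "S2A_17SPV_20240919_0_L2A",
    "collection": "sentinel-2-l2a",
    "geometry": {"type": "Polygon", "coordinates": []},  # scene footprint (coordinates omitted)
    "properties": {
        "datetime": "2024-09-19T16:01:22Z",  # hypothetical acquisition time
        "eo:cloud_cover": 3.2,               # hypothetical cloud cover percentage
    },
    "assets": {
        "red": {"href": "https://example.com/B04.tif"},  # hypothetical asset URLs
        "nir": {"href": "https://example.com/B08.tif"},
    },
}
```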
The tutorial assumes you already have GRASS installed on your machine. If not, please download and install GRASS before continuing the tutorial.
## Getting Started

### Start GRASS Session

Create a GRASS project

```python
gs.create_project("stac", epsg="32119", overwrite=True)
```

and start a GRASS session.

```python
session = gj.init("stac")
```

### Install Addon
The t.stac addons require the following Python packages.
You can install them using pip or the Python package manager of your choice.
```bash
pip install pystac pystac_client tqdm
```

Now you can install the t.stac extensions.

```python
gs.run_command("g.extension", extension="t.stac")
```

### Define computational region
```python
gs.run_command(
    "g.region", n=236687, s=210391, e=616042, w=598921, nsres=10, ewres=10, flags="pa"
)
```

## STAC API

```python
stac_url = "https://earth-search.aws.element84.com/v1/"
```

## Searching STAC Catalogs
```python
catalogs_json = gs.parse_command(
    "t.stac.catalog", url=stac_url, format="json"
)
df_catalogs = pd.json_normalize(catalogs_json)
df_catalogs.head()
```

```python
!t.stac.catalog url={stac_url} format=plain -b
```

## Collection Metadata
```python
items_json = gs.parse_command(
    "t.stac.item",
    url=stac_url,
    collection_id="sentinel-2-l2a",
    flags="m",
    format="json",
)
df_items = pd.json_normalize(items_json, max_level=0)
display(df_items.T.head(5))
```

```python
!t.stac.item url={stac_url} collection_id="sentinel-2-l2a" format=plain -m
```

## Query Items by datetime and display table
```python
items_json = gs.parse_command(
    "t.stac.item",
    url=stac_url,
    collection_id="sentinel-2-l2a",
    flags="i",
    datetime="2024-04-01/2024-09-30",
    format="json",
)
df_items = pd.json_normalize(items_json, max_level=0)
num_items = len(df_items)
display(df_items.head(5))
```

```python
!t.stac.item url={stac_url} collection_id="sentinel-2-l2a" datetime="2024-04-01/2024-09-30" format=plain -i
```

Let's look closer at the properties of item id `S2A_17SPV_20240919_0_L2A`.
```python
df_item = df_items.query("id == 'S2A_17SPV_20240919_0_L2A'")
df_properties = df_item["properties"].apply(pd.Series)
df_item = pd.concat([df_item, df_properties], axis=1)
df_item = df_item.drop("properties", axis=1)
display(df_item.T)
```

Let's look at the item's properties.
```python
df_item = df_items.query("id == 'S2A_17SPV_20240919_0_L2A'")
df_properties = df_item["properties"].apply(pd.Series)
display(df_properties.T)
```

Now let's look at the item's assets.
```python
df_item = df_items.query("id == 'S2A_17SPV_20240919_0_L2A'")
df_assets = df_item["assets"].apply(pd.Series)
display(df_assets.T.reset_index())
```

## Assets
Each STAC Item may contain multiple asset types (e.g. red, nir, thumbnail). The snippet below filters items by cloud cover, limits the request to the red and nir assets, and lists the matching assets per item.
```python
stac_query = {"eo:cloud_cover": {"lt": 10}}
assets_json = gs.parse_command(
    "t.stac.item",
    url=stac_url,
    collection_id="sentinel-2-l2a",
    flags="a",
    datetime="2024-04-01/2024-09-30",
    asset_keys="red,nir",
    query=json.dumps(stac_query),
    format="json",
)
df_items_assets = pd.json_normalize(assets_json, max_level=0)
display(df_items_assets.head(10))
```

```python
display(assets_json)
```

## EO Parameters
STAC item properties expose a number of Earth Observation (EO) parameters that are useful for filtering and selecting scenes. Common properties you will see in Sentinel-2 and other EO STAC records include:
- `datetime`: acquisition time (ISO 8601)
- `eo:cloud_cover`: percent cloud cover (0–100)
- `eo:gsd`: ground sample distance / native resolution (meters)
- `view:sun_azimuth`, `view:sun_elevation`: sun geometry
- `proj:epsg`: EPSG code (when the projection extension is present)
- collection / platform / instrument fields: identify sensor and mission
How to use them:

- Use the `datetime` argument on `t.stac.item` to constrain time ranges (e.g. `2024-04-01/2024-09-30`).
- Use the `query` argument to pass a JSON object of property filters. Comparison operators follow the STAC filtering convention (e.g. `lt`, `lte`, `gt`, `gte`).
Example query (Python):

```python
stac_query = {
    "eo:cloud_cover": {"lt": 10},
    "eo:gsd": {"lte": 10},
    "view:sun_elevation": {"gte": 30},
}
```
Pass this to `t.stac.item` (serialized with `json.dumps`) to fetch only low-cloud, high-sun-elevation, high-resolution items. Combine it with `asset_keys` to limit the returned assets (e.g. `asset_keys="red,nir"`), and use the flags (`-a`, `-m`, `-i`) to control output detail (assets, metadata, item list).
Tip: Always inspect a few returned items (`format="json"`, or `flags="m"` / `flags="a"`) to see the exact property names used by the catalog you query; naming can vary between providers and extension support.
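For example, a minimal sketch that prints the property names on the first item returned earlier (assuming `items_json` is the list of item dictionaries from the `t.stac.item` call above):

```python
# Inspect the property names this catalog actually uses;
# assumes items_json is a non-empty list of STAC item dictionaries.
first_item = items_json[0]
print(sorted(first_item["properties"].keys()))
```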
## Create Metadata Vector
```python
stac_query = {"eo:cloud_cover": {"lt": 10}}
assets_json = gs.parse_command(
    "t.stac.item",
    url=stac_url,
    collection_id="sentinel-2-l2a",
    flags="a",
    datetime="2024-04-01/2024-09-30",
    asset_keys="red,nir",
    query=json.dumps(stac_query),
    items_vector="sentinel_2_items",
    format="json",
)
df_items_assets = pd.json_normalize(assets_json, max_level=0)
display(df_items_assets.head(5))
```

## View the items with ipyleaflet
View the STAC items with ipyleaflet (install it with `pip install ipyleaflet` if needed). Use `gj.InteractiveMap` to display the `sentinel_2_items` bounding boxes.
```python
import random

m = gj.InteractiveMap(use_region=False)
m.add_vector(
    "sentinel_2_items",
    style={"opacity": 1, "dashArray": "9", "fillOpacity": 0.1, "weight": 1},
    hover_style={"color": "white", "dashArray": "0", "fillOpacity": 0.5},
    style_callback=lambda f: {
        "color": "black",
        "fillColor": random.choice(["red", "yellow", "green", "orange"]),
    },
)
display(m.show())
```

## Download and Import Rasters
### Register STRDS
```python
import grass.script as gs
import grass.jupyter as gj

stac_query = {"eo:cloud_cover": {"lt": 10}}
items_assets_json = gs.parse_command(
    "t.stac.item",
    url=stac_url,
    collection_id="sentinel-2-l2a",
    datetime="2024-04-01/2024-09-30",
    asset_keys="red,nir",
    format="json",
    query=json.dumps(stac_query),
)
```

```python
!t.stac.item url="https://earth-search.aws.element84.com/v1/" collection_id="sentinel-2-l2a" datetime="2024-04-01/2024-09-30" asset_keys="red,nir" format="json" query='{"eo:cloud_cover": {"lt": 10}}'
```

```python
print(f"""
Download Estimate
Files: {items_assets_json["count"]}
Total Download Size: {items_assets_json["bytes"] / 1e9:.2f} GB
""")stac_query = {"eo:cloud_cover": {"lt": 10}}
collection_items_assets_json = gs.parse_command(
    "t.stac.item",
    url=stac_url,
    collection_id="sentinel-2-l2a",
    datetime="2024-04-01/2024-09-30",
    asset_keys="red,green,blue,nir",
    query=json.dumps(stac_query),
    strds_output="outputs/s2_red_nir.txt",
    format="json",
)
```

```python
print(Path("outputs/s2_red_nir.txt").read_text())
```

```python
startdate = df_items_assets["datetime"].min()
enddate = df_items_assets["datetime"].max()
print(f"Start Date: {startdate}\nEnd Date: {enddate}")
```

Create an STRDS (spatio-temporal raster dataset) to manage the Sentinel data you want to analyze with GRASS.
```python
# Create the space time dataset
gs.run_command(
    "t.create",
    output="sentinel_2",
    type="strds",
    temporaltype="absolute",
    title="Sentinel 2 Red/NIR Bands",
    description="Sentinel 2 Red/NIR Bands from 2024-04-01/2024-09-30",
    overwrite=True,
)
```

### Download the data
```python
gs.run_command(
    "t.stac.item",
    url=stac_url,
    collection_id="sentinel-2-l2a",
    datetime="2024-04-01/2024-09-30",
    asset_keys="red,green,blue,nir",
    query=json.dumps(stac_query),
    strds_output="outputs/s2_red_nir.txt",
    format="json",
    method="nearest",
    resolution="value",
    resolution_value="10",
    extent="region",
    nprocs=12,
    memory=1024 * 7,
    flags="d",
)
```

### Register the data
```python
# Register the output maps into a space time dataset
gs.run_command(
    "t.register",
    input="sentinel_2",
    file="outputs/s2_red_nir.txt",
    type="raster",
)
```

```python
gs.run_command("t.info", type="strds", input="sentinel_2")
```

Now let's view an RGB composite of one of our tiles.
```python
m = gj.Map(use_region=True)
m.d_rgb(
    red="sentinel-2-l2a.S2A_17SPV_20240909_0_L2A.red",
    green="sentinel-2-l2a.S2A_17SPV_20240909_0_L2A.green",
    blue="sentinel-2-l2a.S2A_17SPV_20240909_0_L2A.blue",
)
m.d_barscale()
m.show()
```

## Temporal Map Calculations
Run a temporal map calculation on the entire STRDS to generate an NDVI layer for each time step. Sentinel-2 L2A surface reflectance is scaled by 10000, so the expression clips values above 10000 before computing NDVI = (nir - red) / (nir + red); for instance, a pixel with nir = 3000 and red = 1000 gives NDVI = 2000 / 4000 = 0.5.
```python
gs.run_command(
    "t.rast.mapcalc",
    inputs=["sentinel_2.nir", "sentinel_2.red"],
    output="ndvi",
    basename="ndvi",
    method="follows",
    expression=(
        "float(if('sentinel_2.nir' >= 10000, 10000, 'sentinel_2.nir')"
        " - if('sentinel_2.red' >= 10000, 10000, 'sentinel_2.red'))"
        " / (if('sentinel_2.nir' >= 10000, 10000, 'sentinel_2.nir')"
        " + if('sentinel_2.red' >= 10000, 10000, 'sentinel_2.red'))"
    ),
    overwrite=True,
    flags="n",
)
```

Check that it worked

```python
gs.run_command("t.list", type="strds")
```

and plot the NDVI.
```python
import io

import matplotlib.dates as mdates
import matplotlib.pyplot as plt

# t.rast.list writes CSV, so read it into pandas directly
strds_df = pd.read_csv(
    io.StringIO(
        gs.read_command(
            "t.rast.list",
            input="ndvi",
            order="start_time",
            columns=["name", "semantic_label", "start_time", "end_time", "min", "max"],
            format="csv",
        )
    )
)
# Convert start_time to datetime for better formatting
strds_df["start_time"] = pd.to_datetime(strds_df["start_time"])
sns.lineplot(x="start_time", y="max", data=strds_df)
# Format x-axis labels for better readability
plt.gca().xaxis.set_major_formatter(mdates.DateFormatter("%Y-%m-%d"))
plt.gcf().autofmt_xdate()  # Auto-rotate date labels
plt.show()
```

```python
# One record per NDVI map, in temporal order
ndvi_list = strds_df.to_dict("records")
gs.run_command("t.rast.colors", input="ndvi", color="ndvi")
for ndvi in ndvi_list:
    map_name = ndvi.get("name")
    m = gj.Map(use_region=True, width=500)
    m.d_rast(map=map_name)
    m.d_barscale()
    m.d_legend(raster=map_name, flags="b")
    m.show()
```

```python
# Collect the NDVI raster names in temporal order
ndvi_rasters = [i.get("name") for i in ndvi_list]
```
```python
# Create Time Series Map
m = gj.SeriesMap(use_region=True)
m.add_rasters(rasters=ndvi_rasters)
m.d_barscale()
m.render()
m.show()
```
```python
def generate_map(map_name):
    """Render one NDVI map with a legend and scale bar."""
    m = gj.Map(use_region=True)
    m.d_rast(map=map_name)
    m.d_legend(raster=map_name, flags="b")
    m.d_barscale()
    return m

for raster in ndvi_rasters:
    ndvi_map = generate_map(raster)
    ndvi_map.show()
```