sabato 28 febbraio 2026

Conversione Emit L2A in Envi

Attenzione : la struttura dei file netcdf ha subito una variazione intorno al 2025 

Da EarthData si possono scaricare i dati  

https://search.earthdata.nasa.gov/search/granules?p=C2408750690-LPCLOUD

https://earth.jpl.nasa.gov/emit/data/data-portal/coverage-and-forecasts/ 

Piccola nota: bande Emit per true color

Red: ~650 nm → Band 29
Green: ~550 nm → Band 19
Blue: ~460 nm → Band 11


I dati sono in formato NetCDF ma non risultano georiferiti; si aprono tranquillamente in ESA SNAP ma la cosa migliore e' trasformarli in formato Envi per avere tutte le informazioni spettrali

 

In fondo alla pagina lo script per effettuare l'operazione... attenzione che a seconda che l'orbita sia discendente od ascendente si deve ruotare la matrice di 90 gradi

Il file viene georiferito nel senso che ogni pixel ha le sue coordinate ma l'immagine non e' ruotata come dovrebbe essere


 

https://github.com/nasa/EMIT-Data-Resources 

import xarray as xr
import numpy as np
import rasterio
from rasterio.transform import from_origin

# --- EMIT L2A NetCDF -> ENVI conversion (Grosseto scene) ---
# Open the root group (reflectance cube + geotransform attribute) and the
# 'sensor_band_parameters' group (wavelengths / FWHM) of the same file.
ds = xr.open_dataset('grosseto2.nc', engine='netcdf4')
ds_bands = xr.open_dataset('grosseto2.nc', engine='netcdf4', group='sensor_band_parameters')
wavelengths = ds_bands['wavelengths'].values
fwhm = ds_bands['fwhm'].values

# GDAL-style geotransform stored as a global attribute:
# (x_min, x_res, rot, y_max, rot, -y_res).
gt = ds.attrs['geotransform']
reflectance = ds['reflectance'].values
# Reorder (rows, cols, bands) -> (bands, rows, cols), the layout rasterio writes.
data = np.transpose(reflectance, (2, 0, 1))
# Rotate the spatial plane by 90 deg: the raw swath is not north-up, and the
# required rotation direction depends on ascending vs descending orbit
# (adjust k accordingly for the other case).
data = np.rot90(data, k=1, axes=(1, 2))

nrows, ncols = data.shape[1], data.shape[2]
nbands = data.shape[0]

transform = from_origin(gt[0], gt[3], gt[1], abs(gt[5]))

# Write the cube as an ENVI raster (GDAL also emits a companion .hdr).
with rasterio.open(
    'grosseto2.envi', 'w',
    driver='ENVI',
    height=nrows, width=ncols,
    count=nbands,
    dtype=data.dtype,
    crs='EPSG:4326',
    transform=transform,
) as dst:
    dst.write(data)

# Rewrite the entire .hdr cleanly so ENVI picks up the spectral metadata
# (wavelengths, FWHM, band names) in addition to the map info.
with open('grosseto2.hdr', 'w') as hdr:
    hdr.write('ENVI\n')
    hdr.write('description = { EMIT L2A Reflectance - Grosseto }\n')
    hdr.write(f'samples = {ncols}\n')
    hdr.write(f'lines = {nrows}\n')
    hdr.write(f'bands = {nbands}\n')
    hdr.write('header offset = 0\n')
    hdr.write('file type = ENVI Standard\n')
    hdr.write('data type = 4\n')  # ENVI data type 4 = 32-bit float
    hdr.write('interleave = bsq\n')
    hdr.write('byte order = 0\n')
    hdr.write(f'map info = {{Geographic Lat/Lon, 1, 1, {gt[0]}, {gt[3]}, {gt[1]}, {abs(gt[5])}, WGS-84}}\n')
    hdr.write('wavelength units = Nanometers\n')
    hdr.write('wavelength = {\n')
    hdr.write(',\n'.join(f' {w:.6f}' for w in wavelengths))
    hdr.write('}\n')
    hdr.write('fwhm = {\n')
    hdr.write(',\n'.join(f' {f:.6f}' for f in fwhm))
    hdr.write('}\n')
    hdr.write('band names = {\n')
    hdr.write(',\n'.join(f' Band_{i+1}_{w:.2f}nm' for i, w in enumerate(wavelengths)))
    hdr.write('}\n')

print(f"Done! {nbands} bands, {wavelengths[0]:.2f}-{wavelengths[-1]:.2f} nm")



 

 

Mixture-of-Experts Variational Autoencoder

Leggendo la documentazione della libreria HyperCoast ho trovato riferimento a MoE-VAE. Si tratta di una rete neurale complessa basata su


Mixture-of-Experts Variational Autoencoder for Clustering and Generating from Similarity-Based Representations on Single Cell Data 
Andreas Kopf, Vincent Fortuin, Vignesh Ram Somnath, Manfred Claassen

https://arxiv.org/abs/1910.07763

che ha il vantaggio di lavorare su una moltitudine di dati come quelli iperspettrali

 Ho provato tramite AI, con i dati di Indian Pines 

A sinistra la verita' a terra, a destra il risultato del modello

 

 

import torch
import torch.nn as nn
import torch.nn.functional as F
import scipy.io as sio
import numpy as np
import matplotlib.pyplot as plt
from torch.utils.data import DataLoader, TensorDataset
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC
from scipy.signal import medfilt2d

# --- 1. ROBUST DATA LOADING ---
def load_indian_pines():
    """Load the Indian Pines cube and ground truth and standardize the spectra.

    Returns:
        (x, gt, h, w, b): float32 tensor of standardized pixel spectra with
        shape (n_pixels, b), the ground-truth label image, the cube height,
        width and the band count after cleanup.
    """
    data = sio.loadmat('Indian_pines_corrected.mat')['indian_pines_corrected']
    gt = sio.loadmat('Indian_pines_gt.mat')['indian_pines_gt']
    h, w, b = data.shape
    # Conditional cleanup for the 219-index error: the "corrected" cube already
    # has 200 bands; only a raw >200-band cube needs these bands removed.
    if b > 200:
        ignored_bands = [103, 104, 105, 106, 107, 108, 149, 150, 151, 152,
                         153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163]
        data = np.delete(data, [i for i in ignored_bands if i < b], axis=2)
    # Flatten to (pixels, bands) and z-score each band.
    x = data.reshape(-1, data.shape[2]).astype(float)
    scaler = StandardScaler()
    x_scaled = scaler.fit_transform(x)
    return torch.tensor(x_scaled, dtype=torch.float32), gt, h, w, data.shape[2]

# --- 2. IMPROVED MODEL ---
class Expert(nn.Module):
    """A single VAE expert: MLP encoder to a diagonal-Gaussian latent, MLP decoder."""

    def __init__(self, input_dim, latent_dim):
        super().__init__()
        self.enc = nn.Sequential(nn.Linear(input_dim, 128), nn.BatchNorm1d(128), nn.ReLU(),
                                 nn.Linear(128, 64), nn.ReLU())
        self.mu = nn.Linear(64, latent_dim)
        self.logvar = nn.Linear(64, latent_dim)
        self.dec = nn.Sequential(nn.Linear(latent_dim, 64), nn.ReLU(),
                                 nn.Linear(64, 128), nn.ReLU(), nn.Linear(128, input_dim))

    def forward(self, x):
        """Encode, sample via the reparameterization trick, decode.

        Returns:
            (reconstruction, mu, logvar), each with batch dimension of x.
        """
        h = self.enc(x)
        mu, lv = self.mu(h), self.logvar(h)
        std = torch.exp(0.5 * lv)
        z = mu + torch.randn_like(std) * std  # reparameterization trick
        return self.dec(z), mu, lv

class MoESimVAE(nn.Module):
    """Mixture-of-Experts VAE: a softmax gate mixes the reconstructions of
    several independent :class:`Expert` networks."""

    def __init__(self, input_dim, latent_dim, num_experts=4):
        super().__init__()
        self.experts = nn.ModuleList([Expert(input_dim, latent_dim) for _ in range(num_experts)])
        self.gate = nn.Sequential(nn.Linear(input_dim, 64), nn.ReLU(),
                                  nn.Linear(64, num_experts), nn.Softmax(dim=-1))

    def forward(self, x):
        """Return (gated reconstruction, per-expert mus, per-expert logvars, gate weights)."""
        w = self.gate(x)  # (batch, num_experts), rows sum to 1
        mus, lvs = [], []
        final_recon = 0
        for i, exp in enumerate(self.experts):
            r, m, l = exp(x)
            # Weight each expert's reconstruction by its gate probability.
            final_recon += w[:, i].unsqueeze(1) * r
            mus.append(m)
            lvs.append(l)
        return final_recon, mus, lvs, w

# --- 3. THE "SIM" LOSS ---
def compute_loss(recon, x, mus, lvs, w, sigma=0.5):
# Reconstruction + KLD
mse = F.mse_loss(recon, x, reduction='sum')
kld = 0
comb_mu = torch.zeros_like(mus[0])
for i in range(len(mus)):
kld += w[:, i].mean() * -0.5 * torch.sum(1 + lvs[i] - mus[i].pow(2) - lvs[i].exp())
comb_mu += w[:, i].unsqueeze(1) * mus[i]

# RBF Similarity (using a subset for speed/stability)
subset_idx = torch.randperm(x.size(0))[:64]
xs, zs = x[subset_idx], comb_mu[subset_idx]
dist_x = torch.cdist(xs, xs).pow(2)
dist_z = torch.cdist(zs, zs).pow(2)
k_x = torch.exp(-dist_x / (2 * sigma**2))
k_z = torch.exp(-dist_z / (2 * sigma**2))
sim_loss = F.mse_loss(k_z, k_x) * 50.0 # High weight for Sim
# Entropy to prevent expert collapse
entropy = -torch.sum(w.mean(0) * torch.log(w.mean(0) + 1e-8))
return mse + kld + sim_loss - (0.5 * entropy)

# --- 4. TRAIN AND MAP ---
def main():
    """Train the MoE-Sim-VAE on Indian Pines, classify the latent features
    with an RBF SVM, median-filter the map and plot it next to ground truth."""
    x_tensor, gt, h, w, b = load_indian_pines()
    loader = DataLoader(TensorDataset(x_tensor), batch_size=64, shuffle=True)
    model = MoESimVAE(b, 25)
    opt = torch.optim.Adam(model.parameters(), lr=1e-3)

    print("Training... (Aiming for better separation)")
    for epoch in range(30):
        for batch in loader:
            opt.zero_grad()
            r, m, l, wg = model(batch[0])
            loss = compute_loss(r, batch[0], m, l, wg)
            loss.backward()
            opt.step()

    # Extract & Predict
    model.eval()
    with torch.no_grad():
        _, mus, _, wg = model(x_tensor)
        # Combined latent = gate-weighted mixture of expert means.
        z = sum(wg[:, i].unsqueeze(1) * mus[i] for i in range(len(mus))).numpy()
        y = gt.ravel()
        labeled = np.where(y > 0)[0]
        clf = SVC(kernel='rbf', C=10)  # RBF kernel for the classifier too
        clf.fit(z[labeled[::10]], y[labeled[::10]])  # Train on 10% of labeled pixels
        preds = clf.predict(z).reshape(h, w)
        preds[gt == 0] = 0  # mask unlabeled background
        # SPATIAL CLEANING (The secret sauce): 3x3 median filter removes
        # salt-and-pepper misclassifications.
        preds_clean = medfilt2d(preds.astype(float), kernel_size=3)
        preds_clean[gt == 0] = 0

    plt.figure(figsize=(12, 6))
    plt.subplot(1, 2, 1); plt.imshow(gt, cmap='nipy_spectral'); plt.title("Ground Truth")
    plt.subplot(1, 2, 2); plt.imshow(preds_clean, cmap='nipy_spectral'); plt.title("MoE-Sim-VAE (Cleaned)")
    plt.show()

if __name__ == "__main__":
    main()

Aviris vs Enmap

Un confronto tra spettri di punti del volo Aviris-NG zona Grosseto e Enmap

Per rendere il confronto minimamente rappresentativo e' stato preso un bersaglio antropico che non e' modificato nel tempo ed un campo sempre allo stato suolo nudo

 

Bersaglio naturale (suolo nudo)

Bersaglio Antropico

 

Bersaglio antropico

 

 

 

venerdì 27 febbraio 2026

HyperCoast Python Library

Hypercoast e' una libreria Python per leggere dati da formati iperspettrali e visualizzare spettri

L'uso migliore e' quello all'interno di Jupyter Lab

 

# Minimal HyperCoast session (meant to be run inside Jupyter Lab).
import hypercoast

# AVIRIS-NG reflectance cube (ENVI binary image; the .hdr sits next to it).
filepath = "ang20210604t105418_rfl_v2z1_img"
ds = hypercoast.read_aviris(filepath)
print(ds)
m = hypercoast.Map()
print(m)
m  # bare expression: rendered inline by the notebook — TODO confirm intent
# Composite from the requested wavelengths (nm), stretched to [vmin, vmax].
m.add_aviris(ds, wavelengths=[1000, 700, 400], vmin=0, vmax=0.2)
m.add("spectral")
display(m)  # display() is provided by the IPython/Jupyter environment


 

Aviris-NG Grosseto

 

 

 

 

Emit L2B

Il prodotto Emit L2B fornisce informazioni di tipo mineralogico

Per ogni pixel vengono indicati group_1_mineral_id, group_1_band_depth, group_2_mineral_id, group_2_band_depth   

Il gruppo 1 corrisponde a minerali ferrosi ed il gruppo 2 a carbonati ed argille

Viene selezionato lo spettro di laboratorio che ha il miglior fit per ogni pixel (non viene quindi fornita nessuna informazione su eventuali miscele, viene solo selezionato il minerale che risponde in modo piu' evidente dal punto di vista spettrale)

Nella mappa sottostante tramite Band Math

if (group_2_mineral_id == 227 AND group_2_band_depth > 0.05) then group_2_band_depth else NaN 

 

e' stato selezionato il minerale Calcite ed i colori corrispondono alla profondita' di picco (in modo molto esteso si puo' interpretare come abbondanza)

 

In un file separato viene indicato il fit statistico e l'incertezza sul valore della profondita' di picco

 

Questi sono gli ID piu' comuni ma ne sono tabellati circa 300 (lista completa)

Group 1 (risposta spettrale fino a 1000 nm - VNIR)

ID (Index)Mineral NameDescription / DetailsColor Suggestion
1Hematite (Fine)Typical red soil/rock pigment; very common.Red
2Hematite (Med/Coarse)Larger grain size; found in more weathered surfaces.Dark Red
3Goethite (Fine)Yellow-brown pigment; indicator of wetter history.Yellow/Brown
4Goethite (Med/Coarse)Stronger, more mature goethite signatures.Orange
5Hematite + GoethiteA spectral mixture of both iron oxides.Magenta
6Nanophase HematiteExtremely fine grains; common in desert varnishes.Light Pink
7LimoniteA general term for unidentified iron hydroxides.Tan
8JarositeIron-sulfate; often indicates acidic/volcanic soil.Goldenrod
9MagnetiteCommon in volcanic sands (weaker VNIR signature).Black / Grey
10FerrihydriteEarly-stage iron oxide; rare but occasionally detected.Salmon

Group 2 (Risposta spettrale oltre 1000 nm - SWIR)

ID (Index)Mineral NameSpectral Feature Location
114Kaolinite (High Crystallinity)2200 nm (Doublet)
115Kaolinite (Low Crystallinity)2200 nm (Doublet)
127Muscovite2200 (Sharp)
128Illite2210nm
141Montmorillonite2210nm +1900nm
187Gypsum1750, 1900, 2200 nm
227Calcite2330 nm
218Dolomite2320nm

 

 


Emit Images

Script per la conversione delle immagini Emit (iperspettrale 60 m di risoluzione spaziale, 285 bande, aree riprese +/- 52 gradi di latitudine)

 



import xarray as xr
import numpy as np
import rasterio
from rasterio.transform import from_origin
from rasterio.warp import reproject, Resampling

# EMIT L2A -> georeferenced ENVI raster: rotate the swath, warp each band
# onto a regular north-up lat/lon grid, then write a full ENVI header.

# --- INPUT/OUTPUT ---
nc_file = "EMIT_L2A_RFL_001_20250609T081616_2516005_003.nc"
output_img = "emit_georef.img"

# --- OPEN DATASETS (lazy) ---
ds_ref = xr.open_dataset(nc_file)
ds_params = xr.open_dataset(nc_file, group="sensor_band_parameters")
ds_loc = xr.open_dataset(nc_file, group="location")

refl = ds_ref["reflectance"]  # (downtrack, crosstrack, bands) — lazy
bands = refl.sizes["bands"]
dt = refl.sizes["downtrack"]  # original downtrack axis
ct = refl.sizes["crosstrack"]  # original crosstrack axis

print(f"Raw swath: {dt} downtrack × {ct} crosstrack × {bands} bands")

# --- WAVELENGTHS / FWHM ---
wavelengths = ds_params["wavelengths"].values
# Fall back to a constant 2.5 nm FWHM if the group does not provide it.
fwhm = ds_params["fwhm"].values if "fwhm" in ds_params else np.full_like(wavelengths, 2.5)

# --- LAT / LON ---
lat = ds_loc["lat"].values  # (downtrack, crosstrack)
lon = ds_loc["lon"].values

# Diagnose orientation from the corner coordinates.
print(f"lat corners: TL={lat[0,0]:.4f} TR={lat[0,-1]:.4f} BL={lat[-1,0]:.4f} BR={lat[-1,-1]:.4f}")
print(f"lon corners: TL={lon[0,0]:.4f} TR={lon[0,-1]:.4f} BL={lon[-1,0]:.4f} BR={lon[-1,-1]:.4f}")

# --- ROTATE 90° to fix east-west swath orientation ---
# np.rot90 on (downtrack, crosstrack) → (crosstrack, downtrack)
# After rotation: axis-0 = crosstrack (N-S), axis-1 = downtrack (E-W)
lat_r = np.rot90(lat)  # (ct, dt)
lon_r = np.rot90(lon)

rows, cols = lat_r.shape  # rows = ct, cols = dt
print(f"After rotation: {rows} rows × {cols} cols")
print(f"lat_r corners: TL={lat_r[0,0]:.4f} TR={lat_r[0,-1]:.4f} BL={lat_r[-1,0]:.4f} BR={lat_r[-1,-1]:.4f}")
print(f"lon_r corners: TL={lon_r[0,0]:.4f} TR={lon_r[0,-1]:.4f} BL={lon_r[-1,0]:.4f} BR={lon_r[-1,-1]:.4f}")

# --- TARGET REGULAR GRID ---
lat_min, lat_max = float(lat.min()), float(lat.max())
lon_min, lon_max = float(lon.min()), float(lon.max())

res = 0.000542232520256367  # ~60 m in degrees
n_rows = int(np.ceil((lat_max - lat_min) / res))
n_cols = int(np.ceil((lon_max - lon_min) / res))

print(f"Output grid: {n_cols} cols × {n_rows} rows ({n_rows*n_cols*bands*4/1e9:.2f} GB)")

# North-up destination transform: origin = top-left = (lon_min, lat_max)
dst_transform = from_origin(lon_min, lat_max, res, res)

# Source transform derived from the ROTATED lat/lon extent
src_x_res = (lon_max - lon_min) / cols
src_y_res = (lat_max - lat_min) / rows
src_transform = from_origin(lon_min, lat_max, src_x_res, src_y_res)

# --- CREATE EMPTY OUTPUT FILE ON DISK ---
with rasterio.open(
    output_img, "w",
    driver="ENVI",
    height=n_rows, width=n_cols,
    count=bands,
    dtype=np.float32,
    crs="EPSG:4326",
    transform=dst_transform,
) as dst:
    pass

# --- WARP BAND BY BAND (low memory: one band at a time) ---
with rasterio.open(output_img, "r+") as dst:
    dst_band = np.empty((n_rows, n_cols), dtype=np.float32)

    for b in range(bands):
        # Load one band: (downtrack, crosstrack)
        band_raw = refl.isel(bands=b).values.astype(np.float32)

        # Rotate to match corrected orientation: (crosstrack, downtrack)
        band_data = np.ascontiguousarray(np.rot90(band_raw))

        # Reset the reusable destination buffer to nodata before warping.
        dst_band[:] = -9999

        reproject(
            source=band_data,
            destination=dst_band,
            src_transform=src_transform,
            src_crs="EPSG:4326",
            dst_transform=dst_transform,
            dst_crs="EPSG:4326",
            resampling=Resampling.bilinear,
            src_nodata=-9999,
            dst_nodata=-9999,
        )

        dst.write(dst_band, b + 1)

        if b % 20 == 0:
            print(f" band {b+1}/{bands}")

print("Warp complete — writing HDR...")

# --- ENVI HDR ---
hdr_file = output_img.replace(".img", ".hdr")

map_info = (
    f"Geographic Lat/Lon, 1, 1, "
    f"{lon_min:.10f}, {lat_max:.10f}, "
    f"{res:.10f}, {res:.10f}, "
    f"WGS-84, units=Degrees"
)
wl_str = ", ".join(f"{w:.5f}" for w in wavelengths)
fwhm_str = ", ".join(f"{v:.5f}" for v in fwhm)
bn_str = ", ".join(f"Band {i+1}" for i in range(bands))

with open(hdr_file, "w") as f:
    f.write("ENVI\n")
    f.write(f"description = {{{output_img}}}\n")
    f.write(f"samples = {n_cols}\n")
    f.write(f"lines = {n_rows}\n")
    f.write(f"bands = {bands}\n")
    f.write("header offset = 0\n")
    f.write("file type = ENVI Standard\n")
    f.write("data type = 4\n")  # 4 = float32
    f.write("interleave = bsq\n")
    f.write("byte order = 0\n")
    f.write(f"map info = {{{map_info}}}\n")
    f.write(
        'coordinate system string = {GEOGCS["GCS_WGS_1984",'
        'DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],'
        'PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]}\n'
    )
    f.write(f"band names = {{{bn_str}}}\n")
    f.write(f"wavelength = {{{wl_str}}}\n")
    f.write("wavelength units = Nanometers\n")
    f.write(f"fwhm = {{{fwhm_str}}}\n")
    f.write("data ignore value = -9999\n")

print("✅ Done:", output_img)
# Fix: the original f-string had no separator between min and max values.
print(f" Extent: lon [{lon_min:.5f} .. {lon_max:.5f}] lat [{lat_min:.5f} .. {lat_max:.5f}]")

Kernel Panic QrCode

 In tanti anni ho visto qualche kernel panic, ma in questo formato non mi era mai successo    la cosa curiosa che al riavvio successivo ness...