common module¶
This module contains common functions shared by the folium and ipyleaflet plotting backends for interacting with the Earth Engine Python API.
PlanetaryComputerEndpoint (TitilerEndpoint)
¶
This class contains the methods for the Microsoft Planetary Computer endpoint.
Source code in geemap/common.py
class PlanetaryComputerEndpoint(TitilerEndpoint):
"""This class contains the methods for the Microsoft Planetary Computer endpoint."""
def __init__(
self,
endpoint="https://planetarycomputer.microsoft.com/api/data/v1",
name="item",
TileMatrixSetId="WebMercatorQuad",
):
"""Initialize the PlanetaryComputerEndpoint object.
Args:
endpoint (str, optional): The endpoint of the titiler server. Defaults to "https://planetarycomputer.microsoft.com/api/data/v1".
name (str, optional): The name to be used in the file path. Defaults to "item".
TileMatrixSetId (str, optional): The TileMatrixSetId to be used in the file path. Defaults to "WebMercatorQuad".
"""
super().__init__(endpoint, name, TileMatrixSetId)
def url_for_stac_collection(self):
return f"{self.endpoint}/collection/{self.TileMatrixSetId}/tilejson.json"
def url_for_collection_assets(self):
return f"{self.endpoint}/collection/assets"
def url_for_collection_bounds(self):
return f"{self.endpoint}/collection/bounds"
def url_for_collection_info(self):
return f"{self.endpoint}/collection/info"
def url_for_collection_info_geojson(self):
return f"{self.endpoint}/collection/info.geojson"
def url_for_collection_pixel_value(self, lon, lat):
return f"{self.endpoint}/collection/point/{lon},{lat}"
def url_for_collection_wmts(self):
return f"{self.endpoint}/collection/{self.TileMatrixSetId}/WMTSCapabilities.xml"
def url_for_collection_lat_lon_assets(self, lng, lat):
return f"{self.endpoint}/collection/{lng},{lat}/assets"
def url_for_collection_bbox_assets(self, minx, miny, maxx, maxy):
return f"{self.endpoint}/collection/{minx},{miny},{maxx},{maxy}/assets"
def url_for_stac_mosaic(self, searchid):
return f"{self.endpoint}/mosaic/{searchid}/{self.TileMatrixSetId}/tilejson.json"
def url_for_mosaic_info(self, searchid):
return f"{self.endpoint}/mosaic/{searchid}/info"
def url_for_mosaic_lat_lon_assets(self, searchid, lon, lat):
return f"{self.endpoint}/mosaic/{searchid}/{lon},{lat}/assets"
__init__(self, endpoint='https://planetarycomputer.microsoft.com/api/data/v1', name='item', TileMatrixSetId='WebMercatorQuad')
special
¶
Initialize the PlanetaryComputerEndpoint object.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
endpoint | str | The endpoint of the titiler server. Defaults to "https://planetarycomputer.microsoft.com/api/data/v1". | 'https://planetarycomputer.microsoft.com/api/data/v1' |
name | str | The name to be used in the file path. Defaults to "item". | 'item' |
TileMatrixSetId | str | The TileMatrixSetId to be used in the file path. Defaults to "WebMercatorQuad". | 'WebMercatorQuad' |
Source code in geemap/common.py
def __init__(
self,
endpoint="https://planetarycomputer.microsoft.com/api/data/v1",
name="item",
TileMatrixSetId="WebMercatorQuad",
):
"""Initialize the PlanetaryComputerEndpoint object.
Args:
endpoint (str, optional): The endpoint of the titiler server. Defaults to "https://planetarycomputer.microsoft.com/api/data/v1".
name (str, optional): The name to be used in the file path. Defaults to "item".
TileMatrixSetId (str, optional): The TileMatrixSetId to be used in the file path. Defaults to "WebMercatorQuad".
"""
super().__init__(endpoint, name, TileMatrixSetId)
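The snippet below is a small orientation sketch of how the URL helpers compose `endpoint`, `name`, and `TileMatrixSetId`; the coordinates are arbitrary placeholders, and the URLs shown follow directly from the f-strings in the source above.

```python
from geemap.common import PlanetaryComputerEndpoint

endpoint = PlanetaryComputerEndpoint()

# Inherited from TitilerEndpoint: tilejson URL for a single STAC item.
endpoint.url_for_stac_item()
# 'https://planetarycomputer.microsoft.com/api/data/v1/item/WebMercatorQuad/tilejson.json'

# Collection-level helpers defined on this class.
endpoint.url_for_collection_info()
# 'https://planetarycomputer.microsoft.com/api/data/v1/collection/info'
endpoint.url_for_collection_pixel_value(-76.5, 38.9)
# 'https://planetarycomputer.microsoft.com/api/data/v1/collection/point/-76.5,38.9'
```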
TitilerEndpoint
¶
This class contains the methods for the titiler endpoint.
Source code in geemap/common.py
class TitilerEndpoint:
"""This class contains the methods for the titiler endpoint."""
def __init__(
self,
endpoint="https://titiler.xyz",
name="stac",
TileMatrixSetId="WebMercatorQuad",
):
"""Initialize the TitilerEndpoint object.
Args:
endpoint (str, optional): The endpoint of the titiler server. Defaults to "https://titiler.xyz".
name (str, optional): The name to be used in the file path. Defaults to "stac".
TileMatrixSetId (str, optional): The TileMatrixSetId to be used in the file path. Defaults to "WebMercatorQuad".
"""
self.endpoint = endpoint
self.name = name
self.TileMatrixSetId = TileMatrixSetId
def url_for_stac_item(self):
return f"{self.endpoint}/{self.name}/{self.TileMatrixSetId}/tilejson.json"
def url_for_stac_assets(self):
return f"{self.endpoint}/{self.name}/assets"
def url_for_stac_bounds(self):
return f"{self.endpoint}/{self.name}/bounds"
def url_for_stac_info(self):
return f"{self.endpoint}/{self.name}/info"
def url_for_stac_info_geojson(self):
return f"{self.endpoint}/{self.name}/info.geojson"
def url_for_stac_statistics(self):
return f"{self.endpoint}/{self.name}/statistics"
def url_for_stac_pixel_value(self, lon, lat):
return f"{self.endpoint}/{self.name}/point/{lon},{lat}"
def url_for_stac_wmts(self):
return (
f"{self.endpoint}/{self.name}/{self.TileMatrixSetId}/WMTSCapabilities.xml"
)
__init__(self, endpoint='https://titiler.xyz', name='stac', TileMatrixSetId='WebMercatorQuad')
special
¶
Initialize the TitilerEndpoint object.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
endpoint | str | The endpoint of the titiler server. Defaults to "https://titiler.xyz". | 'https://titiler.xyz' |
name | str | The name to be used in the file path. Defaults to "stac". | 'stac' |
TileMatrixSetId | str | The TileMatrixSetId to be used in the file path. Defaults to "WebMercatorQuad". | 'WebMercatorQuad' |
Source code in geemap/common.py
def __init__(
self,
endpoint="https://titiler.xyz",
name="stac",
TileMatrixSetId="WebMercatorQuad",
):
"""Initialize the TitilerEndpoint object.
Args:
endpoint (str, optional): The endpoint of the titiler server. Defaults to "https://titiler.xyz".
name (str, optional): The name to be used in the file path. Defaults to "stac".
TileMatrixSetId (str, optional): The TileMatrixSetId to be used in the file path. Defaults to "WebMercatorQuad".
"""
self.endpoint = endpoint
self.name = name
self.TileMatrixSetId = TileMatrixSetId
add_crs(filename, epsg)
¶
Add a CRS to a raster dataset.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
filename | str | The filename of the raster dataset. | required |
epsg | int \| str | The EPSG code of the CRS. | required |
Source code in geemap/common.py
def add_crs(filename, epsg):
"""Add a CRS to a raster dataset.
Args:
filename (str): The filename of the raster dataset.
epsg (int | str): The EPSG code of the CRS.
"""
try:
import rasterio
except ImportError:
raise ImportError(
"rasterio is required for adding a CRS to a raster. Please install it using 'pip install rasterio'."
)
if not os.path.exists(filename):
raise ValueError("filename must exist.")
if isinstance(epsg, int):
epsg = f"EPSG:{epsg}"
elif isinstance(epsg, str):
epsg = "EPSG:" + epsg
else:
raise ValueError("epsg must be an integer or string.")
crs = rasterio.crs.CRS({"init": epsg})
with rasterio.open(filename, mode="r+") as src:
src.crs = crs
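A minimal usage sketch; `dem.tif` is a hypothetical GeoTIFF that already exists on disk but lacks CRS metadata.

```python
from geemap.common import add_crs

# Tag a GeoTIFF that is missing projection metadata with WGS84.
add_crs("dem.tif", 4326)    # EPSG code as an integer
add_crs("dem.tif", "4326")  # or the same code as a string
```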
adjust_longitude(in_fc)
¶
Adjusts longitude if it is less than -180 or greater than 180.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
in_fc | dict | The input dictionary containing coordinates. | required |
Returns:
Type | Description |
---|---|
dict | A dictionary containing the converted longitudes. |
Source code in geemap/common.py
def adjust_longitude(in_fc):
"""Adjusts longitude if it is less than -180 or greater than 180.
Args:
in_fc (dict): The input dictionary containing coordinates.
Returns:
dict: A dictionary containing the converted longitudes
"""
try:
keys = in_fc.keys()
if "geometry" in keys:
coordinates = in_fc["geometry"]["coordinates"]
if in_fc["geometry"]["type"] == "Point":
longitude = coordinates[0]
if longitude < -180:
longitude = 360 + longitude
elif longitude > 180:
longitude = longitude - 360
in_fc["geometry"]["coordinates"][0] = longitude
elif in_fc["geometry"]["type"] == "Polygon":
for index1, item in enumerate(coordinates):
for index2, element in enumerate(item):
longitude = element[0]
if longitude < -180:
longitude = 360 + longitude
elif longitude > 180:
longitude = longitude - 360
in_fc["geometry"]["coordinates"][index1][index2][0] = longitude
elif in_fc["geometry"]["type"] == "LineString":
for index, element in enumerate(coordinates):
longitude = element[0]
if longitude < -180:
longitude = 360 + longitude
elif longitude > 180:
longitude = longitude - 360
in_fc["geometry"]["coordinates"][index][0] = longitude
elif "type" in keys:
coordinates = in_fc["coordinates"]
if in_fc["type"] == "Point":
longitude = coordinates[0]
if longitude < -180:
longitude = 360 + longitude
elif longitude > 180:
longitude = longitude - 360
in_fc["coordinates"][0] = longitude
elif in_fc["type"] == "Polygon":
for index1, item in enumerate(coordinates):
for index2, element in enumerate(item):
longitude = element[0]
if longitude < -180:
longitude = 360 + longitude
elif longitude > 180:
longitude = longitude - 360
in_fc["coordinates"][index1][index2][0] = longitude
elif in_fc["type"] == "LineString":
for index, element in enumerate(coordinates):
longitude = element[0]
if longitude < -180:
longitude = 360 + longitude
elif longitude > 180:
longitude = longitude - 360
in_fc["coordinates"][index][0] = longitude
return in_fc
except Exception as e:
print(e)
return None
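For example, a GeoJSON point whose longitude has wrapped past 180 degrees is shifted back into the [-180, 180] range:

```python
from geemap.common import adjust_longitude

point = {"type": "Point", "coordinates": [190.0, 45.0]}
adjust_longitude(point)
# {'type': 'Point', 'coordinates': [-170.0, 45.0]}
```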
annual_NAIP(year, region)
¶
Create an NAIP mosaic of a specified year for a specified region.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
year | int | The specified year to create the mosaic for. | required |
region | object | ee.Geometry | required |
Returns:
Type | Description |
---|---|
object | ee.Image |
Source code in geemap/common.py
def annual_NAIP(year, region):
"""Create an NAIP mosaic of a specified year for a specified region.
Args:
year (int): The specified year to create the mosaic for.
region (object): ee.Geometry
Returns:
object: ee.Image
"""
start_date = ee.Date.fromYMD(year, 1, 1)
end_date = ee.Date.fromYMD(year, 12, 31)
collection = (
ee.ImageCollection("USDA/NAIP/DOQQ")
.filterDate(start_date, end_date)
.filterBounds(region)
)
time_start = ee.Date(
ee.List(collection.aggregate_array("system:time_start")).sort().get(0)
)
time_end = ee.Date(
ee.List(collection.aggregate_array("system:time_end")).sort().get(-1)
)
image = ee.Image(collection.mosaic().clip(region))
NDWI = ee.Image(image).normalizedDifference(["G", "N"]).select(["nd"], ["ndwi"])
NDVI = ee.Image(image).normalizedDifference(["N", "R"]).select(["nd"], ["ndvi"])
image = image.addBands(NDWI)
image = image.addBands(NDVI)
return image.set({"system:time_start": time_start, "system:time_end": time_end})
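A usage sketch, assuming `ee.Initialize()` has already been called and the chosen year and region intersect NAIP coverage; the bounding box below is an arbitrary example.

```python
import ee
from geemap.common import annual_NAIP

ee.Initialize()

# Arbitrary example region in the conterminous U.S.
region = ee.Geometry.BBox(-90.35, 38.55, -90.15, 38.70)

naip_2019 = annual_NAIP(2019, region)
naip_2019.bandNames().getInfo()
# e.g. ['R', 'G', 'B', 'N', 'ndwi', 'ndvi'] for scenes with all four NAIP bands
```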
api_docs()
¶
Open a browser and navigate to the geemap API documentation.
Source code in geemap/common.py
def api_docs():
"""Open a browser and navigate to the geemap API documentation."""
import webbrowser
url = "https://geemap.org/geemap"
webbrowser.open_new_tab(url)
arc_active_map()
¶
Get the active map in ArcGIS Pro.
Returns:
Type | Description |
---|---|
arcpy.Map | The active map in ArcGIS Pro. |
Source code in geemap/common.py
def arc_active_map():
"""Get the active map in ArcGIS Pro.
Returns:
arcpy.Map: The active map in ArcGIS Pro.
"""
if is_arcpy():
import arcpy
aprx = arcpy.mp.ArcGISProject("CURRENT")
m = aprx.activeMap
return m
else:
return None
arc_active_view()
¶
Get the active view in ArcGIS Pro.
Returns:
Type | Description |
---|---|
arcpy.MapView | The active view in ArcGIS Pro. |
Source code in geemap/common.py
def arc_active_view():
"""Get the active view in ArcGIS Pro.
Returns:
arcpy.MapView: The active view in ArcGIS Pro.
"""
if is_arcpy():
import arcpy
aprx = arcpy.mp.ArcGISProject("CURRENT")
view = aprx.activeView
return view
else:
return None
arc_add_layer(url, name=None, shown=True, opacity=1.0)
¶
Add a layer to the active map in ArcGIS Pro.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
url | str | The URL of the tile layer to add. | required |
name | str | The name of the layer. Defaults to None. | None |
shown | bool | Whether the layer is shown. Defaults to True. | True |
opacity | float | The opacity of the layer. Defaults to 1.0. | 1.0 |
Source code in geemap/common.py
def arc_add_layer(url, name=None, shown=True, opacity=1.0):
"""Add a layer to the active map in ArcGIS Pro.
Args:
url (str): The URL of the tile layer to add.
name (str, optional): The name of the layer. Defaults to None.
shown (bool, optional): Whether the layer is shown. Defaults to True.
opacity (float, optional): The opacity of the layer. Defaults to 1.0.
"""
if is_arcpy():
m = arc_active_map()
if m is not None:
m.addDataFromPath(url)
if isinstance(name, str):
layers = m.listLayers("Tiled service layer")
if len(layers) > 0:
layer = layers[0]
layer.name = name
layer.visible = shown
layer.transparency = 100 - (opacity * 100)
arc_zoom_to_extent(xmin, ymin, xmax, ymax)
¶
Zoom to an extent in ArcGIS Pro.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
xmin | float | The minimum x value of the extent. | required |
ymin | float | The minimum y value of the extent. | required |
xmax | float | The maximum x value of the extent. | required |
ymax | float | The maximum y value of the extent. | required |
Source code in geemap/common.py
def arc_zoom_to_extent(xmin, ymin, xmax, ymax):
"""Zoom to an extent in ArcGIS Pro.
Args:
xmin (float): The minimum x value of the extent.
ymin (float): The minimum y value of the extent.
xmax (float): The maximum x value of the extent.
ymax (float): The maximum y value of the extent.
"""
if is_arcpy():
import arcpy
view = arc_active_view()
if view is not None:
view.camera.setExtent(
arcpy.Extent(
xmin,
ymin,
xmax,
ymax,
spatial_reference=arcpy.SpatialReference(4326),
)
)
# if isinstance(zoom, int):
# scale = 156543.04 * math.cos(0) / math.pow(2, zoom)
# view.camera.scale = scale # Not working properly
array_mean(arr)
¶
Calculates the mean of an array along the given axis.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
arr | object | Array to calculate mean. | required |
Returns:
Type | Description |
---|---|
object | ee.Number |
Source code in geemap/common.py
def array_mean(arr):
"""Calculates the mean of an array along the given axis.
Args:
arr (object): Array to calculate mean.
Returns:
object: ee.Number
"""
total = ee.Array(arr).accum(0).get([-1])
size = arr.length()
return ee.Number(total.divide(size))
array_sum(arr)
¶
Accumulates elements of an array along the given axis.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
arr | object | Array to accumulate. | required |
Returns:
Type | Description |
---|---|
object | ee.Number |
Source code in geemap/common.py
def array_sum(arr):
"""Accumulates elements of an array along the given axis.
Args:
arr (object): Array to accumulate.
Returns:
object: ee.Number
"""
return ee.Array(arr).accum(0).get([-1])
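Both helpers reduce a 1-D array through a cumulative sum along axis 0, so they are typically fed an ee.List (or anything `ee.Array()` accepts that also supports `.length()`). A small sketch, assuming an initialized Earth Engine session; the expected values follow from the cumulative-sum logic above.

```python
import ee
from geemap.common import array_sum, array_mean

values = ee.List([2, 4, 6, 8])

array_sum(values).getInfo()   # 20
array_mean(values).getInfo()  # 5
```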
array_to_image(array, output=None, source=None, dtype=None, compress='deflate', transpose=True, cellsize=None, crs=None, driver='COG', **kwargs)
¶
Save a NumPy array as a GeoTIFF using the projection information from an existing GeoTIFF file.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
array | np.ndarray | The NumPy array to be saved as a GeoTIFF. | required |
output | str | The path to the output image. If None, a temporary file will be created. Defaults to None. | None |
source | str | The path to an existing GeoTIFF file with map projection information. Defaults to None. | None |
dtype | np.dtype | The data type of the output array. Defaults to None. | None |
compress | str | The compression method. Can be one of the following: "deflate", "lzw", "packbits", "jpeg". Defaults to "deflate". | 'deflate' |
transpose | bool | Whether to transpose the array from (bands, rows, columns) to (rows, columns, bands). Defaults to True. | True |
cellsize | float | The resolution of the output image in meters. Defaults to None. | None |
crs | str | The CRS of the output image. Defaults to None. | None |
driver | str | The driver to use for creating the output file, such as 'GTiff'. Defaults to "COG". | 'COG' |
**kwargs |  | Additional keyword arguments to be passed to the rasterio.open() function. | {} |
Source code in geemap/common.py
def array_to_image(
array,
output: str = None,
source: str = None,
dtype: str = None,
compress: str = "deflate",
transpose: bool = True,
cellsize: float = None,
crs: str = None,
driver: str = "COG",
**kwargs,
) -> str:
"""Save a NumPy array as a GeoTIFF using the projection information from an existing GeoTIFF file.
Args:
array (np.ndarray): The NumPy array to be saved as a GeoTIFF.
output (str): The path to the output image. If None, a temporary file will be created. Defaults to None.
source (str, optional): The path to an existing GeoTIFF file with map projection information. Defaults to None.
dtype (np.dtype, optional): The data type of the output array. Defaults to None.
compress (str, optional): The compression method. Can be one of the following: "deflate", "lzw", "packbits", "jpeg". Defaults to "deflate".
transpose (bool, optional): Whether to transpose the array from (bands, rows, columns) to (rows, columns, bands). Defaults to True.
cellsize (float, optional): The resolution of the output image in meters. Defaults to None.
crs (str, optional): The CRS of the output image. Defaults to None.
driver (str, optional): The driver to use for creating the output file, such as 'GTiff'. Defaults to "COG".
**kwargs: Additional keyword arguments to be passed to the rasterio.open() function.
"""
import numpy as np
import rasterio
import xarray as xr
if output is None:
return array_to_memory_file(
array, source, dtype, compress, transpose, cellsize, crs, driver, **kwargs
)
if isinstance(array, xr.DataArray):
coords = [coord for coord in array.coords]
if coords[0] == "time":
x_dim = coords[1]
y_dim = coords[2]
if array.dims[0] == "time":
array = array.isel(time=0)
array = array.rename({y_dim: "y", x_dim: "x"}).transpose("y", "x")
array = array.values
if array.ndim == 3 and transpose:
array = np.transpose(array, (1, 2, 0))
out_dir = os.path.dirname(os.path.abspath(output))
if not os.path.exists(out_dir):
os.makedirs(out_dir)
if not output.endswith(".tif"):
output += ".tif"
if source is not None:
with rasterio.open(source) as src:
crs = src.crs
transform = src.transform
if compress is None:
compress = src.compression
else:
if cellsize is None:
raise ValueError("resolution must be provided if source is not provided")
if crs is None:
raise ValueError(
"crs must be provided if source is not provided, such as EPSG:3857"
)
if "transform" not in kwargs:
# Define the geotransformation parameters
xmin, ymin, xmax, ymax = (
0,
0,
cellsize * array.shape[1],
cellsize * array.shape[0],
)
transform = rasterio.transform.from_bounds(
xmin, ymin, xmax, ymax, array.shape[1], array.shape[0]
)
else:
transform = kwargs["transform"]
if dtype is None:
# Determine the minimum and maximum values in the array
min_value = np.min(array)
max_value = np.max(array)
# Determine the best dtype for the array
if min_value >= 0 and max_value <= 1:
dtype = np.float32
elif min_value >= 0 and max_value <= 255:
dtype = np.uint8
elif min_value >= -128 and max_value <= 127:
dtype = np.int8
elif min_value >= 0 and max_value <= 65535:
dtype = np.uint16
elif min_value >= -32768 and max_value <= 32767:
dtype = np.int16
else:
dtype = np.float64
# Convert the array to the best dtype
array = array.astype(dtype)
# Define the GeoTIFF metadata
metadata = {
"driver": driver,
"height": array.shape[0],
"width": array.shape[1],
"dtype": array.dtype,
"crs": crs,
"transform": transform,
}
if array.ndim == 2:
metadata["count"] = 1
elif array.ndim == 3:
metadata["count"] = array.shape[2]
if compress is not None:
metadata["compress"] = compress
metadata.update(**kwargs)
# Create a new GeoTIFF file and write the array to it
with rasterio.open(output, "w", **metadata) as dst:
if array.ndim == 2:
dst.write(array, 1)
elif array.ndim == 3:
for i in range(array.shape[2]):
dst.write(array[:, :, i], i + 1)
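A minimal sketch of the two main calling patterns; `landsat.tif` is a hypothetical source file, and the synthetic array is only for illustration.

```python
import numpy as np
from geemap.common import array_to_image

# Synthetic single-band array. Without a source image, cellsize and crs
# must be supplied so a transform can be built.
data = np.random.randint(0, 255, size=(256, 256))
array_to_image(data, output="random.tif", cellsize=30, crs="EPSG:3857")

# Or borrow the CRS and transform from an existing GeoTIFF (hypothetical path):
# array_to_image(data, output="clipped.tif", source="landsat.tif")
```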
array_to_memory_file(array, source=None, dtype=None, compress='deflate', transpose=True, cellsize=None, crs=None, transform=None, driver='COG', **kwargs)
¶
Convert a NumPy array to a memory file.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
array | numpy.ndarray | The input NumPy array. | required |
source | str | Path to the source file to extract metadata from. Defaults to None. | None |
dtype | str | The desired data type of the array. Defaults to None. | None |
compress | str | The compression method for the output file. Defaults to "deflate". | 'deflate' |
transpose | bool | Whether to transpose the array from (bands, rows, columns) to (rows, columns, bands). Defaults to True. | True |
cellsize | float | The cell size of the array if source is not provided. Defaults to None. | None |
crs | str | The coordinate reference system of the array if source is not provided. Defaults to None. | None |
transform | tuple | The affine transformation matrix if source is not provided. Defaults to None. | None |
driver | str | The driver to use for creating the output file, such as 'GTiff'. Defaults to "COG". | 'COG' |
**kwargs |  | Additional keyword arguments to be passed to the rasterio.open() function. | {} |
Returns:
Type | Description |
---|---|
rasterio.DatasetReader | The rasterio dataset reader object for the converted array. |
Source code in geemap/common.py
def array_to_memory_file(
array,
source: str = None,
dtype: str = None,
compress: str = "deflate",
transpose: bool = True,
cellsize: float = None,
crs: str = None,
transform: tuple = None,
driver="COG",
**kwargs,
):
"""Convert a NumPy array to a memory file.
Args:
array (numpy.ndarray): The input NumPy array.
source (str, optional): Path to the source file to extract metadata from. Defaults to None.
dtype (str, optional): The desired data type of the array. Defaults to None.
compress (str, optional): The compression method for the output file. Defaults to "deflate".
transpose (bool, optional): Whether to transpose the array from (bands, rows, columns) to (rows, columns, bands). Defaults to True.
cellsize (float, optional): The cell size of the array if source is not provided. Defaults to None.
crs (str, optional): The coordinate reference system of the array if source is not provided. Defaults to None.
transform (tuple, optional): The affine transformation matrix if source is not provided. Defaults to None.
driver (str, optional): The driver to use for creating the output file, such as 'GTiff'. Defaults to "COG".
**kwargs: Additional keyword arguments to be passed to the rasterio.open() function.
Returns:
rasterio.DatasetReader: The rasterio dataset reader object for the converted array.
"""
import rasterio
import numpy as np
import xarray as xr
if isinstance(array, xr.DataArray):
coords = [coord for coord in array.coords]
if coords[0] == "time":
x_dim = coords[1]
y_dim = coords[2]
if array.dims[0] == "time":
array = array.isel(time=0)
array = array.rename({y_dim: "y", x_dim: "x"}).transpose("y", "x")
array = array.values
if array.ndim == 3 and transpose:
array = np.transpose(array, (1, 2, 0))
if source is not None:
with rasterio.open(source) as src:
crs = src.crs
transform = src.transform
if compress is None:
compress = src.compression
else:
if cellsize is None:
raise ValueError("cellsize must be provided if source is not provided")
if crs is None:
raise ValueError(
"crs must be provided if source is not provided, such as EPSG:3857"
)
if "transform" not in kwargs:
# Define the geotransformation parameters
xmin, ymin, xmax, ymax = (
0,
0,
cellsize * array.shape[1],
cellsize * array.shape[0],
)
# (west, south, east, north, width, height)
transform = rasterio.transform.from_bounds(
xmin, ymin, xmax, ymax, array.shape[1], array.shape[0]
)
else:
transform = kwargs["transform"]
if dtype is None:
# Determine the minimum and maximum values in the array
min_value = np.min(array)
max_value = np.max(array)
# Determine the best dtype for the array
if min_value >= 0 and max_value <= 1:
dtype = np.float32
elif min_value >= 0 and max_value <= 255:
dtype = np.uint8
elif min_value >= -128 and max_value <= 127:
dtype = np.int8
elif min_value >= 0 and max_value <= 65535:
dtype = np.uint16
elif min_value >= -32768 and max_value <= 32767:
dtype = np.int16
else:
dtype = np.float64
# Convert the array to the best dtype
array = array.astype(dtype)
# Define the GeoTIFF metadata
metadata = {
"driver": driver,
"height": array.shape[0],
"width": array.shape[1],
"dtype": array.dtype,
"crs": crs,
"transform": transform,
}
if array.ndim == 2:
metadata["count"] = 1
elif array.ndim == 3:
metadata["count"] = array.shape[2]
if compress is not None:
metadata["compress"] = compress
metadata.update(**kwargs)
# Create a new memory file and write the array to it
memory_file = rasterio.MemoryFile()
dst = memory_file.open(**metadata)
if array.ndim == 2:
dst.write(array, 1)
elif array.ndim == 3:
for i in range(array.shape[2]):
dst.write(array[:, :, i], i + 1)
dst.close()
# Read the dataset from memory
dataset_reader = rasterio.open(dst.name, mode="r")
return dataset_reader
bands_to_image_collection(img)
¶
Converts all bands in an image to an image collection.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
img | object | The image to convert. | required |
Returns:
Type | Description |
---|---|
object | ee.ImageCollection |
Source code in geemap/common.py
def bands_to_image_collection(img):
"""Converts all bands in an image to an image collection.
Args:
img (object): The image to convert.
Returns:
object: ee.ImageCollection
"""
collection = ee.ImageCollection(img.bandNames().map(lambda b: img.select([b])))
return collection
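A usage sketch, assuming an initialized Earth Engine session; the Landsat scene ID is simply a commonly used public asset.

```python
import ee
from geemap.common import bands_to_image_collection

ee.Initialize()

image = ee.Image("LANDSAT/LC08/C02/T1_TOA/LC08_044034_20140318")
collection = bands_to_image_collection(image)

# One single-band image per band of the input image.
collection.size().getInfo() == image.bandNames().size().getInfo()  # True
```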
bbox_coords(geometry, decimals=4)
¶
Get the bounding box coordinates of a geometry.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
geometry | ee.Geometry \| ee.FeatureCollection | The input geometry. | required |
decimals | int | The number of decimals to round to. Defaults to 4. | 4 |
Returns:
Type | Description |
---|---|
list | The bounding box coordinates in the form [west, south, east, north]. |
Source code in geemap/common.py
def bbox_coords(geometry, decimals=4):
"""Get the bounding box coordinates of a geometry.
Args:
geometry (ee.Geometry | ee.FeatureCollection): The input geometry.
decimals (int, optional): The number of decimals to round to. Defaults to 4.
Returns:
list: The bounding box coordinates in the form [west, south, east, north].
"""
if isinstance(geometry, ee.FeatureCollection):
geometry = geometry.geometry()
if geometry is not None:
if not isinstance(geometry, ee.Geometry):
raise ValueError("geometry must be an ee.Geometry.")
coords = geometry.bounds().coordinates().getInfo()[0]
x = [p[0] for p in coords]
y = [p[1] for p in coords]
west = round(min(x), decimals)
east = round(max(x), decimals)
south = round(min(y), decimals)
north = round(max(y), decimals)
return [west, south, east, north]
else:
return None
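A short sketch, assuming an initialized Earth Engine session (the function calls getInfo() internally); the bounding box is an arbitrary example.

```python
import ee
from geemap.common import bbox_coords

geom = ee.Geometry.BBox(-122.55, 37.70, -122.35, 37.85)
bbox_coords(geom)
# [-122.55, 37.7, -122.35, 37.85]  -> [west, south, east, north]
```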
bbox_to_gdf(bbox, crs='EPSG:4326')
¶
Converts a bounding box to a GeoDataFrame.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
bbox | tuple | A bounding box in the form of a tuple (minx, miny, maxx, maxy). | required |
crs | str | The coordinate reference system of the bounding box to convert to. Defaults to "EPSG:4326". | 'EPSG:4326' |
Returns:
Type | Description |
---|---|
geopandas.GeoDataFrame | A GeoDataFrame containing the bounding box. |
Source code in geemap/common.py
def bbox_to_gdf(bbox, crs="EPSG:4326"):
"""Converts a bounding box to a GeoDataFrame.
Args:
bbox (tuple): A bounding box in the form of a tuple (minx, miny, maxx, maxy).
crs (str, optional): The coordinate reference system of the bounding box to convert to. Defaults to "EPSG:4326".
Returns:
geopandas.GeoDataFrame: A GeoDataFrame containing the bounding box.
"""
check_package(name="geopandas", URL="https://geopandas.org")
from shapely.geometry import box
import geopandas as gpd
minx, miny, maxx, maxy = bbox
geometry = box(minx, miny, maxx, maxy)
d = {"geometry": [geometry]}
gdf = gpd.GeoDataFrame(d, crs="EPSG:4326")
gdf.to_crs(crs=crs, inplace=True)
return gdf
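A minimal sketch (requires geopandas and shapely); the coordinates are arbitrary examples.

```python
from geemap.common import bbox_to_gdf

bbox = (-122.55, 37.70, -122.35, 37.85)        # (minx, miny, maxx, maxy) in lon/lat
gdf = bbox_to_gdf(bbox)                        # GeoDataFrame in EPSG:4326
gdf_3857 = bbox_to_gdf(bbox, crs="EPSG:3857")  # same box reprojected to Web Mercator
```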
bbox_to_geojson(bounds)
¶
Convert coordinates of a bounding box to a geojson.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
bounds | list | A list of coordinates representing [left, bottom, right, top]. | required |
Returns:
Type | Description |
---|---|
dict | A geojson feature. |
Source code in geemap/common.py
def bbox_to_geojson(bounds):
"""Convert coordinates of a bounding box to a geojson.
Args:
bounds (list): A list of coordinates representing [left, bottom, right, top].
Returns:
dict: A geojson feature.
"""
return {
"geometry": {
"type": "Polygon",
"coordinates": [
[
[bounds[0], bounds[3]],
[bounds[0], bounds[1]],
[bounds[2], bounds[1]],
[bounds[2], bounds[3]],
[bounds[0], bounds[3]],
]
],
},
"type": "Feature",
}
blend(top_layer, bottom_layer=None, top_vis=None, bottom_vis=None, hillshade=True, expression='a*b', **kwargs)
¶
Create a blended image that is a combination of two images, e.g., DEM and hillshade. This function was inspired by Jesse Anderson. See https://github.com/jessjaco/gee-blend.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
top_layer | ee.Image | The top layer image, e.g., ee.Image("CGIAR/SRTM90_V4"). | required |
bottom_layer | ee.Image | The bottom layer image. If not specified, it will use the top layer image. | None |
top_vis | dict | The top layer image vis parameters as a dictionary. Defaults to None. | None |
bottom_vis | dict | The bottom layer image vis parameters as a dictionary. Defaults to None. | None |
hillshade | bool | Flag to use hillshade. Defaults to True. | True |
expression | str | The expression to use for the blend. Defaults to 'a*b'. | 'a*b' |
Returns:
Type | Description |
---|---|
ee.Image | The blended image. |
Source code in geemap/common.py
def blend(
top_layer,
bottom_layer=None,
top_vis=None,
bottom_vis=None,
hillshade=True,
expression="a*b",
**kwargs,
):
"""Create a blended image that is a combination of two images, e.g., DEM and hillshade. This function was inspired by Jesse Anderson. See https://github.com/jessjaco/gee-blend.
Args:
top_layer (ee.Image): The top layer image, e.g., ee.Image("CGIAR/SRTM90_V4")
bottom_layer (ee.Image, optional): The bottom layer image. If not specified, it will use the top layer image.
top_vis (dict, optional): The top layer image vis parameters as a dictionary. Defaults to None.
bottom_vis (dict, optional): The bottom layer image vis parameters as a dictionary. Defaults to None.
hillshade (bool, optional): Flag to use hillshade. Defaults to True.
expression (str, optional): The expression to use for the blend. Defaults to 'a*b'.
Returns:
ee.Image: The blended image.
"""
from box import Box
if not isinstance(top_layer, ee.Image):
raise ValueError("top_layer must be an ee.Image.")
if bottom_layer is None:
bottom_layer = top_layer
if not isinstance(bottom_layer, ee.Image):
raise ValueError("bottom_layer must be an ee.Image.")
if top_vis is not None:
if not isinstance(top_vis, dict):
raise ValueError("top_vis must be a dictionary.")
elif "palette" in top_vis and isinstance(top_vis["palette"], Box):
try:
top_vis["palette"] = top_vis["palette"]["default"]
except Exception as e:
print("The provided palette is invalid.")
raise Exception(e)
if bottom_vis is not None:
if not isinstance(bottom_vis, dict):
raise ValueError("top_vis must be a dictionary.")
elif "palette" in bottom_vis and isinstance(bottom_vis["palette"], Box):
try:
bottom_vis["palette"] = bottom_vis["palette"]["default"]
except Exception as e:
print("The provided palette is invalid.")
raise Exception(e)
if top_vis is None:
top_bands = top_layer.bandNames().getInfo()
top_vis = {"bands": top_bands}
if hillshade:
top_vis["palette"] = ["006633", "E5FFCC", "662A00", "D8D8D8", "F5F5F5"]
top_vis["min"] = 0
top_vis["max"] = 6000
if bottom_vis is None:
bottom_bands = bottom_layer.bandNames().getInfo()
bottom_vis = {"bands": bottom_bands}
if hillshade:
bottom_vis["bands"] = ["hillshade"]
top = top_layer.visualize(**top_vis).divide(255)
if hillshade:
bottom = ee.Terrain.hillshade(bottom_layer).visualize(**bottom_vis).divide(255)
else:
bottom = bottom_layer.visualize(**bottom_vis).divide(255)
if "a" not in expression or ("b" not in expression):
raise ValueError("expression must contain 'a' and 'b'.")
result = ee.Image().expression(expression, {"a": top, "b": bottom})
return result
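A usage sketch, assuming an initialized Earth Engine session; the custom palette is an arbitrary example.

```python
import ee
from geemap.common import blend

ee.Initialize()

dem = ee.Image("CGIAR/SRTM90_V4")

# Default: the DEM is rendered with the built-in elevation palette and
# multiplied ('a*b') by a hillshade derived from the same image.
shaded_relief = blend(dem)

# A custom visualization for the top layer can be supplied instead.
styled = blend(dem, top_vis={"min": 0, "max": 4000, "palette": ["006633", "E5FFCC", "662A00"]})
```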
bounds_to_xy_range(bounds)
¶
Convert bounds to x and y range to be used as input to bokeh map.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
bounds | list | A list of bounds in the form [(south, west), (north, east)] or [xmin, ymin, xmax, ymax]. | required |
Returns:
Type | Description |
---|---|
tuple | A tuple of (x_range, y_range). |
Source code in geemap/common.py
def bounds_to_xy_range(bounds):
"""Convert bounds to x and y range to be used as input to bokeh map.
Args:
bounds (list): A list of bounds in the form [(south, west), (north, east)] or [xmin, ymin, xmax, ymax].
Returns:
tuple: A tuple of (x_range, y_range).
"""
if isinstance(bounds, tuple):
bounds = list(bounds)
elif not isinstance(bounds, list):
raise TypeError("bounds must be a list")
if len(bounds) == 4:
west, south, east, north = bounds
elif len(bounds) == 2:
south, west = bounds[0]
north, east = bounds[1]
xmin, ymin = lnglat_to_meters(west, south)
xmax, ymax = lnglat_to_meters(east, north)
x_range = (xmin, xmax)
y_range = (ymin, ymax)
return x_range, y_range
build_api_tree(api_dict, output_widget, layout_width='100%')
¶
Builds an Earth Engine API tree view.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
api_dict | dict | The dictionary containing information about each Earth Engine API function. | required |
output_widget | object | An Output widget. | required |
layout_width | str | The percentage width of the widget. Defaults to '100%'. | '100%' |
Returns:
Type | Description |
---|---|
tuple | Returns a tuple containing two items: a tree Output widget and a tree dictionary. |
Source code in geemap/common.py
def build_api_tree(api_dict, output_widget, layout_width="100%"):
"""Builds an Earth Engine API tree view.
Args:
api_dict (dict): The dictionary containing information about each Earth Engine API function.
output_widget (object): An Output widget.
layout_width (str, optional): The percentage width of the widget. Defaults to '100%'.
Returns:
tuple: Returns a tuple containing two items: a tree Output widget and a tree dictionary.
"""
warnings.filterwarnings("ignore")
tree = Tree()
tree_dict = {}
names = api_dict.keys()
def handle_click(event):
if event["new"]:
name = event["owner"].name
values = api_dict[name]
with output_widget:
output_widget.outputs = ()
html_widget = widgets.HTML(value=values["html"])
display(html_widget)
for name in names:
func_list = ee_function_tree(name)
first = func_list[0]
if first not in tree_dict.keys():
tree_dict[first] = Node(first)
tree_dict[first].opened = False
tree.add_node(tree_dict[first])
for index, func in enumerate(func_list):
if index > 0:
if func not in tree_dict.keys():
node = tree_dict[func_list[index - 1]]
node.opened = False
tree_dict[func] = Node(func)
node.add_node(tree_dict[func])
if index == len(func_list) - 1:
node = tree_dict[func_list[index]]
node.icon = "file"
node.observe(handle_click, "selected")
return tree, tree_dict
build_repo_tree(out_dir=None, name='gee_repos')
¶
Builds a repo tree for a GEE account.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
out_dir | str | The output directory for the repos. Defaults to None. | None |
name | str | The output name for the repo directory. Defaults to 'gee_repos'. | 'gee_repos' |
Returns:
Type | Description |
---|---|
tuple | Returns a tuple containing a tree widget, an output widget, and a tree dictionary containing nodes. |
Source code in geemap/common.py
def build_repo_tree(out_dir=None, name="gee_repos"):
"""Builds a repo tree for GEE account.
Args:
out_dir (str): The output directory for the repos. Defaults to None.
name (str, optional): The output name for the repo directory. Defaults to 'gee_repos'.
Returns:
tuple: Returns a tuple containing a tree widget, an output widget, and a tree dictionary containing nodes.
"""
warnings.filterwarnings("ignore")
if out_dir is None:
out_dir = os.path.join(os.path.expanduser("~"))
repo_dir = os.path.join(out_dir, name)
if not os.path.exists(repo_dir):
os.makedirs(repo_dir)
URLs = {
# 'Owner': 'https://earthengine.googlesource.com/{ee_user_id()}/default',
"Writer": "",
"Reader": "https://github.com/gee-community/geemap",
"Examples": "https://github.com/giswqs/earthengine-py-examples",
"Archive": "https://earthengine.googlesource.com/EGU2017-EE101",
}
user_id = ee_user_id()
if user_id is not None:
URLs["Owner"] = f"https://earthengine.googlesource.com/{ee_user_id()}/default"
path_widget = widgets.Text(placeholder="Enter the link to a Git repository here...")
path_widget.layout.width = "475px"
clone_widget = widgets.Button(
description="Clone",
button_style="primary",
tooltip="Clone the repository to folder.",
)
info_widget = widgets.HBox()
groups = ["Owner", "Writer", "Reader", "Examples", "Archive"]
for group in groups:
group_dir = os.path.join(repo_dir, group)
if not os.path.exists(group_dir):
os.makedirs(group_dir)
example_dir = os.path.join(repo_dir, "Examples/earthengine-py-examples")
if not os.path.exists(example_dir):
clone_github_repo(URLs["Examples"], out_dir=example_dir)
left_widget, right_widget, tree_dict = file_browser(
in_dir=repo_dir,
add_root_node=False,
search_description="Filter scripts...",
use_import=True,
return_sep_widgets=True,
)
info_widget.children = [right_widget]
def handle_folder_click(event):
if event["new"]:
url = ""
selected = event["owner"]
if selected.name in URLs.keys():
url = URLs[selected.name]
path_widget.value = url
clone_widget.disabled = False
info_widget.children = [path_widget, clone_widget]
else:
info_widget.children = [right_widget]
for group in groups:
dirname = os.path.join(repo_dir, group)
node = tree_dict[dirname]
node.observe(handle_folder_click, "selected")
def handle_clone_click(b):
url = path_widget.value
default_dir = os.path.join(repo_dir, "Examples")
if url == "":
path_widget.value = "Please enter a valid URL to the repository."
else:
for group in groups:
key = os.path.join(repo_dir, group)
node = tree_dict[key]
if node.selected:
default_dir = key
try:
path_widget.value = "Cloning..."
clone_dir = os.path.join(default_dir, os.path.basename(url))
if url.find("github.com") != -1:
clone_github_repo(url, out_dir=clone_dir)
elif url.find("googlesource") != -1:
clone_google_repo(url, out_dir=clone_dir)
path_widget.value = "Cloned to {}".format(clone_dir)
clone_widget.disabled = True
except Exception as e:
path_widget.value = (
"An error occurred when trying to clone the repository " + str(e)
)
clone_widget.disabled = True
clone_widget.on_click(handle_clone_click)
return left_widget, info_widget, tree_dict
center_zoom_to_xy_range(center, zoom)
¶
Convert center and zoom to x and y range to be used as input to bokeh map.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
center | tuple | A tuple of (latitude, longitude). | required |
zoom | int | The zoom level. | required |
Returns:
Type | Description |
---|---|
tuple | A tuple of (x_range, y_range). |
Source code in geemap/common.py
def center_zoom_to_xy_range(center, zoom):
"""Convert center and zoom to x and y range to be used as input to bokeh map.
Args:
center (tuple): A tuple of (latitude, longitude).
zoom (int): The zoom level.
Returns:
tuple: A tuple of (x_range, y_range).
"""
if isinstance(center, tuple) or isinstance(center, list):
pass
else:
raise TypeError("center must be a tuple or list")
if not isinstance(zoom, int):
raise TypeError("zoom must be an integer")
latitude, longitude = center
x_range = (-179, 179)
y_range = (-70, 70)
x_full_length = x_range[1] - x_range[0]
y_full_length = y_range[1] - y_range[0]
x_length = x_full_length / 2 ** (zoom - 2)
y_length = y_full_length / 2 ** (zoom - 2)
south = latitude - y_length / 2
north = latitude + y_length / 2
west = longitude - x_length / 2
east = longitude + x_length / 2
xmin, ymin = lnglat_to_meters(west, south)
xmax, ymax = lnglat_to_meters(east, north)
x_range = (xmin, xmax)
y_range = (ymin, ymax)
return x_range, y_range
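A sketch of both range helpers, e.g. for setting up a bokeh figure; the London coordinates are arbitrary examples.

```python
from geemap.common import bounds_to_xy_range, center_zoom_to_xy_range

# From explicit bounds [xmin, ymin, xmax, ymax] in lon/lat:
x_range, y_range = bounds_to_xy_range([-0.5, 51.3, 0.3, 51.7])

# Or from a (latitude, longitude) center and a zoom level:
x_range, y_range = center_zoom_to_xy_range((51.5, -0.13), 10)

# Both return Web Mercator (meter) ranges for a bokeh figure's x_range/y_range.
```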
check_basemap(basemap)
¶
Check Google basemaps
Parameters:
Name | Type | Description | Default |
---|---|---|---|
basemap | str | The basemap name. | required |
Returns:
Type | Description |
---|---|
str | The basemap name. |
Source code in geemap/common.py
def check_basemap(basemap):
"""Check Google basemaps
Args:
basemap (str): The basemap name.
Returns:
str: The basemap name.
"""
if isinstance(basemap, str):
map_dict = {
"ROADMAP": "Google Maps",
"SATELLITE": "Google Satellite",
"TERRAIN": "Google Terrain",
"HYBRID": "Google Hybrid",
}
if basemap.upper() in map_dict.keys():
return map_dict[basemap.upper()]
else:
return basemap
else:
return basemap
check_cmap(cmap)
¶
Check the colormap and return a list of colors.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
cmap | str \| list \| Box | The colormap to check. | required |
Returns:
Type | Description |
---|---|
list | A list of colors. |
Source code in geemap/common.py
def check_cmap(cmap):
"""Check the colormap and return a list of colors.
Args:
cmap (str | list | Box): The colormap to check.
Returns:
list: A list of colors.
"""
from box import Box
from .colormaps import get_palette
if isinstance(cmap, str):
try:
palette = get_palette(cmap)
if isinstance(palette, dict):
palette = palette["default"]
return palette
except Exception as e:
try:
return check_color(cmap)
except Exception as e:
raise Exception(f"{cmap} is not a valid colormap.")
elif isinstance(cmap, Box):
return list(cmap["default"])
elif isinstance(cmap, list) or isinstance(cmap, tuple):
return cmap
else:
raise Exception(f"{cmap} is not a valid colormap.")
check_color(in_color)
¶
Checks the input color and returns the corresponding hex color code.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
in_color | str or tuple | It can be a string (e.g., 'red', '#ffff00', 'ffff00', 'ff0') or RGB tuple (e.g., (255, 127, 0)). | required |
Returns:
Type | Description |
---|---|
str | A hex color code. |
Source code in geemap/common.py
def check_color(in_color):
"""Checks the input color and returns the corresponding hex color code.
Args:
in_color (str or tuple): It can be a string (e.g., 'red', '#ffff00', 'ffff00', 'ff0') or RGB tuple (e.g., (255, 127, 0)).
Returns:
str: A hex color code.
"""
import colour
out_color = "#000000" # default black color
if isinstance(in_color, tuple) and len(in_color) == 3:
# rescale color if necessary
if all(isinstance(item, int) for item in in_color):
in_color = [c / 255.0 for c in in_color]
return colour.Color(rgb=tuple(in_color)).hex_l
else:
# try to guess the color system
try:
return colour.Color(in_color).hex_l
except Exception as e:
pass
# try again by adding an extra # (GEE handle hex codes without #)
try:
return colour.Color(f"#{in_color}").hex_l
except Exception as e:
print(
f"The provided color ({in_color}) is invalid. Using the default black color."
)
print(e)
return out_color
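A few expected conversions, following the logic above (the exact hex strings come from the colour package's long-hex output):

```python
from geemap.common import check_color

check_color("red")           # '#ff0000'
check_color("#00ff00")       # '#00ff00'
check_color("0000ff")        # '#0000ff'  (a missing '#' is added automatically)
check_color((255, 127, 0))   # '#ff7f00'  (RGB tuple rescaled to 0-1 first)
```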
check_dir(dir_path, make_dirs=True)
¶
Checks if a directory exists and creates it if it does not.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
dir_path | str | The path to the directory. | required |
make_dirs | bool | Whether to create the directory if it does not exist. Defaults to True. | True |
Exceptions:
Type | Description |
---|---|
FileNotFoundError | If the directory could not be found. |
TypeError | If the input directory path is not a string. |
Returns:
Type | Description |
---|---|
str | The path to the directory. |
Source code in geemap/common.py
def check_dir(dir_path, make_dirs=True):
"""Checks if a directory exists and creates it if it does not.
Args:
dir_path (str): The path to the directory.
make_dirs (bool, optional): Whether to create the directory if it does not exist. Defaults to True.
Raises:
FileNotFoundError: If the directory could not be found.
TypeError: If the input directory path is not a string.
Returns:
str: The path to the directory.
"""
if isinstance(dir_path, str):
if dir_path.startswith("~"):
dir_path = os.path.expanduser(dir_path)
else:
dir_path = os.path.abspath(dir_path)
if not os.path.exists(dir_path) and make_dirs:
os.makedirs(dir_path)
if os.path.exists(dir_path):
return dir_path
else:
raise FileNotFoundError("The provided directory could not be found.")
else:
raise TypeError("The provided directory path must be a string.")
check_file_path(file_path, make_dirs=True)
¶
Gets the absolute file path.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
file_path | str | The path to the file. | required |
make_dirs | bool | Whether to create the directory if it does not exist. Defaults to True. | True |
Exceptions:
Type | Description |
---|---|
FileNotFoundError | If the directory could not be found. |
TypeError | If the input directory path is not a string. |
Returns:
Type | Description |
---|---|
str | The absolute path to the file. |
Source code in geemap/common.py
def check_file_path(file_path, make_dirs=True):
"""Gets the absolute file path.
Args:
file_path (str): The path to the file.
make_dirs (bool, optional): Whether to create the directory if it does not exist. Defaults to True.
Raises:
FileNotFoundError: If the directory could not be found.
TypeError: If the input directory path is not a string.
Returns:
str: The absolute path to the file.
"""
if isinstance(file_path, str):
if file_path.startswith("~"):
file_path = os.path.expanduser(file_path)
else:
file_path = os.path.abspath(file_path)
file_dir = os.path.dirname(file_path)
if not os.path.exists(file_dir) and make_dirs:
os.makedirs(file_dir)
return file_path
else:
raise TypeError("The provided file path must be a string.")
check_git_install()
¶
Checks if Git is installed.
Returns:
Type | Description |
---|---|
bool | Returns True if Git is installed, otherwise returns False. |
Source code in geemap/common.py
def check_git_install():
"""Checks if Git is installed.
Returns:
bool: Returns True if Git is installed, otherwise returns False.
"""
import webbrowser
cmd = "git --version"
output = os.popen(cmd).read()
if "git version" in output:
return True
else:
url = "https://git-scm.com/downloads"
print(f"Git is not installed. Please download Git from {url} and install it.")
webbrowser.open_new_tab(url)
return False
check_html_string(html_string)
¶
Check if an HTML string contains local images and convert them to base64.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
html_string | str | The HTML string. | required |
Returns:
Type | Description |
---|---|
str | The HTML string with local images converted to base64. |
Source code in geemap/common.py
def check_html_string(html_string):
"""Check if an HTML string contains local images and convert them to base64.
Args:
html_string (str): The HTML string.
Returns:
str: The HTML string with local images converted to base64.
"""
import re
import base64
# Search for img tags with src attribute
img_regex = r'<img[^>]+src\s*=\s*["\']([^"\':]+)["\'][^>]*>'
for match in re.findall(img_regex, html_string):
with open(match, "rb") as img_file:
img_data = img_file.read()
base64_data = base64.b64encode(img_data).decode("utf-8")
html_string = html_string.replace(
'src="{}"'.format(match),
'src="data:image/png;base64,' + base64_data + '"',
)
return html_string
check_install(package)
¶
Checks whether a package is installed. If not, it will install the package.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
package | str | The name of the package to check. | required |
Source code in geemap/common.py
def check_install(package):
"""Checks whether a package is installed. If not, it will install the package.
Args:
package (str): The name of the package to check.
"""
import subprocess
try:
__import__(package)
# print('{} is already installed.'.format(package))
except ImportError:
print(f"{package} is not installed. Installing ...")
try:
subprocess.check_call(["python", "-m", "pip", "install", package])
except Exception as e:
print(f"Failed to install {package}")
print(e)
print(f"{package} has been installed successfully.")
check_titiler_endpoint(titiler_endpoint=None)
¶
Returns the default titiler endpoint.
Returns:
Type | Description |
---|---|
object | A titiler endpoint. |
Source code in geemap/common.py
def check_titiler_endpoint(titiler_endpoint=None):
"""Returns the default titiler endpoint.
Returns:
object: A titiler endpoint.
"""
if titiler_endpoint is None:
if os.environ.get("TITILER_ENDPOINT") is not None:
titiler_endpoint = os.environ.get("TITILER_ENDPOINT")
if titiler_endpoint == "planetary-computer":
titiler_endpoint = PlanetaryComputerEndpoint()
else:
titiler_endpoint = "https://titiler.xyz"
elif titiler_endpoint in ["planetary-computer", "pc"]:
titiler_endpoint = PlanetaryComputerEndpoint()
return titiler_endpoint
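A short sketch of the three possible outcomes, assuming the TITILER_ENDPOINT environment variable is not set; the custom URL is a hypothetical example.

```python
from geemap.common import check_titiler_endpoint, PlanetaryComputerEndpoint

check_titiler_endpoint()       # 'https://titiler.xyz'
check_titiler_endpoint("pc")   # a PlanetaryComputerEndpoint instance
check_titiler_endpoint("https://titiler.example.org")  # returned unchanged
```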
classify(data, column, cmap=None, colors=None, labels=None, scheme='Quantiles', k=5, legend_kwds=None, classification_kwds=None)
¶
Classify a dataframe column using a variety of classification schemes.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
data | str \| pd.DataFrame \| gpd.GeoDataFrame | The data to classify. It can be a filepath to a vector dataset, a pandas dataframe, or a geopandas geodataframe. | required |
column | str | The column to classify. | required |
cmap | str | The name of a colormap recognized by matplotlib. Defaults to None. | None |
colors | list | A list of colors to use for the classification. Defaults to None. | None |
labels | list | A list of labels to use for the legend. Defaults to None. | None |
scheme | str | Name of a choropleth classification scheme (requires mapclassify). A mapclassify.MapClassifier object will be used under the hood. Supported are all schemes provided by mapclassify (e.g. 'BoxPlot', 'EqualInterval', 'FisherJenks', 'FisherJenksSampled', 'HeadTailBreaks', 'JenksCaspall', 'JenksCaspallForced', 'JenksCaspallSampled', 'MaxP', 'MaximumBreaks', 'NaturalBreaks', 'Quantiles', 'Percentiles', 'StdMean', 'UserDefined'). Arguments can be passed in classification_kwds. | 'Quantiles' |
k | int | Number of classes (ignored if scheme is None or if column is categorical). Defaults to 5. | 5 |
legend_kwds | dict | Keyword arguments to pass to matplotlib.pyplot.legend or matplotlib.pyplot.colorbar. Defaults to None. | None |
classification_kwds | dict | Keyword arguments to pass to mapclassify. Defaults to None. | None |
Returns:
Type | Description |
---|---|
pd.DataFrame, dict | A pandas dataframe with the classification applied and a legend dictionary. |
Source code in geemap/common.py
def classify(
data,
column,
cmap=None,
colors=None,
labels=None,
scheme="Quantiles",
k=5,
legend_kwds=None,
classification_kwds=None,
):
"""Classify a dataframe column using a variety of classification schemes.
Args:
data (str | pd.DataFrame | gpd.GeoDataFrame): The data to classify. It can be a filepath to a vector dataset, a pandas dataframe, or a geopandas geodataframe.
column (str): The column to classify.
cmap (str, optional): The name of a colormap recognized by matplotlib. Defaults to None.
colors (list, optional): A list of colors to use for the classification. Defaults to None.
labels (list, optional): A list of labels to use for the legend. Defaults to None.
scheme (str, optional): Name of a choropleth classification scheme (requires mapclassify).
A mapclassify.MapClassifier object will be used
under the hood. Supported are all schemes provided by mapclassify (e.g.
'BoxPlot', 'EqualInterval', 'FisherJenks', 'FisherJenksSampled',
'HeadTailBreaks', 'JenksCaspall', 'JenksCaspallForced',
'JenksCaspallSampled', 'MaxP', 'MaximumBreaks',
'NaturalBreaks', 'Quantiles', 'Percentiles', 'StdMean',
'UserDefined'). Arguments can be passed in classification_kwds.
k (int, optional): Number of classes (ignored if scheme is None or if column is categorical). Default to 5.
legend_kwds (dict, optional): Keyword arguments to pass to :func:`matplotlib.pyplot.legend` or `matplotlib.pyplot.colorbar`. Defaults to None.
Additional accepted keywords when `scheme` is specified:
fmt : string
A formatting specification for the bin edges of the classes in the
legend. For example, to have no decimals: ``{"fmt": "{:.0f}"}``.
labels : list-like
A list of legend labels to override the auto-generated labels.
Needs to have the same number of elements as the number of
classes (`k`).
interval : boolean (default False)
An option to control brackets from mapclassify legend.
If True, open/closed interval brackets are shown in the legend.
classification_kwds (dict, optional): Keyword arguments to pass to mapclassify. Defaults to None.
Returns:
pd.DataFrame, dict: A pandas dataframe with the classification applied and a legend dictionary.
"""
import numpy as np
import pandas as pd
import geopandas as gpd
import matplotlib as mpl
import matplotlib.pyplot as plt
try:
import mapclassify
except ImportError:
raise ImportError(
'mapclassify is required for this function. Install with "pip install mapclassify".'
)
if isinstance(data, gpd.GeoDataFrame) or isinstance(data, pd.DataFrame):
df = data
else:
try:
df = gpd.read_file(data)
except Exception:
raise TypeError(
"Data must be a GeoDataFrame or a path to a file that can be read by geopandas.read_file()."
)
if df.empty:
warnings.warn(
"The GeoDataFrame you are attempting to plot is "
"empty. Nothing has been displayed.",
UserWarning,
)
return
columns = df.columns.values.tolist()
if column not in columns:
raise ValueError(
f"{column} is not a column in the GeoDataFrame. It must be one of {columns}."
)
# Convert categorical data to numeric
init_column = None
value_list = None
if np.issubdtype(df[column].dtype, np.object0):
value_list = df[column].unique().tolist()
value_list.sort()
df["category"] = df[column].replace(value_list, range(0, len(value_list)))
init_column = column
column = "category"
k = len(value_list)
if legend_kwds is not None:
legend_kwds = legend_kwds.copy()
# To accept pd.Series and np.arrays as column
if isinstance(column, (np.ndarray, pd.Series)):
if column.shape[0] != df.shape[0]:
raise ValueError(
"The dataframe and given column have different number of rows."
)
else:
values = column
# Make sure index of a Series matches index of df
if isinstance(values, pd.Series):
values = values.reindex(df.index)
else:
values = df[column]
values = df[column]
nan_idx = np.asarray(pd.isna(values), dtype="bool")
if cmap is None:
cmap = "Blues"
try:
cmap = plt.get_cmap(cmap, k)
except:
cmap = plt.cm.get_cmap(cmap, k)
if colors is None:
colors = [mpl.colors.rgb2hex(cmap(i))[1:] for i in range(cmap.N)]
colors = ["#" + i for i in colors]
elif isinstance(colors, list):
colors = [check_color(i) for i in colors]
elif isinstance(colors, str):
colors = [check_color(colors)] * k
allowed_schemes = [
"BoxPlot",
"EqualInterval",
"FisherJenks",
"FisherJenksSampled",
"HeadTailBreaks",
"JenksCaspall",
"JenksCaspallForced",
"JenksCaspallSampled",
"MaxP",
"MaximumBreaks",
"NaturalBreaks",
"Quantiles",
"Percentiles",
"StdMean",
"UserDefined",
]
if scheme.lower() not in [s.lower() for s in allowed_schemes]:
raise ValueError(
f"{scheme} is not a valid scheme. It must be one of {allowed_schemes}."
)
if classification_kwds is None:
classification_kwds = {}
if "k" not in classification_kwds:
classification_kwds["k"] = k
binning = mapclassify.classify(
np.asarray(values[~nan_idx]), scheme, **classification_kwds
)
df["category"] = binning.yb
df["color"] = [colors[i] for i in df["category"]]
if legend_kwds is None:
legend_kwds = {}
if "interval" not in legend_kwds:
legend_kwds["interval"] = True
if "fmt" not in legend_kwds:
if np.issubdtype(df[column].dtype, np.floating):
legend_kwds["fmt"] = "{:.2f}"
else:
legend_kwds["fmt"] = "{:.0f}"
if labels is None:
# set categorical to True for creating the legend
if legend_kwds is not None and "labels" in legend_kwds:
if len(legend_kwds["labels"]) != binning.k:
raise ValueError(
"Number of labels must match number of bins, "
"received {} labels for {} bins".format(
len(legend_kwds["labels"]), binning.k
)
)
else:
labels = list(legend_kwds.pop("labels"))
else:
# fmt = "{:.2f}"
if legend_kwds is not None and "fmt" in legend_kwds:
fmt = legend_kwds.pop("fmt")
labels = binning.get_legend_classes(fmt)
if legend_kwds is not None:
show_interval = legend_kwds.pop("interval", False)
else:
show_interval = False
if not show_interval:
labels = [c[1:-1] for c in labels]
if init_column is not None:
labels = value_list
elif isinstance(labels, list):
if len(labels) != len(colors):
raise ValueError("The number of labels must match the number of colors.")
else:
raise ValueError("labels must be a list or None.")
legend_dict = dict(zip(labels, colors))
df["category"] = df["category"] + 1
return df, legend_dict
clip_image(image, mask, output)
¶
Clip an image by mask.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
image |
str |
Path to the image file in GeoTIFF format. |
required |
mask |
str | list | dict |
The mask used to extract the image. It can be a path to vector datasets (e.g., GeoJSON, Shapefile), a list of coordinates, or m.user_roi. |
required |
output |
str |
Path to the output file. |
required |
Exceptions:
Type | Description |
---|---|
ImportError |
If the fiona or rasterio package is not installed. |
FileNotFoundError |
If the image is not found. |
ValueError |
If the mask is not a valid GeoJSON or raster file. |
FileNotFoundError |
If the mask file is not found. |
Source code in geemap/common.py
def clip_image(image, mask, output):
"""Clip an image by mask.
Args:
image (str): Path to the image file in GeoTIFF format.
mask (str | list | dict): The mask used to extract the image. It can be a path to vector datasets (e.g., GeoJSON, Shapefile), a list of coordinates, or m.user_roi.
output (str): Path to the output file.
Raises:
ImportError: If the fiona or rasterio package is not installed.
FileNotFoundError: If the image is not found.
ValueError: If the mask is not a valid GeoJSON or raster file.
FileNotFoundError: If the mask file is not found.
"""
try:
import fiona
import rasterio
import rasterio.mask
except ImportError as e:
raise ImportError(e)
if not os.path.exists(image):
raise FileNotFoundError(f"{image} does not exist.")
if not output.endswith(".tif"):
raise ValueError("Output must be a tif file.")
output = check_file_path(output)
if isinstance(mask, ee.Geometry):
mask = mask.coordinates().getInfo()[0]
if isinstance(mask, str):
if mask.startswith("http"):
mask = download_file(mask, output)
if not os.path.exists(mask):
raise FileNotFoundError(f"{mask} does not exist.")
elif isinstance(mask, list) or isinstance(mask, dict):
if isinstance(mask, list):
geojson = {
"type": "FeatureCollection",
"features": [
{
"type": "Feature",
"properties": {},
"geometry": {"type": "Polygon", "coordinates": [mask]},
}
],
}
else:
geojson = {
"type": "FeatureCollection",
"features": [mask],
}
mask = temp_file_path(".geojson")
with open(mask, "w") as f:
json.dump(geojson, f)
with fiona.open(mask, "r") as shapefile:
shapes = [feature["geometry"] for feature in shapefile]
with rasterio.open(image) as src:
out_image, out_transform = rasterio.mask.mask(src, shapes, crop=True)
out_meta = src.meta
out_meta.update(
{
"driver": "GTiff",
"height": out_image.shape[1],
"width": out_image.shape[2],
"transform": out_transform,
}
)
with rasterio.open(output, "w", **out_meta) as dest:
dest.write(out_image)
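For reference, a minimal usage sketch (the file paths and rectangle coordinates below are placeholders; rasterio and fiona must be installed):
from geemap.common import clip_image
# A rectangular mask given as a closed ring of [lon, lat] vertices.
roi = [
    [-115.97, 35.98],
    [-115.36, 35.98],
    [-115.36, 36.47],
    [-115.97, 36.47],
    [-115.97, 35.98],
]
clip_image("dem.tif", mask=roi, output="dem_clipped.tif")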
clone_github_repo(url, out_dir)
¶
Clones a GitHub repository.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
url |
str |
The link to the GitHub repository |
required |
out_dir |
str |
The output directory for the cloned repository. |
required |
Source code in geemap/common.py
def clone_github_repo(url, out_dir):
"""Clones a GitHub repository.
Args:
url (str): The link to the GitHub repository
out_dir (str): The output directory for the cloned repository.
"""
repo_name = os.path.basename(url)
# url_zip = os.path.join(url, 'archive/master.zip')
url_zip = url + "/archive/master.zip"
if os.path.exists(out_dir):
print(
"The specified output directory already exists. Please choose a new directory."
)
return
parent_dir = os.path.dirname(out_dir)
out_file_path = os.path.join(parent_dir, repo_name + ".zip")
try:
urllib.request.urlretrieve(url_zip, out_file_path)
except Exception:
print("The provided URL is invalid. Please double check the URL.")
return
with zipfile.ZipFile(out_file_path, "r") as zip_ref:
zip_ref.extractall(parent_dir)
src = out_file_path.replace(".zip", "-master")
os.rename(src, out_dir)
os.remove(out_file_path)
clone_google_repo(url, out_dir=None)
¶
Clones an Earth Engine repository from https://earthengine.googlesource.com, such as https://earthengine.googlesource.com/users/google/datasets
Parameters:
Name | Type | Description | Default |
---|---|---|---|
url |
str |
The link to the Earth Engine repository |
required |
out_dir |
str |
The output directory for the cloned repository. Defaults to None. |
None |
Source code in geemap/common.py
def clone_google_repo(url, out_dir=None):
"""Clones an Earth Engine repository from https://earthengine.googlesource.com, such as https://earthengine.googlesource.com/users/google/datasets
Args:
url (str): The link to the Earth Engine repository
out_dir (str, optional): The output directory for the cloned repository. Defaults to None.
"""
repo_name = os.path.basename(url)
if out_dir is None:
out_dir = os.path.join(os.getcwd(), repo_name)
if not os.path.exists(os.path.dirname(out_dir)):
os.makedirs(os.path.dirname(out_dir))
if os.path.exists(out_dir):
print(
"The specified output directory already exists. Please choose a new directory."
)
return
if check_git_install():
cmd = f'git clone "{url}" "{out_dir}"'
os.popen(cmd).read()
clone_repo(out_dir='.', unzip=True)
¶
Clones the geemap GitHub repository.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
out_dir |
str |
Output folder for the repo. Defaults to '.'. |
'.' |
unzip |
bool |
Whether to unzip the repository. Defaults to True. |
True |
Source code in geemap/common.py
def clone_repo(out_dir=".", unzip=True):
"""Clones the geemap GitHub repository.
Args:
out_dir (str, optional): Output folder for the repo. Defaults to '.'.
unzip (bool, optional): Whether to unzip the repository. Defaults to True.
"""
url = "https://github.com/gee-community/geemap/archive/master.zip"
filename = "geemap-master.zip"
download_from_url(url, out_file_name=filename, out_dir=out_dir, unzip=unzip)
cog_bands(url, titiler_endpoint=None, timeout=300)
¶
Get band names of a Cloud Optimized GeoTIFF (COG).
Parameters:
Name | Type | Description | Default |
---|---|---|---|
url |
str |
HTTP URL to a COG, e.g., https://opendata.digitalglobe.com/events/mauritius-oil-spill/post-event/2020-08-12/105001001F1B5B00/105001001F1B5B00.tif |
required |
titiler_endpoint |
str |
Titiler endpoint. Defaults to "https://titiler.xyz". |
None |
timeout |
int |
Timeout in seconds. Defaults to 300. |
300 |
Returns:
Type | Description |
---|---|
list |
A list of band names |
Source code in geemap/common.py
def cog_bands(url, titiler_endpoint=None, timeout=300):
"""Get band names of a Cloud Optimized GeoTIFF (COG).
Args:
url (str): HTTP URL to a COG, e.g., https://opendata.digitalglobe.com/events/mauritius-oil-spill/post-event/2020-08-12/105001001F1B5B00/105001001F1B5B00.tif
titiler_endpoint (str, optional): Titiler endpoint. Defaults to "https://titiler.xyz".
timeout (int, optional): Timeout in seconds. Defaults to 300.
Returns:
list: A list of band names
"""
titiler_endpoint = check_titiler_endpoint(titiler_endpoint)
url = get_direct_url(url)
r = requests.get(
f"{titiler_endpoint}/cog/info",
params={
"url": url,
},
timeout=timeout,
).json()
bands = [b[0] for b in r["band_descriptions"]]
return bands
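A short usage sketch using the sample COG URL from the docstring (network access and the default titiler endpoint are assumed):
from geemap.common import cog_bands
url = "https://opendata.digitalglobe.com/events/mauritius-oil-spill/post-event/2020-08-12/105001001F1B5B00/105001001F1B5B00.tif"
print(cog_bands(url))  # a list of band names reported by the /cog/info endpoint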
cog_bounds(url, titiler_endpoint=None, timeout=300)
¶
Get the bounding box of a Cloud Optimized GeoTIFF (COG).
Parameters:
Name | Type | Description | Default |
---|---|---|---|
url |
str |
HTTP URL to a COG, e.g., https://opendata.digitalglobe.com/events/mauritius-oil-spill/post-event/2020-08-12/105001001F1B5B00/105001001F1B5B00.tif |
required |
titiler_endpoint |
str |
Titiler endpoint. Defaults to "https://titiler.xyz". |
None |
timeout |
int |
Timeout in seconds. Defaults to 300. |
300 |
Returns:
Type | Description |
---|---|
list |
A list of values representing [left, bottom, right, top] |
Source code in geemap/common.py
def cog_bounds(url, titiler_endpoint=None, timeout=300):
"""Get the bounding box of a Cloud Optimized GeoTIFF (COG).
Args:
url (str): HTTP URL to a COG, e.g., https://opendata.digitalglobe.com/events/mauritius-oil-spill/post-event/2020-08-12/105001001F1B5B00/105001001F1B5B00.tif
titiler_endpoint (str, optional): Titiler endpoint. Defaults to "https://titiler.xyz".
timeout (int, optional): Timeout in seconds. Defaults to 300.
Returns:
list: A list of values representing [left, bottom, right, top]
"""
titiler_endpoint = check_titiler_endpoint(titiler_endpoint)
url = get_direct_url(url)
r = requests.get(
f"{titiler_endpoint}/cog/bounds", params={"url": url}, timeout=timeout
).json()
if "bounds" in r.keys():
bounds = r["bounds"]
else:
bounds = None
return bounds
cog_center(url, titiler_endpoint=None)
¶
Get the centroid of a Cloud Optimized GeoTIFF (COG).
Parameters:
Name | Type | Description | Default |
---|---|---|---|
url |
str |
HTTP URL to a COG, e.g., https://opendata.digitalglobe.com/events/mauritius-oil-spill/post-event/2020-08-12/105001001F1B5B00/105001001F1B5B00.tif |
required |
titiler_endpoint |
str |
Titiler endpoint. Defaults to "https://titiler.xyz". |
None |
Returns:
Type | Description |
---|---|
tuple |
A tuple representing (longitude, latitude) |
Source code in geemap/common.py
def cog_center(url, titiler_endpoint=None):
"""Get the centroid of a Cloud Optimized GeoTIFF (COG).
Args:
url (str): HTTP URL to a COG, e.g., https://opendata.digitalglobe.com/events/mauritius-oil-spill/post-event/2020-08-12/105001001F1B5B00/105001001F1B5B00.tif
titiler_endpoint (str, optional): Titiler endpoint. Defaults to "https://titiler.xyz".
Returns:
tuple: A tuple representing (longitude, latitude)
"""
titiler_endpoint = check_titiler_endpoint(titiler_endpoint)
url = get_direct_url(url)
bounds = cog_bounds(url, titiler_endpoint)
center = ((bounds[0] + bounds[2]) / 2, (bounds[1] + bounds[3]) / 2)  # (lon, lat)
return center
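A minimal sketch showing how the centroid relates to the bounds (same sample COG; network access assumed):
from geemap.common import cog_bounds, cog_center
url = "https://opendata.digitalglobe.com/events/mauritius-oil-spill/post-event/2020-08-12/105001001F1B5B00/105001001F1B5B00.tif"
left, bottom, right, top = cog_bounds(url)
lon, lat = cog_center(url)  # equals ((left + right) / 2, (bottom + top) / 2)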
cog_info(url, titiler_endpoint=None, return_geojson=False, timeout=300)
¶
Get band statistics of a Cloud Optimized GeoTIFF (COG).
Parameters:
Name | Type | Description | Default |
---|---|---|---|
url |
str |
HTTP URL to a COG, e.g., https://opendata.digitalglobe.com/events/mauritius-oil-spill/post-event/2020-08-12/105001001F1B5B00/105001001F1B5B00.tif |
required |
titiler_endpoint |
str |
Titiler endpoint. Defaults to "https://titiler.xyz". |
None |
timeout |
int |
Timeout in seconds. Defaults to 300. |
300 |
Returns:
Type | Description |
---|---|
dict |
A dictionary of band info. |
Source code in geemap/common.py
def cog_info(url, titiler_endpoint=None, return_geojson=False, timeout=300):
"""Get band statistics of a Cloud Optimized GeoTIFF (COG).
Args:
url (str): HTTP URL to a COG, e.g., https://opendata.digitalglobe.com/events/mauritius-oil-spill/post-event/2020-08-12/105001001F1B5B00/105001001F1B5B00.tif
titiler_endpoint (str, optional): Titiler endpoint. Defaults to "https://titiler.xyz".
timeout (int, optional): Timeout in seconds. Defaults to 300.
Returns:
dict: A dictionary of band info.
"""
titiler_endpoint = check_titiler_endpoint(titiler_endpoint)
url = get_direct_url(url)
info = "info"
if return_geojson:
info = "info.geojson"
r = requests.get(
f"{titiler_endpoint}/cog/{info}",
params={
"url": url,
},
timeout=timeout,
).json()
return r
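A usage sketch (same sample COG; the exact keys of the returned dictionary depend on the titiler version):
from geemap.common import cog_info
url = "https://opendata.digitalglobe.com/events/mauritius-oil-spill/post-event/2020-08-12/105001001F1B5B00/105001001F1B5B00.tif"
info = cog_info(url)
print(list(info.keys()))  # typically includes bounds, band_descriptions, dtype, etc.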
cog_mosaic(links, titiler_endpoint=None, username='anonymous', layername=None, overwrite=False, verbose=True, timeout=300, **kwargs)
¶
Creates a COG mosaic from a list of COG URLs.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
links |
list |
A list containing COG HTTP URLs. |
required |
titiler_endpoint |
str |
Titiler endpoint. Defaults to "https://titiler.xyz". |
None |
username |
str |
User name for the titiler endpoint. Defaults to "anonymous". |
'anonymous' |
layername |
str |
Layer name to use. Defaults to None. |
None |
overwrite |
bool |
Whether to overwrite the layer name if existing. Defaults to False. |
False |
verbose |
bool |
Whether to print out descriptive information. Defaults to True. |
True |
timeout |
int |
Timeout in seconds. Defaults to 300. |
300 |
Exceptions:
Type | Description |
---|---|
Exception |
If the COG mosaic fails to create. |
Returns:
Type | Description |
---|---|
str |
The tile URL for the COG mosaic. |
Source code in geemap/common.py
def cog_mosaic(
links,
titiler_endpoint=None,
username="anonymous",
layername=None,
overwrite=False,
verbose=True,
timeout=300,
**kwargs,
):
"""Creates a COG mosaic from a list of COG URLs.
Args:
links (list): A list containing COG HTTP URLs.
titiler_endpoint (str, optional): Titiler endpoint. Defaults to "https://titiler.xyz".
username (str, optional): User name for the titiler endpoint. Defaults to "anonymous".
layername (str, optional): Layer name to use. Defaults to None.
overwrite (bool, optional): Whether to overwrite the layer name if existing. Defaults to False.
verbose (bool, optional): Whether to print out descriptive information. Defaults to True.
timeout (int, optional): Timeout in seconds. Defaults to 300.
Raises:
Exception: If the COG mosaic fails to create.
Returns:
str: The tile URL for the COG mosaic.
"""
titiler_endpoint = check_titiler_endpoint(titiler_endpoint)
if layername is None:
layername = "layer_" + random_string(5)
try:
if verbose:
print("Creating COG masaic ...")
# Create token
r = requests.post(
f"{titiler_endpoint}/tokens/create",
json={"username": username, "scope": ["mosaic:read", "mosaic:create"]},
).json()
token = r["token"]
# Create mosaic
requests.post(
f"{titiler_endpoint}/mosaicjson/create",
json={
"username": username,
"layername": layername,
"files": links,
# "overwrite": overwrite
},
params={
"access_token": token,
},
).json()
r2 = requests.get(
f"{titiler_endpoint}/mosaicjson/{username}.{layername}/tilejson.json",
timeout=timeout,
).json()
return r2["tiles"][0]
except Exception as e:
raise Exception(e)
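A minimal sketch, assuming the configured titiler endpoint exposes the token and mosaicjson API used above (the COG URLs are placeholders):
from geemap.common import cog_mosaic
links = [
    "https://example.com/cogs/scene_1.tif",
    "https://example.com/cogs/scene_2.tif",
]
tile_url = cog_mosaic(links, username="anonymous", layername="my_mosaic")
print(tile_url)  # XYZ tile URL template for the mosaic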
cog_mosaic_from_file(filepath, skip_rows=0, titiler_endpoint=None, username='anonymous', layername=None, overwrite=False, verbose=True, **kwargs)
¶
Creates a COG mosaic from a csv/txt file stored locally or accessible through an HTTP URL.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
filepath |
str |
Local path or HTTP URL to the csv/txt file containing COG URLs. |
required |
skip_rows |
int |
The number of rows to skip in the file. Defaults to 0. |
0 |
titiler_endpoint |
str |
Titiler endpoint. Defaults to "https://titiler.xyz". |
None |
username |
str |
User name for the titiler endpoint. Defaults to "anonymous". |
'anonymous' |
layername |
str |
Layer name to use. Defaults to None. |
None |
overwrite |
bool |
Whether to overwrite the layer name if existing. Defaults to False. |
False |
verbose |
bool |
Whether to print out descriptive information. Defaults to True. |
True |
Returns:
Type | Description |
---|---|
str |
The tile URL for the COG mosaic. |
Source code in geemap/common.py
def cog_mosaic_from_file(
filepath,
skip_rows=0,
titiler_endpoint=None,
username="anonymous",
layername=None,
overwrite=False,
verbose=True,
**kwargs,
):
"""Creates a COG mosaic from a csv/txt file stored locally for through HTTP URL.
Args:
filepath (str): Local path or HTTP URL to the csv/txt file containing COG URLs.
skip_rows (int, optional): The number of rows to skip in the file. Defaults to 0.
titiler_endpoint (str, optional): Titiler endpoint. Defaults to "https://titiler.xyz".
username (str, optional): User name for the titiler endpoint. Defaults to "anonymous".
layername (str, optional): Layer name to use. Defaults to None.
overwrite (bool, optional): Whether to overwrite the layer name if existing. Defaults to False.
verbose (bool, optional): Whether to print out descriptive information. Defaults to True.
Returns:
str: The tile URL for the COG mosaic.
"""
import urllib
titiler_endpoint = check_titiler_endpoint(titiler_endpoint)
links = []
if filepath.startswith("http"):
data = urllib.request.urlopen(filepath)
for line in data:
links.append(line.decode("utf-8").strip())
else:
with open(filepath) as f:
links = [line.strip() for line in f.readlines()]
links = links[skip_rows:]
# print(links)
mosaic = cog_mosaic(
links, titiler_endpoint, username, layername, overwrite, verbose, **kwargs
)
return mosaic
cog_pixel_value(lon, lat, url, bidx=None, titiler_endpoint=None, timeout=300, **kwargs)
¶
Get pixel value from COG.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
lon |
float |
Longitude of the pixel. |
required |
lat |
float |
Latitude of the pixel. |
required |
url |
str |
HTTP URL to a COG, e.g., 'https://github.com/opengeos/data/releases/download/raster/Libya-2023-07-01.tif' |
required |
bidx |
str |
Dataset band indexes (e.g bidx=1, bidx=1&bidx=2&bidx=3). Defaults to None. |
None |
titiler_endpoint |
str |
Titiler endpoint, e.g., "https://titiler.xyz", "planetary-computer", "pc". Defaults to None. |
None |
timeout |
int |
Timeout in seconds. Defaults to 300. |
300 |
Returns:
Type | Description |
---|---|
dict |
A dictionary of band info. |
Source code in geemap/common.py
def cog_pixel_value(
lon,
lat,
url,
bidx=None,
titiler_endpoint=None,
timeout=300,
**kwargs,
):
"""Get pixel value from COG.
Args:
lon (float): Longitude of the pixel.
lat (float): Latitude of the pixel.
url (str): HTTP URL to a COG, e.g., 'https://github.com/opengeos/data/releases/download/raster/Libya-2023-07-01.tif'
bidx (str, optional): Dataset band indexes (e.g bidx=1, bidx=1&bidx=2&bidx=3). Defaults to None.
titiler_endpoint (str, optional): Titiler endpoint, e.g., "https://titiler.xyz", "planetary-computer", "pc". Defaults to None.
timeout (int, optional): Timeout in seconds. Defaults to 300.
Returns:
dict: A dictionary of band info.
"""
titiler_endpoint = check_titiler_endpoint(titiler_endpoint)
url = get_direct_url(url)
kwargs["url"] = url
if bidx is not None:
kwargs["bidx"] = bidx
r = requests.get(
f"{titiler_endpoint}/cog/point/{lon},{lat}", params=kwargs, timeout=timeout
).json()
bands = cog_bands(url, titiler_endpoint)
# if isinstance(titiler_endpoint, str):
# r = requests.get(f"{titiler_endpoint}/cog/point/{lon},{lat}", params=kwargs).json()
# else:
# r = requests.get(
# titiler_endpoint.url_for_stac_pixel_value(lon, lat), params=kwargs
# ).json()
if "detail" in r:
print(r["detail"])
return None
else:
values = r["values"]
result = dict(zip(bands, values))
return result
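A usage sketch querying a single location in the sample COG from the docstring (the coordinates are placeholders; None is returned for points outside the image):
from geemap.common import cog_pixel_value
url = "https://github.com/opengeos/data/releases/download/raster/Libya-2023-07-01.tif"
values = cog_pixel_value(lon=23.0, lat=32.5, url=url)
print(values)  # a dict mapping band names to pixel values, or None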
cog_stats(url, titiler_endpoint=None, timeout=300)
¶
Get band statistics of a Cloud Optimized GeoTIFF (COG).
Parameters:
Name | Type | Description | Default |
---|---|---|---|
url |
str |
HTTP URL to a COG, e.g., https://opendata.digitalglobe.com/events/mauritius-oil-spill/post-event/2020-08-12/105001001F1B5B00/105001001F1B5B00.tif |
required |
titiler_endpoint |
str |
Titiler endpoint. Defaults to "https://titiler.xyz". |
None |
timeout |
int |
Timeout in seconds. Defaults to 300. |
300 |
Returns:
Type | Description |
---|---|
dict |
A dictionary of band statistics. |
Source code in geemap/common.py
def cog_stats(url, titiler_endpoint=None, timeout=300):
"""Get band statistics of a Cloud Optimized GeoTIFF (COG).
Args:
url (str): HTTP URL to a COG, e.g., https://opendata.digitalglobe.com/events/mauritius-oil-spill/post-event/2020-08-12/105001001F1B5B00/105001001F1B5B00.tif
titiler_endpoint (str, optional): Titiler endpoint. Defaults to "https://titiler.xyz".
timeout (int, optional): Timeout in seconds. Defaults to 300.
Returns:
dict: A dictionary of band statistics.
"""
titiler_endpoint = check_titiler_endpoint(titiler_endpoint)
url = get_direct_url(url)
r = requests.get(
f"{titiler_endpoint}/cog/statistics",
params={
"url": url,
},
timeout=timeout,
).json()
return r
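A sketch that mirrors how cog_tile (below) derives a default rescale range from these statistics (same sample COG; network access assumed):
from geemap.common import cog_stats
url = "https://opendata.digitalglobe.com/events/mauritius-oil-spill/post-event/2020-08-12/105001001F1B5B00/105001001F1B5B00.tif"
stats = cog_stats(url)
low = min(stats[b]["percentile_2"] for b in stats)
high = max(stats[b]["percentile_98"] for b in stats)
print(f"rescale={low},{high}")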
cog_tile(url, bands=None, titiler_endpoint=None, timeout=300, proxies=None, **kwargs)
¶
Get a tile layer from a Cloud Optimized GeoTIFF (COG). Source code adapted from https://developmentseed.org/titiler/examples/notebooks/Working_with_CloudOptimizedGeoTIFF_simple/
Parameters:
Name | Type | Description | Default |
---|---|---|---|
url |
str |
HTTP URL to a COG, e.g., https://opendata.digitalglobe.com/events/mauritius-oil-spill/post-event/2020-08-12/105001001F1B5B00/105001001F1B5B00.tif |
required |
bands |
list |
A list of band names (str) or 1-based band indexes (int) to visualize. Defaults to None. |
None |
titiler_endpoint |
str |
Titiler endpoint. Defaults to "https://titiler.xyz". |
None |
timeout |
int |
Timeout in seconds. Defaults to 300. |
300 |
proxies |
dict |
Proxies to use. Defaults to None. |
None |
Returns:
Type | Description |
---|---|
str |
The COG tile layer URL. |
Source code in geemap/common.py
def cog_tile(
url,
bands=None,
titiler_endpoint=None,
timeout=300,
proxies=None,
**kwargs,
):
"""Get a tile layer from a Cloud Optimized GeoTIFF (COG).
Source code adapted from https://developmentseed.org/titiler/examples/notebooks/Working_with_CloudOptimizedGeoTIFF_simple/
Args:
url (str): HTTP URL to a COG, e.g., https://opendata.digitalglobe.com/events/mauritius-oil-spill/post-event/2020-08-12/105001001F1B5B00/105001001F1B5B00.tif
bands (list, optional): A list of band names (str) or 1-based band indexes (int) to visualize. Defaults to None.
titiler_endpoint (str, optional): Titiler endpoint. Defaults to "https://titiler.xyz".
timeout (int, optional): Timeout in seconds. Defaults to 300.
proxies (dict, optional): Proxies to use. Defaults to None.
Returns:
str: The COG tile layer URL.
"""
titiler_endpoint = check_titiler_endpoint(titiler_endpoint)
url = get_direct_url(url)
kwargs["url"] = url
band_names = cog_bands(url, titiler_endpoint)
if bands is None and "bidx" not in kwargs:
if len(band_names) >= 3:
kwargs["bidx"] = [1, 2, 3]
elif bands is not None and "bidx" not in kwargs:
if all(isinstance(x, int) for x in bands):
kwargs["bidx"] = bands
elif all(isinstance(x, str) for x in bands):
kwargs["bidx"] = [band_names.index(x) + 1 for x in bands]
else:
raise ValueError("Bands must be a list of integers or strings.")
if "palette" in kwargs:
kwargs["colormap_name"] = kwargs.pop("palette")
if "colormap" in kwargs:
kwargs["colormap_name"] = kwargs.pop("colormap")
if "rescale" not in kwargs:
stats = cog_stats(url, titiler_endpoint)
percentile_2 = min([stats[s]["percentile_2"] for s in stats])
percentile_98 = max([stats[s]["percentile_98"] for s in stats])
kwargs["rescale"] = f"{percentile_2},{percentile_98}"
TileMatrixSetId = "WebMercatorQuad"
if "TileMatrixSetId" in kwargs.keys():
TileMatrixSetId = kwargs["TileMatrixSetId"]
kwargs.pop("TileMatrixSetId")
r = requests.get(
f"{titiler_endpoint}/cog/{TileMatrixSetId}/tilejson.json",
params=kwargs,
timeout=timeout,
proxies=proxies,
).json()
return r["tiles"][0]
cog_validate(source, verbose=False)
¶
Validate Cloud Optimized Geotiff.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
source |
str |
A dataset path or URL. Will be opened in "r" mode. |
required |
verbose |
bool |
Whether to print the output of the validation. Defaults to False. |
False |
Exceptions:
Type | Description |
---|---|
ImportError |
If the rio-cogeo package is not installed. |
FileNotFoundError |
If the provided file could not be found. |
Returns:
Type | Description |
---|---|
tuple |
A tuple containing the validation results (True if src_path is a valid COG, a list of validation errors, and a list of validation warnings). |
Source code in geemap/common.py
def cog_validate(source, verbose=False):
"""Validate Cloud Optimized Geotiff.
Args:
source (str): A dataset path or URL. Will be opened in "r" mode.
verbose (bool, optional): Whether to print the output of the validation. Defaults to False.
Raises:
ImportError: If the rio-cogeo package is not installed.
FileNotFoundError: If the provided file could not be found.
Returns:
tuple: A tuple containing the validation results (True if src_path is a valid COG, a list of validation errors, and a list of validation warnings).
"""
try:
from rio_cogeo.cogeo import cog_validate, cog_info
except ImportError:
raise ImportError(
"The rio-cogeo package is not installed. Please install it with `pip install rio-cogeo` or `conda install rio-cogeo -c conda-forge`."
)
if not source.startswith("http"):
source = check_file_path(source)
if not os.path.exists(source):
raise FileNotFoundError("The provided input file could not be found.")
if verbose:
return cog_info(source)
else:
return cog_validate(source)
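A usage sketch (requires the rio-cogeo package; the file path is a placeholder):
from geemap.common import cog_validate
is_valid, errors, warnings_found = cog_validate("dem.tif")
if not is_valid:
    print(errors)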
column_stats(collection, column, stats_type)
¶
Aggregates over a given property (column) of the features in a collection, calculating one statistic of the selected property: min, max, mean, median, sum, standard deviation, or variance.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
collection |
FeatureCollection |
The input feature collection to calculate statistics. |
required |
column |
str |
The name of the column to calculate statistics. |
required |
stats_type |
str |
The type of statistics to calculate. |
required |
Returns:
Type | Description |
---|---|
dict |
The dictionary containing information about the requested statistics. |
Source code in geemap/common.py
def column_stats(collection, column, stats_type):
"""Aggregates over a given property of the objects in a collection, calculating the sum, min, max, mean,
sample standard deviation, sample variance, total standard deviation and total variance of the selected property.
Args:
collection (FeatureCollection): The input feature collection to calculate statistics.
column (str): The name of the column to calculate statistics.
stats_type (str): The type of statistics to calculate.
Returns:
dict: The dictionary containing information about the requested statistics.
"""
stats_type = stats_type.lower()
allowed_stats = ["min", "max", "mean", "median", "sum", "stdDev", "variance"]
if stats_type not in allowed_stats:
print(
"The stats type must be one of the following: {}".format(
",".join(allowed_stats)
)
)
return
stats_dict = {
"min": ee.Reducer.min(),
"max": ee.Reducer.max(),
"mean": ee.Reducer.mean(),
"median": ee.Reducer.median(),
"sum": ee.Reducer.sum(),
"stdDev": ee.Reducer.stdDev(),
"variance": ee.Reducer.variance(),
}
selectors = [column]
stats = collection.reduceColumns(
**{"selectors": selectors, "reducer": stats_dict[stats_type]}
)
return stats
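A sketch computing the mean of a column in a public Earth Engine FeatureCollection (assumes the Earth Engine API is initialized):
import ee
from geemap.common import column_stats
ee.Initialize()
states = ee.FeatureCollection("TIGER/2018/States")
stats = column_stats(states, column="ALAND", stats_type="mean")
print(stats.getInfo())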
connect_postgis(database, host='localhost', user=None, password=None, port=5432, use_env_var=False)
¶
Connects to a PostGIS database.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
database |
str |
Name of the database |
required |
host |
str |
Hosting server for the database. Defaults to "localhost". |
'localhost' |
user |
str |
User name to access the database. Defaults to None. |
None |
password |
str |
Password to access the database. Defaults to None. |
None |
port |
int |
Port number to connect to at the server host. Defaults to 5432. |
5432 |
use_env_var |
bool |
Whether to use environment variables. If set to True, user and password are treated as environment variable names, defaulting to user="SQL_USER" and password="SQL_PASSWORD". Defaults to False. |
False |
Exceptions:
Type | Description |
---|---|
ValueError |
If user is not specified. |
ValueError |
If password is not specified. |
Returns:
Type | Description |
---|---|
sqlalchemy.engine.Engine |
The SQLAlchemy engine for the PostGIS database connection. |
Source code in geemap/common.py
def connect_postgis(
database, host="localhost", user=None, password=None, port=5432, use_env_var=False
):
"""Connects to a PostGIS database.
Args:
database (str): Name of the database
host (str, optional): Hosting server for the database. Defaults to "localhost".
user (str, optional): User name to access the database. Defaults to None.
password (str, optional): Password to access the database. Defaults to None.
port (int, optional): Port number to connect to at the server host. Defaults to 5432.
use_env_var (bool, optional): Whether to use environment variables. If set to True, user and password are treated as environment variable names, defaulting to user="SQL_USER" and password="SQL_PASSWORD". Defaults to False.
Raises:
ValueError: If user is not specified.
ValueError: If password is not specified.
Returns:
sqlalchemy.engine.Engine: The SQLAlchemy engine for the PostGIS database connection.
"""
check_package(name="geopandas", URL="https://geopandas.org")
check_package(
name="sqlalchemy",
URL="https://docs.sqlalchemy.org/en/14/intro.html#installation",
)
from sqlalchemy import create_engine
if use_env_var:
if user is not None:
user = os.getenv(user)
else:
user = os.getenv("SQL_USER")
if password is not None:
password = os.getenv(password)
else:
password = os.getenv("SQL_PASSWORD")
if user is None:
raise ValueError("user is not specified.")
if password is None:
raise ValueError("password is not specified.")
connection_string = f"postgresql://{user}:{password}@{host}:{port}/{database}"
engine = create_engine(connection_string)
return engine
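A connection sketch (the database name, table, and credentials are placeholders; geopandas and sqlalchemy must be installed):
import geopandas as gpd
from geemap.common import connect_postgis
engine = connect_postgis(database="nyc", user="postgres", password="changeme")
# The returned SQLAlchemy engine can be passed directly to geopandas.
gdf = gpd.read_postgis("SELECT * FROM my_table", con=engine, geom_col="geom")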
convert_lidar(source, destination=None, point_format_id=None, file_version=None, **kwargs)
¶
Converts a LAS file from one point format to another. Automatically upgrades the file version if the source file version is not compatible with the new point_format_id.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
source |
str | laspy.lasdatas.base.LasBase |
The source data to be converted. |
required |
destination |
str |
The destination file path. Defaults to None. |
None |
point_format_id |
int |
The new point format id (the default is None, which won't change the source format id). |
None |
file_version |
str |
The new file version. None by default which means that the file_version may be upgraded for compatibility with the new point_format. The file version will not be downgraded. |
None |
Returns:
Type | Description |
---|---|
laspy.lasdatas.base.LasBase |
The converted LasData object. |
Source code in geemap/common.py
def convert_lidar(
source, destination=None, point_format_id=None, file_version=None, **kwargs
):
"""Converts a Las from one point format to another Automatically upgrades the file version if source file version
is not compatible with the new point_format_id
Args:
source (str | laspy.lasdatas.base.LasBase): The source data to be converted.
destination (str, optional): The destination file path. Defaults to None.
point_format_id (int, optional): The new point format id (the default is None, which won't change the source format id).
file_version (str, optional): The new file version. None by default which means that the file_version may be upgraded
for compatibility with the new point_format. The file version will not be downgraded.
Returns:
laspy.lasdatas.base.LasBase: The converted LasData object.
"""
try:
import laspy
except ImportError:
print(
"The laspy package is required for this function. Use `pip install laspy[lazrs,laszip]` to install it."
)
return
if isinstance(source, str):
source = read_lidar(source)
las = laspy.convert(
source, point_format_id=point_format_id, file_version=file_version
)
if destination is None:
return las
else:
destination = check_file_path(destination)
write_lidar(las, destination, **kwargs)
return destination
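A conversion sketch (requires laspy with the lazrs/laszip extras; the file names are placeholders):
from geemap.common import convert_lidar
# Upgrade a LAS file to point format 6 and write the result to disk.
convert_lidar("lidar.las", destination="lidar_pf6.las", point_format_id=6)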
coords_to_geojson(coords)
¶
Converts a list of bounding boxes, each represented as [left, bottom, right, top], to a GeoJSON FeatureCollection.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
coords |
list |
A list of bounding boxes, each represented as [left, bottom, right, top]. |
required |
Returns:
Type | Description |
---|---|
dict |
A geojson FeatureCollection. |
Source code in geemap/common.py
def coords_to_geojson(coords):
"""Convert a list of bbox coordinates representing [left, bottom, right, top] to geojson FeatureCollection.
Args:
coords (list): A list of bbox coordinates representing [left, bottom, right, top].
Returns:
dict: A geojson FeatureCollection.
"""
features = []
for bbox in coords:
features.append(bbox_to_geojson(bbox))
return {"type": "FeatureCollection", "features": features}
copy_credentials_to_colab()
¶
Copies ee credentials from Google Drive to Google Colab.
Source code in geemap/common.py
def copy_credentials_to_colab():
"""Copies ee credentials from Google Drive to Google Colab."""
src = "/content/drive/My Drive/.config/earthengine/credentials"
dst = "/root/.config/earthengine/credentials"
wd = os.path.dirname(dst)
if not os.path.exists(wd):
os.makedirs(wd)
shutil.copyfile(src, dst)
copy_credentials_to_drive()
¶
Copies ee credentials from Google Colab to Google Drive.
Source code in geemap/common.py
def copy_credentials_to_drive():
"""Copies ee credentials from Google Colab to Google Drive."""
src = "/root/.config/earthengine/credentials"
dst = "/content/drive/My Drive/.config/earthengine/credentials"
wd = os.path.dirname(dst)
if not os.path.exists(wd):
os.makedirs(wd)
shutil.copyfile(src, dst)
create_code_cell(code='', where='below')
¶
Creates a code cell in the IPython Notebook.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
code |
str |
Code to fill the new code cell with. Defaults to ''. |
'' |
where |
str |
Where to add the new code cell. It can be one of the following: above, below, at_bottom. Defaults to 'below'. |
'below' |
Source code in geemap/common.py
def create_code_cell(code="", where="below"):
"""Creates a code cell in the IPython Notebook.
Args:
code (str, optional): Code to fill the new code cell with. Defaults to ''.
where (str, optional): Where to add the new code cell. It can be one of the following: above, below, at_bottom. Defaults to 'below'.
"""
import base64
import pyperclip
from IPython.display import Javascript, display
try:
pyperclip.copy(str(code))
except Exception as e:
pass
encoded_code = (base64.b64encode(str.encode(code))).decode()
display(
Javascript(
"""
var code = IPython.notebook.insert_cell_{0}('code');
code.set_text(atob("{1}"));
""".format(
where, encoded_code
)
)
)
create_colorbar(width=150, height=30, palette=['blue', 'green', 'red'], add_ticks=True, add_labels=True, labels=None, vertical=False, out_file=None, font_type='arial.ttf', font_size=12, font_color='black', add_outline=True, outline_color='black')
¶
Creates a colorbar based on the provided palette.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
width |
int |
Width of the colorbar in pixels. Defaults to 150. |
150 |
height |
int |
Height of the colorbar in pixels. Defaults to 30. |
30 |
palette |
list |
Palette for the colorbar. Each color can be provided as a string (e.g., 'red'), a hex string (e.g., '#ff0000'), or an RGB tuple (255, 0, 255). Defaults to ['blue', 'green', 'red']. |
['blue', 'green', 'red'] |
add_ticks |
bool |
Whether to add tick markers to the colorbar. Defaults to True. |
True |
add_labels |
bool |
Whether to add labels to the colorbar. Defaults to True. |
True |
labels |
list |
A list of labels to add to the colorbar. Defaults to None. |
None |
vertical |
bool |
Whether to rotate the colorbar vertically. Defaults to False. |
False |
out_file |
str |
File path to the output colorbar in png format. Defaults to None. |
None |
font_type |
str |
Font type to use for labels. Defaults to 'arial.ttf'. |
'arial.ttf' |
font_size |
int |
Font size to use for labels. Defaults to 12. |
12 |
font_color |
str |
Font color to use for labels. Defaults to 'black'. |
'black' |
add_outline |
bool |
Whether to add an outline to the colorbar. Defaults to True. |
True |
outline_color |
str |
Color for the outline of the colorbar. Defaults to 'black'. |
'black' |
Returns:
Type | Description |
---|---|
str |
File path of the output colorbar in png format. |
Source code in geemap/common.py
def create_colorbar(
width=150,
height=30,
palette=["blue", "green", "red"],
add_ticks=True,
add_labels=True,
labels=None,
vertical=False,
out_file=None,
font_type="arial.ttf",
font_size=12,
font_color="black",
add_outline=True,
outline_color="black",
):
"""Creates a colorbar based on the provided palette.
Args:
width (int, optional): Width of the colorbar in pixels. Defaults to 150.
height (int, optional): Height of the colorbar in pixels. Defaults to 30.
palette (list, optional): Palette for the colorbar. Each color can be provided as a string (e.g., 'red'), a hex string (e.g., '#ff0000'), or an RGB tuple (255, 0, 255). Defaults to ['blue', 'green', 'red'].
add_ticks (bool, optional): Whether to add tick markers to the colorbar. Defaults to True.
add_labels (bool, optional): Whether to add labels to the colorbar. Defaults to True.
labels (list, optional): A list of labels to add to the colorbar. Defaults to None.
vertical (bool, optional): Whether to rotate the colorbar vertically. Defaults to False.
out_file (str, optional): File path to the output colorbar in png format. Defaults to None.
font_type (str, optional): Font type to use for labels. Defaults to 'arial.ttf'.
font_size (int, optional): Font size to use for labels. Defaults to 12.
font_color (str, optional): Font color to use for labels. Defaults to 'black'.
add_outline (bool, optional): Whether to add an outline to the colorbar. Defaults to True.
outline_color (str, optional): Color for the outline of the colorbar. Defaults to 'black'.
Returns:
str: File path of the output colorbar in png format.
"""
import decimal
# import io
import pkg_resources
from colour import Color
from PIL import Image, ImageDraw, ImageFont
warnings.simplefilter("ignore")
pkg_dir = os.path.dirname(pkg_resources.resource_filename("geemap", "geemap.py"))
if out_file is None:
filename = "colorbar_" + random_string() + ".png"
out_dir = os.path.join(os.path.expanduser("~"), "Downloads")
out_file = os.path.join(out_dir, filename)
elif not out_file.endswith(".png"):
print("The output file must end with .png")
return
else:
out_file = os.path.abspath(out_file)
if not os.path.exists(os.path.dirname(out_file)):
os.makedirs(os.path.dirname(out_file))
im = Image.new("RGBA", (width, height))
ld = im.load()
def float_range(start, stop, step):
while start < stop:
yield float(start)
start += decimal.Decimal(step)
n_colors = len(palette)
decimal_places = 2
rgb_colors = [Color(check_color(c)).rgb for c in palette]
keys = [
round(c, decimal_places)
for c in list(float_range(0, 1.0001, 1.0 / (n_colors - 1)))
]
heatmap = []
for index, item in enumerate(keys):
pair = [item, rgb_colors[index]]
heatmap.append(pair)
def gaussian(x, a, b, c, d=0):
return a * math.exp(-((x - b) ** 2) / (2 * c**2)) + d
def pixel(x, width=100, map=[], spread=1):
width = float(width)
r = sum(
[
gaussian(x, p[1][0], p[0] * width, width / (spread * len(map)))
for p in map
]
)
g = sum(
[
gaussian(x, p[1][1], p[0] * width, width / (spread * len(map)))
for p in map
]
)
b = sum(
[
gaussian(x, p[1][2], p[0] * width, width / (spread * len(map)))
for p in map
]
)
return min(1.0, r), min(1.0, g), min(1.0, b)
for x in range(im.size[0]):
r, g, b = pixel(x, width=width, map=heatmap)
r, g, b = [int(256 * v) for v in (r, g, b)]
for y in range(im.size[1]):
ld[x, y] = r, g, b
if add_outline:
draw = ImageDraw.Draw(im)
draw.rectangle(
[(0, 0), (width - 1, height - 1)], outline=check_color(outline_color)
)
del draw
if add_ticks:
tick_length = height * 0.1
x = [key * width for key in keys]
y_top = height - tick_length
y_bottom = height
draw = ImageDraw.Draw(im)
for i in x:
shape = [(i, y_top), (i, y_bottom)]
draw.line(shape, fill="black", width=0)
del draw
if vertical:
im = im.transpose(Image.ROTATE_90)
width, height = im.size
if labels is None:
labels = [str(c) for c in keys]
elif len(labels) == 2:
try:
lowerbound = float(labels[0])
upperbound = float(labels[1])
step = (upperbound - lowerbound) / (len(palette) - 1)
labels = [str(lowerbound + c * step) for c in range(0, len(palette))]
except Exception as e:
print(e)
print("The labels are invalid.")
return
elif len(labels) == len(palette):
labels = [str(c) for c in labels]
else:
print("The labels must have the same length as the palette.")
return
if add_labels:
default_font = os.path.join(pkg_dir, "data/fonts/arial.ttf")
if font_type == "arial.ttf":
font = ImageFont.truetype(default_font, font_size)
else:
try:
font_list = system_fonts(show_full_path=True)
font_names = [os.path.basename(f) for f in font_list]
if (font_type in font_list) or (font_type in font_names):
font = ImageFont.truetype(font_type, font_size)
else:
print(
"The specified font type could not be found on your system. Using the default font instead."
)
font = ImageFont.truetype(default_font, font_size)
except Exception as e:
print(e)
font = ImageFont.truetype(default_font, font_size)
font_color = check_color(font_color)
draw = ImageDraw.Draw(im)
w, h = draw.textsize(labels[0], font=font)
for label in labels:
w_tmp, h_tmp = draw.textsize(label, font)
if w_tmp > w:
w = w_tmp
if h_tmp > h:
h = h_tmp
W, H = width + w * 2, height + h * 2
background = Image.new("RGBA", (W, H))
draw = ImageDraw.Draw(background)
if vertical:
xy = (0, h)
else:
xy = (w, 0)
background.paste(im, xy, im)
for index, label in enumerate(labels):
w_tmp, h_tmp = draw.textsize(label, font)
if vertical:
spacing = 5
x = width + spacing
y = int(height + h - keys[index] * height - h_tmp / 2 - 1)
draw.text((x, y), label, font=font, fill=font_color)
else:
x = int(keys[index] * width + w - w_tmp / 2)
spacing = int(h * 0.05)
y = height + spacing
draw.text((x, y), label, font=font, fill=font_color)
im = background.copy()
im.save(out_file)
return out_file
create_contours(image, min_value, max_value, interval, kernel=None, region=None, values=None)
¶
Creates contours from an image. Code adapted from https://mygeoblog.com/2017/01/28/contour-lines-in-gee. Credits to MyGeoBlog.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
image |
ee.Image |
An image to create contours. |
required |
min_value |
float |
The minimum value of contours. |
required |
max_value |
float |
The maximum value of contours. |
required |
interval |
float |
The interval between contours. |
required |
kernel |
ee.Kernel |
The kernel to use for smoothing image. Defaults to None. |
None |
region |
ee.Geometry | ee.FeatureCollection |
The region of interest. Defaults to None. |
None |
values |
list |
A list of values to create contours for. Defaults to None. |
None |
Exceptions:
Type | Description |
---|---|
TypeError |
The image must be an ee.Image. |
TypeError |
The region must be an ee.Geometry or ee.FeatureCollection. |
Returns:
Type | Description |
---|---|
ee.Image |
The image containing contours. |
Source code in geemap/common.py
def create_contours(
image, min_value, max_value, interval, kernel=None, region=None, values=None
):
"""Creates contours from an image. Code adapted from https://mygeoblog.com/2017/01/28/contour-lines-in-gee. Credits to MyGeoBlog.
Args:
image (ee.Image): An image to create contours.
min_value (float): The minimum value of contours.
max_value (float): The maximum value of contours.
interval (float): The interval between contours.
kernel (ee.Kernel, optional): The kernel to use for smoothing image. Defaults to None.
region (ee.Geometry | ee.FeatureCollection, optional): The region of interest. Defaults to None.
values (list, optional): A list of values to create contours for. Defaults to None.
Raises:
TypeError: The image must be an ee.Image.
TypeError: The region must be an ee.Geometry or ee.FeatureCollection.
Returns:
ee.Image: The image containing contours.
"""
if not isinstance(image, ee.Image):
raise TypeError("The image must be an ee.Image.")
if region is not None:
if isinstance(region, ee.FeatureCollection) or isinstance(region, ee.Geometry):
pass
else:
raise TypeError(
"The region must be an ee.Geometry or ee.FeatureCollection."
)
if kernel is None:
kernel = ee.Kernel.gaussian(5, 3)
if isinstance(values, list):
values = ee.List(values)
elif isinstance(values, ee.List):
pass
if values is None:
values = ee.List.sequence(min_value, max_value, interval)
def contouring(value):
mycountour = (
image.convolve(kernel)
.subtract(ee.Image.constant(value))
.zeroCrossing()
.multiply(ee.Image.constant(value).toFloat())
)
return mycountour.mask(mycountour)
contours = values.map(contouring)
if region is not None:
if isinstance(region, ee.FeatureCollection):
return ee.ImageCollection(contours).mosaic().clipToCollection(region)
elif isinstance(region, ee.Geometry):
return ee.ImageCollection(contours).mosaic().clip(region)
else:
return ee.ImageCollection(contours).mosaic()
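A sketch creating 100 m elevation contours from the SRTM DEM over a small bounding box (assumes the Earth Engine API is initialized; the region is a placeholder):
import ee
from geemap.common import create_contours
ee.Initialize()
dem = ee.Image("USGS/SRTMGL1_003")
region = ee.Geometry.Rectangle([-121.0, 46.5, -120.5, 47.0])
contours = create_contours(dem, min_value=0, max_value=4000, interval=100, region=region)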
create_download_button(label, data, file_name=None, mime=None, key=None, help=None, on_click=None, args=None, **kwargs)
¶
Streamlit function to create a download button.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
label |
str |
A short label explaining to the user what this button is for. |
required |
data |
str | list |
The contents of the file to be downloaded. See example below for caching techniques to avoid recomputing this data unnecessarily. |
required |
file_name |
str |
An optional string to use as the name of the file to be downloaded, such as 'my_file.csv'. If not specified, the name will be automatically generated. Defaults to None. |
None |
mime |
str |
The MIME type of the data. If None, defaults to "text/plain" (if data is of type str or is a textual file) or "application/octet-stream" (if data is of type bytes or is a binary file). Defaults to None. |
None |
key |
str |
An optional string or integer to use as the unique key for the widget. If this is omitted, a key will be generated for the widget based on its content. Multiple widgets of the same type may not share the same key. Defaults to None. |
None |
help |
str |
An optional tooltip that gets displayed when the button is hovered over. Defaults to None. |
None |
on_click |
str |
An optional callback invoked when this button is clicked. Defaults to None. |
None |
args |
tuple |
An optional tuple of args to pass to the callback. Defaults to None. |
None |
kwargs |
dict |
An optional dict of keyword args to pass to the callback. |
{} |
Source code in geemap/common.py
def create_download_button(
label,
data,
file_name=None,
mime=None,
key=None,
help=None,
on_click=None,
args=None,
**kwargs,
):
"""Streamlit function to create a download button.
Args:
label (str): A short label explaining to the user what this button is for.
data (str | list): The contents of the file to be downloaded. See example below for caching techniques to avoid recomputing this data unnecessarily.
file_name (str, optional): An optional string to use as the name of the file to be downloaded, such as 'my_file.csv'. If not specified, the name will be automatically generated. Defaults to None.
mime (str, optional): The MIME type of the data. If None, defaults to "text/plain" (if data is of type str or is a textual file) or "application/octet-stream" (if data is of type bytes or is a binary file). Defaults to None.
key (str, optional): An optional string or integer to use as the unique key for the widget. If this is omitted, a key will be generated for the widget based on its content. Multiple widgets of the same type may not share the same key. Defaults to None.
help (str, optional): An optional tooltip that gets displayed when the button is hovered over. Defaults to None.
on_click (str, optional): An optional callback invoked when this button is clicked. Defaults to None.
args (tuple, optional): An optional tuple of args to pass to the callback. Defaults to None.
kwargs (dict, optional): An optional dict of keyword args to pass to the callback.
"""
try:
import streamlit as st
import pandas as pd
if isinstance(data, str):
if file_name is None:
file_name = data.split("/")[-1]
if data.endswith(".csv"):
data = pd.read_csv(data).to_csv()
if mime is None:
mime = "text/csv"
return st.download_button(
label, data, file_name, mime, key, help, on_click, args, **kwargs
)
elif (
data.endswith(".gif") or data.endswith(".png") or data.endswith(".jpg")
):
if mime is None:
mime = f"image/{os.path.splitext(data)[1][1:]}"
with open(data, "rb") as file:
return st.download_button(
label,
file,
file_name,
mime,
key,
help,
on_click,
args,
**kwargs,
)
else:
return st.download_button(
label,
data,
file_name,
mime,
key,
help,
on_click,
args,
**kwargs,
)
except ImportError:
print("Streamlit is not installed. Please run 'pip install streamlit'.")
return
except Exception as e:
raise Exception(e)
create_download_link(filename, title='Click here to download: ')
¶
Creates an HTML download link for a file, e.g., in a voila dashboard. Adapted from https://github.com/voila-dashboards/voila/issues/578
Parameters:
Name | Type | Description | Default |
---|---|---|---|
filename |
str |
The file path to the file to download |
required |
title |
str |
The title text of the download link. Defaults to "Click here to download: ". |
'Click here to download: ' |
Returns:
Type | Description |
---|---|
str |
HTML download URL. |
Source code in geemap/common.py
def create_download_link(filename, title="Click here to download: "):
"""Downloads a file from voila. Adopted from https://github.com/voila-dashboards/voila/issues/578
Args:
filename (str): The file path to the file to download
title (str, optional): The title text of the download link. Defaults to "Click here to download: ".
Returns:
str: HTML download URL.
"""
import base64
from IPython.display import HTML
data = open(filename, "rb").read()
b64 = base64.b64encode(data)
payload = b64.decode()
basename = os.path.basename(filename)
html = '<a download="{filename}" href="data:text/csv;base64,{payload}" style="color:#0000FF;" target="_blank">{title}</a>'
html = html.format(payload=payload, title=title + f" {basename}", filename=basename)
return HTML(html)
create_grid(ee_object, scale, proj=None)
¶
Create a grid covering an Earth Engine object.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
ee_object |
ee.Image | ee.Geometry | ee.FeatureCollection |
The Earth Engine object. |
required |
scale |
float |
The grid cell size. |
required |
proj |
str |
The projection. Defaults to None. |
None |
Returns:
Type | Description |
---|---|
ee.FeatureCollection |
The grid as a feature collection. |
Source code in geemap/common.py
def create_grid(ee_object, scale, proj=None):
"""Create a grid covering an Earth Engine object.
Args:
ee_object (ee.Image | ee.Geometry | ee.FeatureCollection): The Earth Engine object.
scale (float): The grid cell size.
proj (str, optional): The projection. Defaults to None.
Returns:
ee.FeatureCollection: The grid as a feature collection.
"""
if isinstance(ee_object, ee.FeatureCollection) or isinstance(ee_object, ee.Image):
geometry = ee_object.geometry()
elif isinstance(ee_object, ee.Geometry):
geometry = ee_object
else:
raise ValueError(
"ee_object must be an ee.FeatureCollection, ee.Image, or ee.Geometry"
)
if proj is None:
proj = geometry.projection()
grid = geometry.coveringGrid(proj, scale)
return grid
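A sketch generating a 10 km grid over a country boundary (assumes the Earth Engine API is initialized; the country name is an example):
import ee
from geemap.common import create_grid
ee.Initialize()
roi = ee.FeatureCollection("FAO/GAUL/2015/level0").filter(
    ee.Filter.eq("ADM0_NAME", "Switzerland")
)
grid = create_grid(roi, scale=10000, proj="EPSG:3857")
print(grid.size().getInfo())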
create_legend(title='Legend', labels=None, colors=None, legend_dict=None, builtin_legend=None, opacity=1.0, position='bottomright', draggable=True, output=None, style={})
¶
Create a legend in HTML format. Reference: https://bit.ly/3oV6vnH
Parameters:
Name | Type | Description | Default |
---|---|---|---|
title |
str |
Title of the legend. Defaults to 'Legend'. |
'Legend' |
colors |
list |
A list of legend colors. Defaults to None. |
None |
labels |
list |
A list of legend labels. Defaults to None. |
None |
legend_dict |
dict |
A dictionary containing legend labels as keys and colors as values. If provided, labels and colors will be ignored. Defaults to None. |
None |
builtin_legend |
str |
Name of the builtin legend to add to the map. Defaults to None. |
None |
opacity |
float |
The opacity of the legend. Defaults to 1.0. |
1.0 |
position |
str |
The position of the legend, can be one of the following: "topleft", "topright", "bottomleft", "bottomright". Defaults to "bottomright". |
'bottomright' |
draggable |
bool |
If True, the legend can be dragged to a new position. Defaults to True. |
True |
output |
str |
The output file path (*.html) to save the legend. Defaults to None. |
None |
style |
Additional keyword arguments to style the legend, such as position, bottom, right, z-index, border, background-color, border-radius, padding, font-size, etc. The default style is: style = { 'position': 'fixed', 'z-index': '9999', 'border': '2px solid grey', 'background-color': 'rgba(255, 255, 255, 0.8)', 'border-radius': '5px', 'padding': '10px', 'font-size': '14px', 'bottom': '20px', 'right': '5px' } |
{} |
Returns:
Type | Description |
---|---|
str |
The HTML code of the legend. |
Source code in geemap/common.py
def create_legend(
title="Legend",
labels=None,
colors=None,
legend_dict=None,
builtin_legend=None,
opacity=1.0,
position="bottomright",
draggable=True,
output=None,
style={},
):
"""Create a legend in HTML format. Reference: https://bit.ly/3oV6vnH
Args:
title (str, optional): Title of the legend. Defaults to 'Legend'.
colors (list, optional): A list of legend colors. Defaults to None.
labels (list, optional): A list of legend labels. Defaults to None.
legend_dict (dict, optional): A dictionary containing legend labels as keys and colors as values.
If provided, labels and colors will be ignored. Defaults to None.
builtin_legend (str, optional): Name of the builtin legend to add to the map. Defaults to None.
opacity (float, optional): The opacity of the legend. Defaults to 1.0.
position (str, optional): The position of the legend, can be one of the following:
"topleft", "topright", "bottomleft", "bottomright". Defaults to "bottomright".
draggable (bool, optional): If True, the legend can be dragged to a new position. Defaults to True.
output (str, optional): The output file path (*.html) to save the legend. Defaults to None.
style: Additional keyword arguments to style the legend, such as position, bottom, right, z-index,
border, background-color, border-radius, padding, font-size, etc. The default style is:
style = {
'position': 'fixed',
'z-index': '9999',
'border': '2px solid grey',
'background-color': 'rgba(255, 255, 255, 0.8)',
'border-radius': '5px',
'padding': '10px',
'font-size': '14px',
'bottom': '20px',
'right': '5px'
}
Returns:
str: The HTML code of the legend.
"""
import pkg_resources
from .legends import builtin_legends
pkg_dir = os.path.dirname(pkg_resources.resource_filename("geemap", "geemap.py"))
legend_template = os.path.join(pkg_dir, "data/template/legend_style.html")
if draggable:
legend_template = os.path.join(pkg_dir, "data/template/legend.txt")
if not os.path.exists(legend_template):
raise FileNotFoundError("The legend template does not exist.")
if labels is not None:
if not isinstance(labels, list):
print("The legend keys must be a list.")
return
else:
labels = ["One", "Two", "Three", "Four", "etc"]
if colors is not None:
if not isinstance(colors, list):
print("The legend colors must be a list.")
return
elif all(isinstance(item, tuple) for item in colors):
try:
colors = [rgb_to_hex(x) for x in colors]
except Exception as e:
print(e)
elif all((item.startswith("#") and len(item) == 7) for item in colors):
pass
elif all((len(item) == 6) for item in colors):
pass
else:
print("The legend colors must be a list of tuples.")
return
else:
colors = [
"#8DD3C7",
"#FFFFB3",
"#BEBADA",
"#FB8072",
"#80B1D3",
]
if len(labels) != len(colors):
print("The legend keys and values must be the same length.")
return
allowed_builtin_legends = builtin_legends.keys()
if builtin_legend is not None:
if builtin_legend not in allowed_builtin_legends:
print(
"The builtin legend must be one of the following: {}".format(
", ".join(allowed_builtin_legends)
)
)
return
else:
legend_dict = builtin_legends[builtin_legend]
labels = list(legend_dict.keys())
colors = list(legend_dict.values())
if legend_dict is not None:
if not isinstance(legend_dict, dict):
print("The legend dict must be a dictionary.")
return
else:
labels = list(legend_dict.keys())
colors = list(legend_dict.values())
if all(isinstance(item, tuple) for item in colors):
try:
colors = [rgb_to_hex(x) for x in colors]
except Exception as e:
print(e)
allowed_positions = [
"topleft",
"topright",
"bottomleft",
"bottomright",
]
if position not in allowed_positions:
raise ValueError(
"The position must be one of the following: {}".format(
", ".join(allowed_positions)
)
)
if position == "bottomright":
if "bottom" not in style:
style["bottom"] = "20px"
if "right" not in style:
style["right"] = "5px"
if "left" in style:
del style["left"]
if "top" in style:
del style["top"]
elif position == "bottomleft":
if "bottom" not in style:
style["bottom"] = "5px"
if "left" not in style:
style["left"] = "5px"
if "right" in style:
del style["right"]
if "top" in style:
del style["top"]
elif position == "topright":
if "top" not in style:
style["top"] = "5px"
if "right" not in style:
style["right"] = "5px"
if "left" in style:
del style["left"]
if "bottom" in style:
del style["bottom"]
elif position == "topleft":
if "top" not in style:
style["top"] = "5px"
if "left" not in style:
style["left"] = "5px"
if "right" in style:
del style["right"]
if "bottom" in style:
del style["bottom"]
if "position" not in style:
style["position"] = "fixed"
if "z-index" not in style:
style["z-index"] = "9999"
if "background-color" not in style:
style["background-color"] = "rgba(255, 255, 255, 0.8)"
if "padding" not in style:
style["padding"] = "10px"
if "border-radius" not in style:
style["border-radius"] = "5px"
if "font-size" not in style:
style["font-size"] = "14px"
content = []
with open(legend_template) as f:
lines = f.readlines()
if draggable:
for index, line in enumerate(lines):
if index < 36:
content.append(line)
elif index == 36:
line = lines[index].replace("Legend", title)
content.append(line)
elif index < 39:
content.append(line)
elif index == 39:
for i, color in enumerate(colors):
item = f" <li><span style='background:{check_color(color)};opacity:{opacity};'></span>{labels[i]}</li>\n"
content.append(item)
elif index > 41:
content.append(line)
content = content[3:-1]
else:
for index, line in enumerate(lines):
if index < 8:
content.append(line)
elif index == 8:
for key, value in style.items():
content.append(
" {}: {};\n".format(key.replace("_", "-"), value)
)
elif index < 17:
pass
elif index < 19:
content.append(line)
elif index == 19:
content.append(line.replace("Legend", title))
elif index < 22:
content.append(line)
elif index == 22:
for index, key in enumerate(labels):
color = colors[index]
if not color.startswith("#"):
color = "#" + color
item = " <li><span style='background:{};opacity:{};'></span>{}</li>\n".format(
color, opacity, key
)
content.append(item)
elif index < 33:
pass
else:
content.append(line)
legend_text = "".join(content)
if output is not None:
with open(output, "w") as f:
f.write(legend_text)
else:
return legend_text
create_nlcd_qml(out_qml)
¶
Create a QGIS Layer Style (.qml) for NLCD data
Parameters:
Name | Type | Description | Default |
---|---|---|---|
out_qml |
str |
File path to the output qml. |
required |
Source code in geemap/common.py
def create_nlcd_qml(out_qml):
"""Create a QGIS Layer Style (.qml) for NLCD data
Args:
out_qml (str): File path to the output qml.
"""
import pkg_resources
pkg_dir = os.path.dirname(pkg_resources.resource_filename("geemap", "geemap.py"))
data_dir = os.path.join(pkg_dir, "data")
template_dir = os.path.join(data_dir, "template")
qml_template = os.path.join(template_dir, "NLCD.qml")
out_dir = os.path.dirname(out_qml)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
shutil.copyfile(qml_template, out_qml)
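A minimal usage sketch (the output path below is hypothetical); the function simply copies the bundled NLCD.qml template to the requested location:
from geemap.common import create_nlcd_qml

# Copy the bundled NLCD style template to a local .qml file (hypothetical path).
create_nlcd_qml("styles/nlcd.qml")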
credentials_in_colab()
¶
Checks if the ee credentials file exists in Google Colab.
Returns:
Type | Description |
---|---|
bool |
Returns True if the Earth Engine credentials file exists in the Colab runtime, False otherwise. |
Source code in geemap/common.py
def credentials_in_colab():
"""Checks if the ee credentials file exists in Google Colab.
Returns:
bool: Returns True if the Earth Engine credentials file exists in the Colab runtime, False otherwise.
"""
credentials_path = "/root/.config/earthengine/credentials"
if os.path.exists(credentials_path):
return True
else:
return False
credentials_in_drive()
¶
Checks if the ee credentials file exists in Google Drive.
Returns:
Type | Description |
---|---|
bool |
Returns True if the Earth Engine credentials file exists in the mounted Google Drive, False otherwise. |
Source code in geemap/common.py
def credentials_in_drive():
"""Checks if the ee credentials file exists in Google Drive.
Returns:
bool: Returns True if the Earth Engine credentials file exists in the mounted Google Drive, False otherwise.
"""
credentials_path = "/content/drive/My Drive/.config/earthengine/credentials"
if os.path.exists(credentials_path):
return True
else:
return False
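A sketch of how the two credential checks above might be combined in a Colab notebook to decide whether interactive authentication is needed; the exact authentication flow is left to the caller:
import ee
from geemap.common import credentials_in_colab, credentials_in_drive

# Authenticate only if no Earth Engine credentials file is found in Colab or Drive.
if not (credentials_in_colab() or credentials_in_drive()):
    ee.Authenticate()
ee.Initialize()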
csv_points_to_shp(in_csv, out_shp, latitude='latitude', longitude='longitude')
¶
Converts a csv file containing points (latitude, longitude) into a shapefile.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
in_csv |
str |
File path or HTTP URL to the input csv file. For example, https://raw.githubusercontent.com/giswqs/data/main/world/world_cities.csv |
required |
out_shp |
str |
File path to the output shapefile. |
required |
latitude |
str |
Column name for the latitude column. Defaults to 'latitude'. |
'latitude' |
longitude |
str |
Column name for the longitude column. Defaults to 'longitude'. |
'longitude' |
Source code in geemap/common.py
def csv_points_to_shp(in_csv, out_shp, latitude="latitude", longitude="longitude"):
"""Converts a csv file containing points (latitude, longitude) into a shapefile.
Args:
in_csv (str): File path or HTTP URL to the input csv file. For example, https://raw.githubusercontent.com/giswqs/data/main/world/world_cities.csv
out_shp (str): File path to the output shapefile.
latitude (str, optional): Column name for the latitude column. Defaults to 'latitude'.
longitude (str, optional): Column name for the longitude column. Defaults to 'longitude'.
"""
import whitebox
if in_csv.startswith("http") and in_csv.endswith(".csv"):
out_dir = os.path.join(os.path.expanduser("~"), "Downloads")
out_name = os.path.basename(in_csv)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
download_from_url(in_csv, out_dir=out_dir, verbose=False)
in_csv = os.path.join(out_dir, out_name)
wbt = whitebox.WhiteboxTools()
in_csv = os.path.abspath(in_csv)
out_shp = os.path.abspath(out_shp)
if not os.path.exists(in_csv):
raise Exception("The provided csv file does not exist.")
with open(in_csv, encoding="utf-8") as csv_file:
reader = csv.DictReader(csv_file)
fields = reader.fieldnames
xfield = fields.index(longitude)
yfield = fields.index(latitude)
wbt.csv_points_to_vector(in_csv, out_shp, xfield=xfield, yfield=yfield, epsg=4326)
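A usage sketch using the sample world_cities.csv mentioned in the docstring; it assumes the whitebox package is installed and writes the shapefile to the current directory:
from geemap.common import csv_points_to_shp

url = "https://raw.githubusercontent.com/giswqs/data/main/world/world_cities.csv"
# Convert the CSV points to a shapefile in the current working directory.
csv_points_to_shp(url, "world_cities.shp")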
csv_to_df(in_csv, **kwargs)
¶
Converts a CSV file to pandas dataframe.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
in_csv |
str |
File path to the input CSV. |
required |
Returns:
Type | Description |
---|---|
pd.DataFrame |
pandas DataFrame |
Source code in geemap/common.py
def csv_to_df(in_csv, **kwargs):
"""Converts a CSV file to pandas dataframe.
Args:
in_csv (str): File path to the input CSV.
Returns:
pd.DataFrame: pandas DataFrame
"""
import pandas as pd
in_csv = github_raw_url(in_csv)
try:
return pd.read_csv(in_csv, **kwargs)
except Exception as e:
raise Exception(e)
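A short sketch; any extra keyword arguments are forwarded to pandas.read_csv, so pandas options such as nrows work as expected:
from geemap.common import csv_to_df

url = "https://raw.githubusercontent.com/giswqs/data/main/world/world_cities.csv"
# Read only the first ten rows; nrows is passed through to pandas.read_csv.
df = csv_to_df(url, nrows=10)
print(df.head())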
csv_to_ee(in_csv, latitude='latitude', longitude='longitude', encoding='utf-8', geodesic=True)
¶
Converts a CSV file containing points (latitude, longitude) to an ee.FeatureCollection.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
in_csv |
str |
The file path to the input CSV file. |
required |
latitude |
str |
The name of the column containing latitude coordinates. Defaults to "latitude". |
'latitude' |
longitude |
str |
The name of the column containing longitude coordinates. Defaults to "longitude". |
'longitude' |
encoding |
str |
The encoding of characters. Defaults to "utf-8". |
'utf-8' |
geodesic |
bool |
Whether line segments should be interpreted as spherical geodesics. If false, indicates that line segments should be interpreted as planar lines in the specified CRS. If absent, defaults to true if the CRS is geographic (including the default EPSG:4326), or to false if the CRS is projected. |
True |
Returns:
Type | Description |
---|---|
ee.FeatureCollection |
The ee.FeatureCollection of points converted from the input CSV. |
Source code in geemap/common.py
def csv_to_ee(
in_csv, latitude="latitude", longitude="longitude", encoding="utf-8", geodesic=True
):
"""Creates points for a CSV file and exports data as a GeoJSON.
Args:
in_csv (str): The file path to the input CSV file.
latitude (str, optional): The name of the column containing latitude coordinates. Defaults to "latitude".
longitude (str, optional): The name of the column containing longitude coordinates. Defaults to "longitude".
encoding (str, optional): The encoding of characters. Defaults to "utf-8".
geodesic (bool, optional): Whether line segments should be interpreted as spherical geodesics. If false, indicates that line segments should be interpreted as planar lines in the specified CRS. If absent, defaults to true if the CRS is geographic (including the default EPSG:4326), or to false if the CRS is projected.
Returns:
ee.FeatureCollection: The ee.FeatureCollection of points converted from the input CSV.
"""
geojson = csv_to_geojson(
in_csv, latitude=latitude, longitude=longitude, encoding=encoding
)
fc = geojson_to_ee(geojson, geodesic=geodesic)
return fc
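A usage sketch, assuming Earth Engine has been initialized and the CSV (a hypothetical local file) has 'latitude' and 'longitude' columns:
import ee
from geemap.common import csv_to_ee

ee.Initialize()
# Convert the CSV points to an ee.FeatureCollection (file path is illustrative).
fc = csv_to_ee("world_cities.csv", latitude="latitude", longitude="longitude")
print(fc.size().getInfo())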
csv_to_gdf(in_csv, latitude='latitude', longitude='longitude', encoding='utf-8')
¶
Creates points for a CSV file and converts them to a GeoDataFrame.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
in_csv |
str |
The file path to the input CSV file. |
required |
latitude |
str |
The name of the column containing latitude coordinates. Defaults to "latitude". |
'latitude' |
longitude |
str |
The name of the column containing longitude coordinates. Defaults to "longitude". |
'longitude' |
encoding |
str |
The encoding of characters. Defaults to "utf-8". |
'utf-8' |
Returns:
Type | Description |
---|---|
object |
GeoDataFrame. |
Source code in geemap/common.py
def csv_to_gdf(in_csv, latitude="latitude", longitude="longitude", encoding="utf-8"):
"""Creates points for a CSV file and converts them to a GeoDataFrame.
Args:
in_csv (str): The file path to the input CSV file.
latitude (str, optional): The name of the column containing latitude coordinates. Defaults to "latitude".
longitude (str, optional): The name of the column containing longitude coordinates. Defaults to "longitude".
encoding (str, optional): The encoding of characters. Defaults to "utf-8".
Returns:
object: GeoDataFrame.
"""
check_package(name="geopandas", URL="https://geopandas.org")
import geopandas as gpd
out_dir = os.getcwd()
out_geojson = os.path.join(out_dir, random_string() + ".geojson")
csv_to_geojson(in_csv, out_geojson, latitude, longitude, encoding)
gdf = gpd.read_file(out_geojson)
os.remove(out_geojson)
return gdf
csv_to_geojson(in_csv, out_geojson=None, latitude='latitude', longitude='longitude', encoding='utf-8')
¶
Creates points for a CSV file and exports data as a GeoJSON.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
in_csv |
str |
The file path to the input CSV file. |
required |
out_geojson |
str |
The file path to the exported GeoJSON. Defaults to None. |
None |
latitude |
str |
The name of the column containing latitude coordinates. Defaults to "latitude". |
'latitude' |
longitude |
str |
The name of the column containing longitude coordinates. Defaults to "longitude". |
'longitude' |
encoding |
str |
The encoding of characters. Defaults to "utf-8". |
'utf-8' |
Source code in geemap/common.py
def csv_to_geojson(
in_csv,
out_geojson=None,
latitude="latitude",
longitude="longitude",
encoding="utf-8",
):
"""Creates points for a CSV file and exports data as a GeoJSON.
Args:
in_csv (str): The file path to the input CSV file.
out_geojson (str, optional): The file path to the exported GeoJSON. Defaults to None.
latitude (str, optional): The name of the column containing latitude coordinates. Defaults to "latitude".
longitude (str, optional): The name of the column containing longitude coordinates. Defaults to "longitude".
encoding (str, optional): The encoding of characters. Defaults to "utf-8".
"""
import pandas as pd
in_csv = github_raw_url(in_csv)
if out_geojson is not None:
out_geojson = check_file_path(out_geojson)
df = pd.read_csv(in_csv)
geojson = df_to_geojson(
df, latitude=latitude, longitude=longitude, encoding=encoding
)
if out_geojson is None:
return geojson
else:
with open(out_geojson, "w", encoding=encoding) as f:
f.write(json.dumps(geojson))
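A sketch showing both ways of calling the function: returning the GeoJSON dictionary or writing it to disk (file paths are illustrative):
from geemap.common import csv_to_geojson

# Return the GeoJSON as a dictionary ...
geojson = csv_to_geojson("world_cities.csv")
# ... or write it to a file by supplying out_geojson.
csv_to_geojson("world_cities.csv", out_geojson="world_cities.geojson")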
csv_to_shp(in_csv, out_shp, latitude='latitude', longitude='longitude', encoding='utf-8')
¶
Converts a csv file with latlon info to a point shapefile.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
in_csv |
str |
The input csv file containing longitude and latitude columns. |
required |
out_shp |
str |
The file path to the output shapefile. |
required |
latitude |
str |
The column name of the latitude column. Defaults to 'latitude'. |
'latitude' |
longitude |
str |
The column name of the longitude column. Defaults to 'longitude'. |
'longitude' |
encoding |
str |
The encoding of characters. Defaults to 'utf-8'. |
'utf-8' |
Source code in geemap/common.py
def csv_to_shp(
in_csv, out_shp, latitude="latitude", longitude="longitude", encoding="utf-8"
):
"""Converts a csv file with latlon info to a point shapefile.
Args:
in_csv (str): The input csv file containing longitude and latitude columns.
out_shp (str): The file path to the output shapefile.
latitude (str, optional): The column name of the latitude column. Defaults to 'latitude'.
longitude (str, optional): The column name of the longitude column. Defaults to 'longitude'.
encoding (str, optional): The encoding of characters. Defaults to 'utf-8'.
"""
import shapefile as shp
if in_csv.startswith("http") and in_csv.endswith(".csv"):
in_csv = github_raw_url(in_csv)
in_csv = download_file(in_csv, quiet=True, overwrite=True)
try:
points = shp.Writer(out_shp, shapeType=shp.POINT)
with open(in_csv, encoding=encoding) as csvfile:
csvreader = csv.DictReader(csvfile)
header = csvreader.fieldnames
[points.field(field) for field in header]
for row in csvreader:
points.point((float(row[longitude])), (float(row[latitude])))
points.record(*tuple([row[f] for f in header]))
out_prj = out_shp.replace(".shp", ".prj")
with open(out_prj, "w") as f:
prj_str = 'GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199433]] '
f.write(prj_str)
except Exception as e:
raise Exception(e)
csv_to_vector(in_csv, output, latitude='latitude', longitude='longitude', encoding='utf-8', **kwargs)
¶
Creates points for a CSV file and converts them to a vector dataset.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
in_csv |
str |
The file path to the input CSV file. |
required |
output |
str |
The file path to the output vector dataset. |
required |
latitude |
str |
The name of the column containing latitude coordinates. Defaults to "latitude". |
'latitude' |
longitude |
str |
The name of the column containing longitude coordinates. Defaults to "longitude". |
'longitude' |
encoding |
str |
The encoding of characters. Defaults to "utf-8". |
'utf-8' |
Source code in geemap/common.py
def csv_to_vector(
in_csv,
output,
latitude="latitude",
longitude="longitude",
encoding="utf-8",
**kwargs,
):
"""Creates points for a CSV file and converts them to a vector dataset.
Args:
in_csv (str): The file path to the input CSV file.
output (str): The file path to the output vector dataset.
latitude (str, optional): The name of the column containing latitude coordinates. Defaults to "latitude".
longitude (str, optional): The name of the column containing longitude coordinates. Defaults to "longitude".
encoding (str, optional): The encoding of characters. Defaults to "utf-8".
"""
gdf = csv_to_gdf(in_csv, latitude, longitude, encoding)
gdf.to_file(output, **kwargs)
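A sketch, assuming geopandas is installed; extra keyword arguments are passed to GeoDataFrame.to_file, and the GeoPackage path below is illustrative:
from geemap.common import csv_to_vector

# Write the CSV points to a GeoPackage; driver is forwarded to GeoDataFrame.to_file.
csv_to_vector("world_cities.csv", "world_cities.gpkg", driver="GPKG")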
date_sequence(start, end, unit, date_format='YYYY-MM-dd', step=1)
¶
Creates a date sequence.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
start |
str |
The start date, e.g., '2000-01-01'. |
required |
end |
str |
The end date, e.g., '2000-12-31'. |
required |
unit |
str |
One of 'year', 'quarter', 'month', 'week', 'day', 'hour', 'minute', or 'second'. |
required |
date_format |
str |
A pattern, as described at http://joda-time.sourceforge.net/apidocs/org/joda/time/format/DateTimeFormat.html. Defaults to 'YYYY-MM-dd'. |
'YYYY-MM-dd' |
step |
int |
The step size. Defaults to 1. |
1 |
Returns:
Type | Description |
---|---|
ee.List |
A list of date sequence. |
Source code in geemap/common.py
def date_sequence(start, end, unit, date_format="YYYY-MM-dd", step=1):
"""Creates a date sequence.
Args:
start (str): The start date, e.g., '2000-01-01'.
end (str): The end date, e.g., '2000-12-31'.
unit (str): One of 'year', 'quarter', 'month', 'week', 'day', 'hour', 'minute', or 'second'.
date_format (str, optional): A pattern, as described at http://joda-time.sourceforge.net/apidocs/org/joda/time/format/DateTimeFormat.html. Defaults to 'YYYY-MM-dd'.
step (int, optional): The step size. Defaults to 1.
Returns:
ee.List: A list of date sequence.
"""
def get_quarter(d):
return str((int(d[5:7]) - 1) // 3 * 3 + 1).zfill(2)
def get_monday(d):
date_obj = datetime.datetime.strptime(d, "%Y-%m-%d")
start_of_week = date_obj - datetime.timedelta(days=date_obj.weekday())
return start_of_week.strftime("%Y-%m-%d")
if unit == "year":
start = start[:4] + "-01-01"
elif unit == "month":
start = start[:7] + "-01"
elif unit == "quarter":
start = start[:5] + get_quarter(start) + "-01"
elif unit == "week":
start = get_monday(start)
start_date = ee.Date(start)
end_date = ee.Date(end)
if unit != "quarter":
count = ee.Number(end_date.difference(start_date, unit)).toInt()
num_seq = ee.List.sequence(0, count)
if step > 1:
num_seq = num_seq.slice(0, num_seq.size(), step)
date_seq = num_seq.map(
lambda d: start_date.advance(d, unit).format(date_format)
)
else:
unit = "month"
count = ee.Number(end_date.difference(start_date, unit)).divide(3).toInt()
num_seq = ee.List.sequence(0, count.multiply(3), 3)
date_seq = num_seq.map(
lambda d: start_date.advance(d, unit).format(date_format)
)
return date_seq
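A usage sketch, assuming Earth Engine has been initialized; the result is an ee.List of formatted date strings:
import ee
from geemap.common import date_sequence

ee.Initialize()
# Every other month between the two dates, formatted as YYYY-MM-dd.
dates = date_sequence("2021-01-01", "2021-12-31", "month", step=2)
print(dates.getInfo())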
delete_shp(in_shp, verbose=False)
¶
Deletes a shapefile.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
in_shp |
str |
The input shapefile to delete. |
required |
verbose |
bool |
Whether to print out descriptive text. Defaults to False. |
False |
Source code in geemap/common.py
def delete_shp(in_shp, verbose=False):
"""Deletes a shapefile.
Args:
in_shp (str): The input shapefile to delete.
verbose (bool, optional): Whether to print out descriptive text. Defaults to False.
"""
from pathlib import Path
in_shp = os.path.abspath(in_shp)
in_dir = os.path.dirname(in_shp)
basename = os.path.basename(in_shp).replace(".shp", "")
files = Path(in_dir).rglob(basename + ".*")
for file in files:
filepath = os.path.join(in_dir, str(file))
try:
os.remove(filepath)
if verbose:
print(f"Deleted {filepath}")
except Exception as e:
if verbose:
print(e)
df_to_ee(df, latitude='latitude', longitude='longitude', **kwargs)
¶
Converts a pandas DataFrame to ee.FeatureCollection.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
df |
pandas.DataFrame |
An input pandas.DataFrame. |
required |
latitude |
str |
Column name for the latitude column. Defaults to 'latitude'. |
'latitude' |
longitude |
str |
Column name for the longitude column. Defaults to 'longitude'. |
'longitude' |
Exceptions:
Type | Description |
---|---|
TypeError |
The input data type must be pandas.DataFrame. |
Returns:
Type | Description |
---|---|
ee.FeatureCollection |
The ee.FeatureCollection converted from the input pandas DataFrame. |
Source code in geemap/common.py
def df_to_ee(df, latitude="latitude", longitude="longitude", **kwargs):
"""Converts a pandas DataFrame to ee.FeatureCollection.
Args:
df (pandas.DataFrame): An input pandas.DataFrame.
latitude (str, optional): Column name for the latitude column. Defaults to 'latitude'.
longitude (str, optional): Column name for the longitude column. Defaults to 'longitude'.
Raises:
TypeError: The input data type must be pandas.DataFrame.
Returns:
ee.FeatureCollection: The ee.FeatureCollection converted from the input pandas DataFrame.
"""
import pandas as pd
if not isinstance(df, pd.DataFrame):
raise TypeError("The input data type must be pandas.DataFrame.")
geojson = df_to_geojson(df, latitude=latitude, longitude=longitude)
fc = geojson_to_ee(geojson)
return fc
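A small sketch with an in-memory DataFrame; column names follow the defaults ('latitude', 'longitude'):
import ee
import pandas as pd
from geemap.common import df_to_ee

ee.Initialize()
df = pd.DataFrame(
    {
        "name": ["Knoxville", "Denver"],
        "latitude": [35.96, 39.74],
        "longitude": [-83.92, -104.99],
    }
)
# Convert the DataFrame rows to point features in an ee.FeatureCollection.
fc = df_to_ee(df)
print(fc.first().getInfo())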
df_to_geojson(df, out_geojson=None, latitude='latitude', longitude='longitude', encoding='utf-8')
¶
Creates points for a Pandas DataFrame and exports data as a GeoJSON.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
df |
pandas.DataFrame |
The input Pandas DataFrame. |
required |
out_geojson |
str |
The file path to the exported GeoJSON. Defaults to None. |
None |
latitude |
str |
The name of the column containing latitude coordinates. Defaults to "latitude". |
'latitude' |
longitude |
str |
The name of the column containing longitude coordinates. Defaults to "longitude". |
'longitude' |
encoding |
str |
The encoding of characters. Defaults to "utf-8". |
'utf-8' |
Source code in geemap/common.py
def df_to_geojson(
df,
out_geojson=None,
latitude="latitude",
longitude="longitude",
encoding="utf-8",
):
"""Creates points for a Pandas DataFrame and exports data as a GeoJSON.
Args:
df (pandas.DataFrame): The input Pandas DataFrame.
out_geojson (str, optional): The file path to the exported GeoJSON. Defaults to None.
latitude (str, optional): The name of the column containing latitude coordinates. Defaults to "latitude".
longitude (str, optional): The name of the column containing longitude coordinates. Defaults to "longitude".
encoding (str, optional): The encoding of characters. Defaults to "utf-8".
"""
from geojson import Feature, FeatureCollection, Point
if out_geojson is not None:
out_dir = os.path.dirname(os.path.abspath(out_geojson))
if not os.path.exists(out_dir):
os.makedirs(out_dir)
features = df.apply(
lambda row: Feature(
geometry=Point((float(row[longitude]), float(row[latitude]))),
properties=dict(row),
),
axis=1,
).tolist()
geojson = FeatureCollection(features=features)
if out_geojson is None:
return geojson
else:
with open(out_geojson, "w", encoding=encoding) as f:
f.write(json.dumps(geojson))
dict_to_csv(data_dict, out_csv, by_row=False, timeout=300, proxies=None)
¶
Downloads an ee.Dictionary as a CSV file.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
data_dict |
ee.Dictionary |
The input ee.Dictionary. |
required |
out_csv |
str |
The output file path to the CSV file. |
required |
by_row |
bool |
Whether to use by row or by column. Defaults to False. |
False |
timeout |
int |
Timeout in seconds. Defaults to 300 seconds. |
300 |
proxies |
dict |
Proxy settings. Defaults to None. |
None |
Source code in geemap/common.py
def dict_to_csv(data_dict, out_csv, by_row=False, timeout=300, proxies=None):
"""Downloads an ee.Dictionary as a CSV file.
Args:
data_dict (ee.Dictionary): The input ee.Dictionary.
out_csv (str): The output file path to the CSV file.
by_row (bool, optional): Whether to use by row or by column. Defaults to False.
timeout (int, optional): Timeout in seconds. Defaults to 300 seconds.
proxies (dict, optional): Proxy settings. Defaults to None.
"""
out_dir = os.path.dirname(out_csv)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
if not by_row:
csv_feature = ee.Feature(None, data_dict)
csv_feat_col = ee.FeatureCollection([csv_feature])
else:
keys = data_dict.keys()
data = keys.map(lambda k: ee.Dictionary({"name": k, "value": data_dict.get(k)}))
csv_feature = data.map(lambda f: ee.Feature(None, f))
csv_feat_col = ee.FeatureCollection(csv_feature)
ee_export_vector(csv_feat_col, out_csv, timeout=timeout, proxies=proxies)
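A sketch that exports zonal statistics computed with reduceRegion; the region, scale, and output path are illustrative:
import ee
from geemap.common import dict_to_csv

ee.Initialize()
dem = ee.Image("USGS/SRTMGL1_003")
region = ee.Geometry.BBox(-115.0, 35.0, -114.0, 36.0)
# reduceRegion returns an ee.Dictionary of band statistics.
stats = dem.reduceRegion(reducer=ee.Reducer.mean(), geometry=region, scale=1000)
dict_to_csv(stats, "stats/mean_elevation.csv", by_row=True)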
display_html(src, width=950, height=600)
¶
Display an HTML file in a Jupyter Notebook.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
src |
str |
File path to HTML file. |
required |
width |
int |
Width of the map. Defaults to 950. |
950 |
height |
int |
Height of the map. Defaults to 600. |
600 |
Source code in geemap/common.py
def display_html(src, width=950, height=600):
"""Display an HTML file in a Jupyter Notebook.
Args:
src (str): File path to HTML file.
width (int, optional): Width of the map. Defaults to 950.
height (int, optional): Height of the map. Defaults to 600.
"""
if not os.path.isfile(src):
raise ValueError(f"{src} is not a valid file path.")
display(IFrame(src=src, width=width, height=height))
download_ee_image(image, filename, region=None, crs=None, crs_transform=None, scale=None, resampling='near', dtype=None, overwrite=True, num_threads=None, max_tile_size=None, max_tile_dim=None, shape=None, scale_offset=False, unmask_value=None, **kwargs)
¶
Download an Earth Engine Image as a GeoTIFF. Images larger than the Earth Engine size limit are split and downloaded as separate tiles, then re-assembled into a single GeoTIFF. See https://github.com/dugalh/geedim/blob/main/geedim/download.py#L574
Parameters:
Name | Type | Description | Default |
---|---|---|---|
image |
ee.Image |
The image to be downloaded. |
required |
filename |
str |
Name of the destination file. |
required |
region |
ee.Geometry |
Region defined by geojson polygon in WGS84. Defaults to the entire image granule. |
None |
crs |
str |
Reproject image(s) to this EPSG or WKT CRS. Where image bands have different CRSs, all are re-projected to this CRS. Defaults to the CRS of the minimum scale band. |
None |
crs_transform |
list |
tuple of float, list of float, rio.Affine, optional List of 6 numbers specifying an affine transform in the specified CRS. In row-major order: [xScale, xShearing, xTranslation, yShearing, yScale, yTranslation]. All bands are re-projected to this transform. |
None |
scale |
float |
Resample image(s) to this pixel scale (size) (m). Where image bands have different scales, all are resampled to this scale. Defaults to the minimum scale of image bands. |
None |
resampling |
ResamplingMethod |
Resampling method, can be 'near', 'bilinear', 'bicubic', or 'average'. Defaults to 'near'. |
'near' |
dtype |
str |
Convert to this data type (uint8, int8, uint16, int16, uint32, int32, float32, or float64). Defaults to auto-selecting the minimum size type that can represent the range of pixel values. |
None |
overwrite |
bool |
Overwrite the destination file if it exists. Defaults to True. |
True |
num_threads |
int |
Number of tiles to download concurrently. Defaults to a sensible auto value. |
None |
max_tile_size |
int |
Maximum tile size (MB). If None, defaults to the Earth Engine download size limit (32 MB). |
None |
max_tile_dim |
int |
Maximum tile width/height (pixels). If None, defaults to the Earth Engine download limit (10000). |
None |
shape |
tuple |
(height, width) dimensions to export (pixels). |
None |
scale_offset |
bool |
Whether to apply any EE band scales and offsets to the image. |
False |
unmask_value |
float |
The value to use for pixels that are masked in the input image. If the exported image contains zero values, you should set the unmask value to a non-zero value so that the zero values are not treated as missing data. Defaults to None. |
None |
Source code in geemap/common.py
def download_ee_image(
image,
filename,
region=None,
crs=None,
crs_transform=None,
scale=None,
resampling="near",
dtype=None,
overwrite=True,
num_threads=None,
max_tile_size=None,
max_tile_dim=None,
shape=None,
scale_offset=False,
unmask_value=None,
**kwargs,
):
"""Download an Earth Engine Image as a GeoTIFF. Images larger than the `Earth Engine size limit are split and downloaded as
separate tiles, then re-assembled into a single GeoTIFF. See https://github.com/dugalh/geedim/blob/main/geedim/download.py#L574
Args:
image (ee.Image): The image to be downloaded.
filename (str): Name of the destination file.
region (ee.Geometry, optional): Region defined by geojson polygon in WGS84. Defaults to the entire image granule.
crs (str, optional): Reproject image(s) to this EPSG or WKT CRS. Where image bands have different CRSs, all are
re-projected to this CRS. Defaults to the CRS of the minimum scale band.
crs_transform (list, optional): tuple of float, list of float, rio.Affine, optional
List of 6 numbers specifying an affine transform in the specified CRS. In row-major order:
[xScale, xShearing, xTranslation, yShearing, yScale, yTranslation]. All bands are re-projected to
this transform.
scale (float, optional): Resample image(s) to this pixel scale (size) (m). Where image bands have different scales,
all are resampled to this scale. Defaults to the minimum scale of image bands.
resampling (ResamplingMethod, optional): Resampling method, can be 'near', 'bilinear', 'bicubic', or 'average'. Defaults to 'near'.
dtype (str, optional): Convert to this data type (`uint8`, `int8`, `uint16`, `int16`, `uint32`, `int32`, `float32`
or `float64`). Defaults to auto select a minimum size type that can represent the range of pixel values.
overwrite (bool, optional): Overwrite the destination file if it exists. Defaults to True.
num_threads (int, optional): Number of tiles to download concurrently. Defaults to a sensible auto value.
max_tile_size: int, optional
Maximum tile size (MB). If None, defaults to the Earth Engine download size limit (32 MB).
max_tile_dim: int, optional
Maximum tile width/height (pixels). If None, defaults to Earth Engine download limit (10000).
shape: tuple of int, optional
(height, width) dimensions to export (pixels).
scale_offset: bool, optional
Whether to apply any EE band scales and offsets to the image.
unmask_value (float, optional): The value to use for pixels that are masked in the input image. If the exported image contains
zero values, you should set the unmask value to a non-zero value so that the zero values are not treated as missing data. Defaults to None.
"""
if os.environ.get("USE_MKDOCS") is not None:
return
try:
import geedim as gd
except ImportError:
raise ImportError(
"Please install geedim using `pip install geedim` or `conda install -c conda-forge geedim`"
)
if not isinstance(image, ee.Image):
raise ValueError("image must be an ee.Image.")
if unmask_value is not None:
if isinstance(region, ee.Geometry):
image = image.clip(region)
elif isinstance(region, ee.FeatureCollection):
image = image.clipToCollection(region)
image = image.unmask(unmask_value, sameFootprint=False)
if region is not None:
kwargs["region"] = region
if crs is not None:
kwargs["crs"] = crs
if crs_transform is not None:
kwargs["crs_transform"] = crs_transform
if scale is not None:
kwargs["scale"] = scale
if resampling is not None:
kwargs["resampling"] = resampling
if dtype is not None:
kwargs["dtype"] = dtype
if max_tile_size is not None:
kwargs["max_tile_size"] = max_tile_size
if max_tile_dim is not None:
kwargs["max_tile_dim"] = max_tile_dim
if shape is not None:
kwargs["shape"] = shape
if scale_offset:
kwargs["scale_offset"] = scale_offset
img = gd.download.BaseImage(image)
img.download(filename, overwrite=overwrite, num_threads=num_threads, **kwargs)
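A usage sketch, assuming the geedim package is installed; the region, scale, and output filename are illustrative:
import ee
from geemap.common import download_ee_image

ee.Initialize()
image = ee.Image("USGS/SRTMGL1_003")
region = ee.Geometry.BBox(-115.0, 35.0, -114.0, 36.0)
# Download the SRTM subset as a GeoTIFF at 90 m resolution.
download_ee_image(image, "srtm_subset.tif", region=region, scale=90, dtype="int16")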
download_ee_image_collection(collection, out_dir=None, filenames=None, region=None, crs=None, crs_transform=None, scale=None, resampling='near', dtype=None, overwrite=True, num_threads=None, max_tile_size=None, max_tile_dim=None, shape=None, scale_offset=False, unmask_value=None, **kwargs)
¶
Download an Earth Engine ImageCollection as GeoTIFFs. Images larger than the Earth Engine size limit are split and downloaded as separate tiles, then re-assembled into a single GeoTIFF. See https://github.com/dugalh/geedim/blob/main/geedim/download.py#L574
Parameters:
Name | Type | Description | Default |
---|---|---|---|
collection |
ee.ImageCollection |
The image collection to be downloaded. |
required |
out_dir |
str |
The directory to save the downloaded images. Defaults to the current directory. |
None |
filenames |
list |
A list of filenames to use for the downloaded images. Defaults to the image ID. |
None |
region |
ee.Geometry |
Region defined by geojson polygon in WGS84. Defaults to the entire image granule. |
None |
crs |
str |
Reproject image(s) to this EPSG or WKT CRS. Where image bands have different CRSs, all are re-projected to this CRS. Defaults to the CRS of the minimum scale band. |
None |
crs_transform |
list |
tuple of float, list of float, rio.Affine, optional List of 6 numbers specifying an affine transform in the specified CRS. In row-major order: [xScale, xShearing, xTranslation, yShearing, yScale, yTranslation]. All bands are re-projected to this transform. |
None |
scale |
float |
Resample image(s) to this pixel scale (size) (m). Where image bands have different scales, all are resampled to this scale. Defaults to the minimum scale of image bands. |
None |
resampling |
ResamplingMethod |
Resampling method, can be 'near', 'bilinear', 'bicubic', or 'average'. Defaults to 'near'. |
'near' |
dtype |
str |
Convert to this data type (uint8, int8, uint16, int16, uint32, int32, float32, or float64). Defaults to auto-selecting the minimum size type that can represent the range of pixel values. |
None |
overwrite |
bool |
Overwrite the destination file if it exists. Defaults to True. |
True |
num_threads |
int |
Number of tiles to download concurrently. Defaults to a sensible auto value. |
None |
max_tile_size |
int |
Maximum tile size (MB). If None, defaults to the Earth Engine download size limit (32 MB). |
None |
max_tile_dim |
int |
Maximum tile width/height (pixels). If None, defaults to the Earth Engine download limit (10000). |
None |
shape |
tuple |
(height, width) dimensions to export (pixels). |
None |
scale_offset |
bool |
Whether to apply any EE band scales and offsets to the image. |
False |
unmask_value |
float |
The value to use for pixels that are masked in the input image. If the exported image contains zero values, you should set the unmask value to a non-zero value so that the zero values are not treated as missing data. Defaults to None. |
None |
Source code in geemap/common.py
def download_ee_image_collection(
collection,
out_dir=None,
filenames=None,
region=None,
crs=None,
crs_transform=None,
scale=None,
resampling="near",
dtype=None,
overwrite=True,
num_threads=None,
max_tile_size=None,
max_tile_dim=None,
shape=None,
scale_offset=False,
unmask_value=None,
**kwargs,
):
"""Download an Earth Engine ImageCollection as GeoTIFFs. Images larger than the `Earth Engine size limit are split and downloaded as
separate tiles, then re-assembled into a single GeoTIFF. See https://github.com/dugalh/geedim/blob/main/geedim/download.py#L574
Args:
collection (ee.ImageCollection): The image collection to be downloaded.
out_dir (str, optional): The directory to save the downloaded images. Defaults to the current directory.
filenames (list, optional): A list of filenames to use for the downloaded images. Defaults to the image ID.
region (ee.Geometry, optional): Region defined by geojson polygon in WGS84. Defaults to the entire image granule.
crs (str, optional): Reproject image(s) to this EPSG or WKT CRS. Where image bands have different CRSs, all are
re-projected to this CRS. Defaults to the CRS of the minimum scale band.
crs_transform (list, optional): tuple of float, list of float, rio.Affine, optional
List of 6 numbers specifying an affine transform in the specified CRS. In row-major order:
[xScale, xShearing, xTranslation, yShearing, yScale, yTranslation]. All bands are re-projected to
this transform.
scale (float, optional): Resample image(s) to this pixel scale (size) (m). Where image bands have different scales,
all are resampled to this scale. Defaults to the minimum scale of image bands.
resampling (ResamplingMethod, optional): Resampling method, can be 'near', 'bilinear', 'bicubic', or 'average'. Defaults to 'near'.
dtype (str, optional): Convert to this data type (`uint8`, `int8`, `uint16`, `int16`, `uint32`, `int32`, `float32`
or `float64`). Defaults to auto select a minimum size type that can represent the range of pixel values.
overwrite (bool, optional): Overwrite the destination file if it exists. Defaults to True.
num_threads (int, optional): Number of tiles to download concurrently. Defaults to a sensible auto value.
max_tile_size: int, optional
Maximum tile size (MB). If None, defaults to the Earth Engine download size limit (32 MB).
max_tile_dim: int, optional
Maximum tile width/height (pixels). If None, defaults to Earth Engine download limit (10000).
shape: tuple of int, optional
(height, width) dimensions to export (pixels).
scale_offset: bool, optional
Whether to apply any EE band scales and offsets to the image.
unmask_value (float, optional): The value to use for pixels that are masked in the input image. If the exported image contains zero values,
you should set the unmask value to a non-zero value so that the zero values are not treated as missing data. Defaults to None.
"""
if not isinstance(collection, ee.ImageCollection):
raise ValueError("ee_object must be an ee.ImageCollection.")
if out_dir is None:
out_dir = os.getcwd()
if not os.path.exists(out_dir):
os.makedirs(out_dir)
try:
count = int(collection.size().getInfo())
print(f"Total number of images: {count}\n")
if filenames is not None:
if len(filenames) != count:
raise ValueError(
f"The number of filenames must match the number of image: {count}"
)
for i in range(0, count):
image = ee.Image(collection.toList(count).get(i))
if filenames is not None:
name = filenames[i]
if not name.endswith(".tif"):
name = name + ".tif"
else:
name = image.get("system:index").getInfo() + ".tif"
filename = os.path.join(os.path.abspath(out_dir), name)
print(f"Downloading {i + 1}/{count}: {name}")
download_ee_image(
image,
filename,
region,
crs,
crs_transform,
scale,
resampling,
dtype,
overwrite,
num_threads,
max_tile_size,
max_tile_dim,
shape,
scale_offset,
unmask_value,
**kwargs,
)
except Exception as e:
raise Exception(f"Error downloading image collection: {e}")
download_ee_image_tiles(image, features, out_dir=None, prefix=None, crs=None, crs_transform=None, scale=None, resampling='near', dtype=None, overwrite=True, num_threads=None, max_tile_size=None, max_tile_dim=None, shape=None, scale_offset=False, unmask_value=None, column=None, **kwargs)
¶
Download an Earth Engine Image as small tiles based on an ee.FeatureCollection. Images larger than the Earth Engine size limit are split and downloaded as separate tiles, then re-assembled into a single GeoTIFF. See https://github.com/dugalh/geedim/blob/main/geedim/download.py#L574
Parameters:
Name | Type | Description | Default |
---|---|---|---|
image |
ee.Image |
The image to be downloaded. |
required |
features |
ee.FeatureCollection |
The features to loop through when downloading the image. |
required |
out_dir |
str |
The output directory. Defaults to None. |
None |
prefix |
str |
The prefix for the output file. Defaults to None. |
None |
crs |
str |
Reproject image(s) to this EPSG or WKT CRS. Where image bands have different CRSs, all are re-projected to this CRS. Defaults to the CRS of the minimum scale band. |
None |
crs_transform |
list |
tuple of float, list of float, rio.Affine, optional List of 6 numbers specifying an affine transform in the specified CRS. In row-major order: [xScale, xShearing, xTranslation, yShearing, yScale, yTranslation]. All bands are re-projected to this transform. |
None |
scale |
float |
Resample image(s) to this pixel scale (size) (m). Where image bands have different scales, all are resampled to this scale. Defaults to the minimum scale of image bands. |
None |
resampling |
ResamplingMethod |
Resampling method, can be 'near', 'bilinear', 'bicubic', or 'average'. Defaults to 'near'. |
'near' |
dtype |
str |
Convert to this data type (uint8, int8, uint16, int16, uint32, int32, float32, or float64). Defaults to auto-selecting the minimum size type that can represent the range of pixel values. |
None |
overwrite |
bool |
Overwrite the destination file if it exists. Defaults to True. |
True |
num_threads |
int |
Number of tiles to download concurrently. Defaults to a sensible auto value. |
None |
max_tile_size |
int |
Maximum tile size (MB). If None, defaults to the Earth Engine download size limit (32 MB). |
None |
max_tile_dim |
int |
Maximum tile width/height (pixels). If None, defaults to the Earth Engine download limit (10000). |
None |
shape |
tuple |
(height, width) dimensions to export (pixels). |
None |
scale_offset |
bool |
Whether to apply any EE band scales and offsets to the image. |
False |
unmask_value |
float |
The value to use for pixels that are masked in the input image. If the exported image contains zero values, you should set the unmask value to a non-zero value so that the zero values are not treated as missing data. Defaults to None. |
None |
column |
str |
The column name to use for the filename. Defaults to None. |
None |
Source code in geemap/common.py
def download_ee_image_tiles(
image,
features,
out_dir=None,
prefix=None,
crs=None,
crs_transform=None,
scale=None,
resampling="near",
dtype=None,
overwrite=True,
num_threads=None,
max_tile_size=None,
max_tile_dim=None,
shape=None,
scale_offset=False,
unmask_value=None,
column=None,
**kwargs,
):
"""Download an Earth Engine Image as small tiles based on ee.FeatureCollection. Images larger than the `Earth Engine size limit are split and downloaded as
separate tiles, then re-assembled into a single GeoTIFF. See https://github.com/dugalh/geedim/blob/main/geedim/download.py#L574
Args:
image (ee.Image): The image to be downloaded.
features (ee.FeatureCollection): The features to loop through when downloading the image.
out_dir (str, optional): The output directory. Defaults to None.
prefix (str, optional): The prefix for the output file. Defaults to None.
crs (str, optional): Reproject image(s) to this EPSG or WKT CRS. Where image bands have different CRSs, all are
re-projected to this CRS. Defaults to the CRS of the minimum scale band.
crs_transform (list, optional): tuple of float, list of float, rio.Affine, optional
List of 6 numbers specifying an affine transform in the specified CRS. In row-major order:
[xScale, xShearing, xTranslation, yShearing, yScale, yTranslation]. All bands are re-projected to
this transform.
scale (float, optional): Resample image(s) to this pixel scale (size) (m). Where image bands have different scales,
all are resampled to this scale. Defaults to the minimum scale of image bands.
resampling (ResamplingMethod, optional): Resampling method, can be 'near', 'bilinear', 'bicubic', or 'average'. Defaults to 'near'.
dtype (str, optional): Convert to this data type (`uint8`, `int8`, `uint16`, `int16`, `uint32`, `int32`, `float32`
or `float64`). Defaults to auto select a minimum size type that can represent the range of pixel values.
overwrite (bool, optional): Overwrite the destination file if it exists. Defaults to True.
num_threads (int, optional): Number of tiles to download concurrently. Defaults to a sensible auto value.
max_tile_size: int, optional
Maximum tile size (MB). If None, defaults to the Earth Engine download size limit (32 MB).
max_tile_dim: int, optional
Maximum tile width/height (pixels). If None, defaults to Earth Engine download limit (10000).
shape: tuple of int, optional
(height, width) dimensions to export (pixels).
scale_offset: bool, optional
Whether to apply any EE band scales and offsets to the image.
unmask_value (float, optional): The value to use for pixels that are masked in the input image. If the exported image contains zero values,
you should set the unmask value to a non-zero value so that the zero values are not treated as missing data. Defaults to None.
column (str, optional): The column name to use for the filename. Defaults to None.
"""
import time
start = time.time()
if os.environ.get("USE_MKDOCS") is not None:
return
if not isinstance(features, ee.FeatureCollection):
raise ValueError("features must be an ee.FeatureCollection.")
if out_dir is None:
out_dir = os.getcwd()
if not os.path.exists(out_dir):
os.makedirs(out_dir)
if prefix is None:
prefix = ""
count = features.size().getInfo()
collection = features.toList(count)
if column is not None:
names = features.aggregate_array(column).getInfo()
else:
names = [str(i + 1).zfill(len(str(count))) for i in range(count)]
for i in range(count):
region = ee.Feature(collection.get(i)).geometry()
filename = os.path.join(
out_dir, "{}{}.tif".format(prefix, names[i].replace("/", "_"))
)
print(f"Downloading {i + 1}/{count}: {filename}")
download_ee_image(
image,
filename,
region,
crs,
crs_transform,
scale,
resampling,
dtype,
overwrite,
num_threads,
max_tile_size,
max_tile_dim,
shape,
scale_offset,
unmask_value,
**kwargs,
)
print(f"Downloaded {count} tiles in {time.time() - start} seconds.")
download_ee_image_tiles_parallel(image, features, out_dir=None, prefix=None, crs=None, crs_transform=None, scale=None, resampling='near', dtype=None, overwrite=True, num_threads=None, max_tile_size=None, max_tile_dim=None, shape=None, scale_offset=False, unmask_value=None, column=None, job_args={'n_jobs': -1}, ee_init=True, **kwargs)
¶
Download an Earth Engine Image as small tiles based on an ee.FeatureCollection. Images larger than the Earth Engine size limit are split and downloaded as separate tiles, then re-assembled into a single GeoTIFF. See https://github.com/dugalh/geedim/blob/main/geedim/download.py#L574
Parameters:
Name | Type | Description | Default |
---|---|---|---|
image |
ee.Image |
The image to be downloaded. |
required |
features |
ee.FeatureCollection |
The features to loop through when downloading the image. |
required |
out_dir |
str |
The output directory. Defaults to None. |
None |
prefix |
str |
The prefix for the output file. Defaults to None. |
None |
crs |
str |
Reproject image(s) to this EPSG or WKT CRS. Where image bands have different CRSs, all are re-projected to this CRS. Defaults to the CRS of the minimum scale band. |
None |
crs_transform |
list |
tuple of float, list of float, rio.Affine, optional List of 6 numbers specifying an affine transform in the specified CRS. In row-major order: [xScale, xShearing, xTranslation, yShearing, yScale, yTranslation]. All bands are re-projected to this transform. |
None |
scale |
float |
Resample image(s) to this pixel scale (size) (m). Where image bands have different scales, all are resampled to this scale. Defaults to the minimum scale of image bands. |
None |
resampling |
ResamplingMethod |
Resampling method, can be 'near', 'bilinear', 'bicubic', or 'average'. Defaults to 'near'. |
'near' |
dtype |
str |
Convert to this data type (uint8, int8, uint16, int16, uint32, int32, float32, or float64). Defaults to auto-selecting the minimum size type that can represent the range of pixel values. |
None |
overwrite |
bool |
Overwrite the destination file if it exists. Defaults to True. |
True |
num_threads |
int |
Number of tiles to download concurrently. Defaults to a sensible auto value. |
None |
max_tile_size |
int |
Maximum tile size (MB). If None, defaults to the Earth Engine download size limit (32 MB). |
None |
max_tile_dim |
int |
Maximum tile width/height (pixels). If None, defaults to the Earth Engine download limit (10000). |
None |
shape |
tuple |
(height, width) dimensions to export (pixels). |
None |
scale_offset |
bool |
Whether to apply any EE band scales and offsets to the image. |
False |
unmask_value |
float |
The value to use for pixels that are masked in the input image. If the exported image contains zero values, you should set the unmask value to a non-zero value so that the zero values are not treated as missing data. Defaults to None. |
None |
column |
str |
The column name in the feature collection to use as the filename. Defaults to None. |
None |
job_args |
dict |
The arguments to pass to joblib.Parallel. Defaults to {"n_jobs": -1}. |
{'n_jobs': -1} |
ee_init |
bool |
Whether to initialize Earth Engine. Defaults to True. |
True |
Source code in geemap/common.py
def download_ee_image_tiles_parallel(
image,
features,
out_dir=None,
prefix=None,
crs=None,
crs_transform=None,
scale=None,
resampling="near",
dtype=None,
overwrite=True,
num_threads=None,
max_tile_size=None,
max_tile_dim=None,
shape=None,
scale_offset=False,
unmask_value=None,
column=None,
job_args={"n_jobs": -1},
ee_init=True,
**kwargs,
):
"""Download an Earth Engine Image as small tiles based on ee.FeatureCollection. Images larger than the `Earth Engine size limit are split and downloaded as
separate tiles, then re-assembled into a single GeoTIFF. See https://github.com/dugalh/geedim/blob/main/geedim/download.py#L574
Args:
image (ee.Image): The image to be downloaded.
features (ee.FeatureCollection): The features to loop through when downloading the image.
out_dir (str, optional): The output directory. Defaults to None.
prefix (str, optional): The prefix for the output file. Defaults to None.
crs (str, optional): Reproject image(s) to this EPSG or WKT CRS. Where image bands have different CRSs, all are
re-projected to this CRS. Defaults to the CRS of the minimum scale band.
crs_transform (list, optional): tuple of float, list of float, rio.Affine, optional
List of 6 numbers specifying an affine transform in the specified CRS. In row-major order:
[xScale, xShearing, xTranslation, yShearing, yScale, yTranslation]. All bands are re-projected to
this transform.
scale (float, optional): Resample image(s) to this pixel scale (size) (m). Where image bands have different scales,
all are resampled to this scale. Defaults to the minimum scale of image bands.
resampling (ResamplingMethod, optional): Resampling method, can be 'near', 'bilinear', 'bicubic', or 'average'. Defaults to 'near'.
dtype (str, optional): Convert to this data type (`uint8`, `int8`, `uint16`, `int16`, `uint32`, `int32`, `float32`
or `float64`). Defaults to auto select a minimum size type that can represent the range of pixel values.
overwrite (bool, optional): Overwrite the destination file if it exists. Defaults to True.
num_threads (int, optional): Number of tiles to download concurrently. Defaults to a sensible auto value.
max_tile_size: int, optional
Maximum tile size (MB). If None, defaults to the Earth Engine download size limit (32 MB).
max_tile_dim: int, optional
Maximum tile width/height (pixels). If None, defaults to Earth Engine download limit (10000).
shape: tuple of int, optional
(height, width) dimensions to export (pixels).
scale_offset: bool, optional
Whether to apply any EE band scales and offsets to the image.
unmask_value (float, optional): The value to use for pixels that are masked in the input image. If the exported image contains zero values,
you should set the unmask value to a non-zero value so that the zero values are not treated as missing data. Defaults to None.
column (str, optional): The column name in the feature collection to use as the filename. Defaults to None.
job_args (dict, optional): The arguments to pass to joblib.Parallel. Defaults to {"n_jobs": -1}.
ee_init (bool, optional): Whether to initialize Earth Engine. Defaults to True.
"""
import joblib
import time
start = time.time()
if os.environ.get("USE_MKDOCS") is not None:
return
if not isinstance(features, ee.FeatureCollection):
raise ValueError("features must be an ee.FeatureCollection.")
if out_dir is None:
out_dir = os.getcwd()
if not os.path.exists(out_dir):
os.makedirs(out_dir)
if prefix is None:
prefix = ""
count = features.size().getInfo()
if column is not None:
names = features.aggregate_array(column).getInfo()
else:
names = [str(i + 1).zfill(len(str(count))) for i in range(count)]
collection = features.toList(count)
def download_data(index):
if ee_init:
ee_initialize(opt_url="https://earthengine-highvolume.googleapis.com")
region = ee.Feature(collection.get(index)).geometry()
filename = os.path.join(
out_dir, "{}{}.tif".format(prefix, names[index].replace("/", "_"))
)
print(f"Downloading {index + 1}/{count}: {filename}")
download_ee_image(
image,
filename,
region,
crs,
crs_transform,
scale,
resampling,
dtype,
overwrite,
num_threads,
max_tile_size,
max_tile_dim,
shape,
scale_offset,
unmask_value,
**kwargs,
)
with joblib.Parallel(**job_args) as parallel:
parallel(joblib.delayed(download_data)(index) for index in range(count))
end = time.time()
print(f"Finished in {end - start} seconds.")
download_ee_video(collection, video_args, out_gif, timeout=300, proxies=None)
¶
Downloads a video thumbnail as a GIF image from Earth Engine.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
collection |
object |
An ee.ImageCollection. |
required |
video_args |
object |
Parameters for exporting the video thumbnail. |
required |
out_gif |
str |
File path to the output GIF. |
required |
timeout |
int |
The number of seconds the request will be timed out. Defaults to 300. |
300 |
proxies |
dict |
A dictionary of proxy servers to use. Defaults to None. |
None |
Source code in geemap/common.py
def download_ee_video(collection, video_args, out_gif, timeout=300, proxies=None):
"""Downloads a video thumbnail as a GIF image from Earth Engine.
Args:
collection (object): An ee.ImageCollection.
video_args (object): Parameters for exporting the video thumbnail.
out_gif (str): File path to the output GIF.
timeout (int, optional): The number of seconds the request will be timed out. Defaults to 300.
proxies (dict, optional): A dictionary of proxy servers to use. Defaults to None.
"""
out_gif = os.path.abspath(out_gif)
if not out_gif.endswith(".gif"):
print("The output file must have an extension of .gif.")
return
if not os.path.exists(os.path.dirname(out_gif)):
os.makedirs(os.path.dirname(out_gif))
if "region" in video_args.keys():
roi = video_args["region"]
if not isinstance(roi, ee.Geometry):
try:
roi = roi.geometry()
except Exception as e:
print("Could not convert the provided roi to ee.Geometry")
print(e)
return
video_args["region"] = roi
if "dimensions" not in video_args:
video_args["dimensions"] = 768
try:
print("Generating URL...")
url = collection.getVideoThumbURL(video_args)
print(f"Downloading GIF image from {url}\nPlease wait ...")
r = requests.get(url, stream=True, timeout=timeout, proxies=proxies)
if r.status_code != 200:
print("An error occurred while downloading.")
print(r.json()["error"]["message"])
return
else:
with open(out_gif, "wb") as fd:
for chunk in r.iter_content(chunk_size=1024):
fd.write(chunk)
print(f"The GIF image has been saved to: {out_gif}")
except Exception as e:
print(e)
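A sketch that animates a year of MODIS NDVI composites; video_args follows the parameters accepted by ee.ImageCollection.getVideoThumbURL, and the region and visualization values are illustrative:
import ee
from geemap.common import download_ee_video

ee.Initialize()
region = ee.Geometry.BBox(-115.0, 35.0, -114.0, 36.0)
collection = (
    ee.ImageCollection("MODIS/061/MOD13A2")
    .filterDate("2021-01-01", "2022-01-01")
    .select("NDVI")
)
video_args = {
    "dimensions": 768,
    "region": region,
    "framesPerSecond": 5,
    "min": 0,
    "max": 9000,
    "palette": ["white", "green"],
}
download_ee_video(collection, video_args, "ndvi_2021.gif")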
download_file(url=None, output=None, quiet=False, proxy=None, speed=None, use_cookies=True, verify=True, id=None, fuzzy=False, resume=False, unzip=True, overwrite=False)
¶
Download a file from URL, including Google Drive shared URL.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
url |
str |
Google Drive URL is also supported. Defaults to None. |
None |
output |
str |
Output filename. Default is basename of URL. |
None |
quiet |
bool |
Suppress terminal output. Default is False. |
False |
proxy |
str |
Proxy. Defaults to None. |
None |
speed |
float |
Download byte size per second (e.g., 256KB/s = 256 * 1024). Defaults to None. |
None |
use_cookies |
bool |
Flag to use cookies. Defaults to True. |
True |
verify |
bool | str |
Either a bool, in which case it controls whether the server's TLS certificate is verified, or a string, in which case it must be a path to a CA bundle to use. Defaults to True. |
True |
id |
str |
Google Drive's file ID. Defaults to None. |
None |
fuzzy |
bool |
Fuzzy extraction of Google Drive's file Id. Defaults to False. |
False |
resume |
bool |
Resume the download from existing tmp file if possible. Defaults to False. |
False |
unzip |
bool |
Unzip the file. Defaults to True. |
True |
overwrite |
bool |
Overwrite the file if it already exists. Defaults to False. |
False |
Returns:
Type | Description |
---|---|
str |
The output file path. |
Source code in geemap/common.py
def download_file(
url=None,
output=None,
quiet=False,
proxy=None,
speed=None,
use_cookies=True,
verify=True,
id=None,
fuzzy=False,
resume=False,
unzip=True,
overwrite=False,
):
"""Download a file from URL, including Google Drive shared URL.
Args:
url (str, optional): Google Drive URL is also supported. Defaults to None.
output (str, optional): Output filename. Default is basename of URL.
quiet (bool, optional): Suppress terminal output. Default is False.
proxy (str, optional): Proxy. Defaults to None.
speed (float, optional): Download byte size per second (e.g., 256KB/s = 256 * 1024). Defaults to None.
use_cookies (bool, optional): Flag to use cookies. Defaults to True.
verify (bool | str, optional): Either a bool, in which case it controls whether the server's TLS certificate is verified, or a string, in which case it must be a path to a CA bundle to use. Defaults to True.
id (str, optional): Google Drive's file ID. Defaults to None.
fuzzy (bool, optional): Fuzzy extraction of Google Drive's file Id. Defaults to False.
resume (bool, optional): Resume the download from existing tmp file if possible. Defaults to False.
unzip (bool, optional): Unzip the file. Defaults to True.
overwrite (bool, optional): Overwrite the file if it already exists. Defaults to False.
Returns:
str: The output file path.
"""
import gdown
if output is None:
if isinstance(url, str) and url.startswith("http"):
output = os.path.basename(url)
if isinstance(url, str):
if os.path.exists(os.path.abspath(output)) and (not overwrite):
print(
f"{output} already exists. Skip downloading. Set overwrite=True to overwrite."
)
return os.path.abspath(output)
else:
url = github_raw_url(url)
if "https://drive.google.com/file/d/" in url:
fuzzy = True
output = gdown.download(
url, output, quiet, proxy, speed, use_cookies, verify, id, fuzzy, resume
)
if unzip and output.endswith(".zip"):
with zipfile.ZipFile(output, "r") as zip_ref:
if not quiet:
print("Extracting files...")
zip_ref.extractall(os.path.dirname(output))
return os.path.abspath(output)
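A sketch using the public test archive referenced elsewhere in this module; the ZIP is downloaded and, because unzip=True, extracted next to the downloaded file:
from geemap.common import download_file

url = "https://github.com/giswqs/whitebox/raw/master/examples/testdata.zip"
# Download testdata.zip and extract it alongside the archive.
path = download_file(url, unzip=True)
print(path)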
download_folder(url=None, id=None, output=None, quiet=False, proxy=None, speed=None, use_cookies=True, remaining_ok=False)
¶
Downloads the entire folder from URL.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
url |
str |
URL of the Google Drive folder. Must be of the format 'https://drive.google.com/drive/folders/{url}'. Defaults to None. |
None |
id |
str |
Google Drive's folder ID. Defaults to None. |
None |
output |
str |
String containing the path of the output folder. Defaults to current working directory. |
None |
quiet |
bool |
Suppress terminal output. Defaults to False. |
False |
proxy |
str |
Proxy. Defaults to None. |
None |
speed |
float |
Download byte size per second (e.g., 256KB/s = 256 * 1024). Defaults to None. |
None |
use_cookies |
bool |
Flag to use cookies. Defaults to True. |
True |
remaining_ok |
bool |
Whether to proceed when the folder contains more files than Google Drive's folder download limit allows; passed through to gdown.download_folder. Defaults to False. |
False |
Returns:
Type | Description |
---|---|
list |
List of files downloaded, or None if failed. |
Source code in geemap/common.py
def download_folder(
url=None,
id=None,
output=None,
quiet=False,
proxy=None,
speed=None,
use_cookies=True,
remaining_ok=False,
):
"""Downloads the entire folder from URL.
Args:
url (str, optional): URL of the Google Drive folder. Must be of the format 'https://drive.google.com/drive/folders/{url}'. Defaults to None.
id (str, optional): Google Drive's folder ID. Defaults to None.
output (str, optional): String containing the path of the output folder. Defaults to current working directory.
quiet (bool, optional): Suppress terminal output. Defaults to False.
proxy (str, optional): Proxy. Defaults to None.
speed (float, optional): Download byte size per second (e.g., 256KB/s = 256 * 1024). Defaults to None.
use_cookies (bool, optional): Flag to use cookies. Defaults to True.
remaining_ok (bool, optional): Flag passed through to gdown.download_folder; if False, an error is raised when the folder contains more files than gdown can retrieve in one pass. Defaults to False.
Returns:
list: List of files downloaded, or None if failed.
"""
import gdown
files = gdown.download_folder(
url, id, output, quiet, proxy, speed, use_cookies, remaining_ok
)
return files
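A minimal usage sketch (illustrative, not part of the source; the folder URL is a placeholder):
from geemap.common import download_folder

# Placeholder Google Drive folder URL; replace FOLDER_ID with a real shared folder ID.
folder_url = "https://drive.google.com/drive/folders/FOLDER_ID"
files = download_folder(url=folder_url, output="downloads", quiet=False)
print(files)  # list of downloaded file paths, or None on failure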
download_from_gdrive(gfile_url, file_name, out_dir='.', unzip=True, verbose=True)
¶
Download a file shared via Google Drive (e.g., https://drive.google.com/file/d/18SUo_HcDGltuWYZs1s7PpOmOq_FvFn04/view?usp=sharing)
Parameters:
Name | Type | Description | Default |
---|---|---|---|
gfile_url | str | The Google Drive shared file URL. | required |
file_name | str | The output file name to use. | required |
out_dir | str | The output directory. Defaults to '.'. | '.' |
unzip | bool | Whether to unzip the output file if it is a zip file. Defaults to True. | True |
verbose | bool | Whether to display the output of the function. Defaults to True. | True |
Source code in geemap/common.py
def download_from_gdrive(gfile_url, file_name, out_dir=".", unzip=True, verbose=True):
"""Download a file shared via Google Drive
(e.g., https://drive.google.com/file/d/18SUo_HcDGltuWYZs1s7PpOmOq_FvFn04/view?usp=sharing)
Args:
gfile_url (str): The Google Drive shared file URL
file_name (str): The output file name to use.
out_dir (str, optional): The output directory. Defaults to '.'.
unzip (bool, optional): Whether to unzip the output file if it is a zip file. Defaults to True.
verbose (bool, optional): Whether to display the output of the function. Defaults to True.
"""
try:
from google_drive_downloader import GoogleDriveDownloader as gdd
except ImportError:
raise Exception(
"Please install the google_drive_downloader package using `pip install googledrivedownloader`"
)
file_id = gfile_url.split("/")[5]
if verbose:
print(f"Google Drive file id: {file_id}")
dest_path = os.path.join(out_dir, file_name)
gdd.download_file_from_google_drive(file_id, dest_path, True, unzip)
return
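A minimal usage sketch (illustrative, not part of the source) using the shared URL from the docstring above; it requires the googledrivedownloader package mentioned in the source:
from geemap.common import download_from_gdrive

gfile_url = "https://drive.google.com/file/d/18SUo_HcDGltuWYZs1s7PpOmOq_FvFn04/view?usp=sharing"
# "data.zip" is a placeholder output name.
download_from_gdrive(gfile_url, file_name="data.zip", out_dir=".", unzip=True)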
download_from_url(url, out_file_name=None, out_dir='.', unzip=True, verbose=True)
¶
Download a file from a URL (e.g., https://github.com/giswqs/whitebox/raw/master/examples/testdata.zip)
Parameters:
Name | Type | Description | Default |
---|---|---|---|
url | str | The HTTP URL to download. | required |
out_file_name | str | The output file name to use. Defaults to None. | None |
out_dir | str | The output directory to use. Defaults to '.'. | '.' |
unzip | bool | Whether to unzip the downloaded file if it is a zip file. Defaults to True. | True |
verbose | bool | Whether to display the output of the function. Defaults to True. | True |
Source code in geemap/common.py
def download_from_url(url, out_file_name=None, out_dir=".", unzip=True, verbose=True):
"""Download a file from a URL (e.g., https://github.com/giswqs/whitebox/raw/master/examples/testdata.zip)
Args:
url (str): The HTTP URL to download.
out_file_name (str, optional): The output file name to use. Defaults to None.
out_dir (str, optional): The output directory to use. Defaults to '.'.
unzip (bool, optional): Whether to unzip the downloaded file if it is a zip file. Defaults to True.
verbose (bool, optional): Whether to display the output of the function. Defaults to True.
"""
in_file_name = os.path.basename(url)
if out_file_name is None:
out_file_name = in_file_name
out_file_path = os.path.join(os.path.abspath(out_dir), out_file_name)
if verbose:
print(f"Downloading {url} ...")
try:
urllib.request.urlretrieve(url, out_file_path)
except Exception:
raise Exception("The URL is invalid. Please double check the URL.")
final_path = out_file_path
if unzip:
# if it is a zip file
if ".zip" in out_file_name:
if verbose:
print(f"Unzipping {out_file_name} ...")
with zipfile.ZipFile(out_file_path, "r") as zip_ref:
zip_ref.extractall(out_dir)
final_path = os.path.join(
os.path.abspath(out_dir), out_file_name.replace(".zip", "")
)
# if it is a tar file
if ".tar" in out_file_name:
if verbose:
print(f"Unzipping {out_file_name} ...")
with tarfile.open(out_file_path, "r") as tar_ref:
def is_within_directory(directory, target):
abs_directory = os.path.abspath(directory)
abs_target = os.path.abspath(target)
prefix = os.path.commonprefix([abs_directory, abs_target])
return prefix == abs_directory
def safe_extract(
tar, path=".", members=None, *, numeric_owner=False
):
for member in tar.getmembers():
member_path = os.path.join(path, member.name)
if not is_within_directory(path, member_path):
raise Exception("Attempted Path Traversal in Tar File")
tar.extractall(path, members, numeric_owner=numeric_owner)
safe_extract(tar_ref, out_dir)
final_path = os.path.join(
os.path.abspath(out_dir), out_file_name.replace(".tar", "")
)
if verbose:
print(f"Data downloaded to: {final_path}")
return
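A minimal usage sketch (illustrative, not part of the source), reusing the example URL from the docstring above:
from geemap.common import download_from_url

# Downloads and unzips the test data into the "testdata" directory.
download_from_url(
    "https://github.com/giswqs/whitebox/raw/master/examples/testdata.zip",
    out_dir="testdata",
)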
download_ned(region, out_dir=None, return_url=False, download_args={}, **kwargs)
¶
Download the US National Elevation Datasets (NED) for a region.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
region | str or list | A filepath to a vector dataset or a list of bounds in the form of [minx, miny, maxx, maxy]. | required |
out_dir | str | The directory to download the files to. Defaults to None, which uses the current working directory. | None |
return_url | bool | Whether to return the download URLs of the files. Defaults to False. | False |
download_args | dict | A dictionary of arguments to pass to the download_file function. Defaults to {}. | {} |
Returns:
Type | Description |
---|---|
list | A list of the download URLs of the files if return_url is True. |
Source code in geemap/common.py
def download_ned(region, out_dir=None, return_url=False, download_args={}, **kwargs):
"""Download the US National Elevation Datasets (NED) for a region.
Args:
region (str | list): A filepath to a vector dataset or a list of bounds in the form of [minx, miny, maxx, maxy].
out_dir (str, optional): The directory to download the files to. Defaults to None, which uses the current working directory.
return_url (bool, optional): Whether to return the download URLs of the files. Defaults to False.
download_args (dict, optional): A dictionary of arguments to pass to the download_file function. Defaults to {}.
Returns:
list: A list of the download URLs of the files if return_url is True.
"""
import geopandas as gpd
if out_dir is None:
out_dir = os.getcwd()
else:
out_dir = os.path.abspath(out_dir)
if isinstance(region, str):
if region.startswith("http"):
region = github_raw_url(region)
region = download_file(region)
elif not os.path.exists(region):
raise ValueError("region must be a path or a URL to a vector dataset.")
roi = gpd.read_file(region, **kwargs)
roi = roi.to_crs(epsg=4326)
bounds = roi.total_bounds
elif isinstance(region, list):
bounds = region
else:
raise ValueError(
"region must be a filepath or a list of bounds in the form of [minx, miny, maxx, maxy]."
)
minx, miny, maxx, maxy = [float(x) for x in bounds]
tiles = []
left = abs(math.floor(minx))
right = abs(math.floor(maxx)) - 1
upper = math.ceil(maxy)
bottom = math.ceil(miny) - 1
for y in range(upper, bottom, -1):
for x in range(left, right, -1):
tile_id = "n{}w{}".format(str(y).zfill(2), str(x).zfill(3))
tiles.append(tile_id)
links = []
filepaths = []
for index, tile in enumerate(tiles):
tif_url = f"https://prd-tnm.s3.amazonaws.com/StagedProducts/Elevation/13/TIFF/current/{tile}/USGS_13_{tile}.tif"
r = requests.head(tif_url)
if r.status_code == 200:
tif = os.path.join(out_dir, os.path.basename(tif_url))
links.append(tif_url)
filepaths.append(tif)
else:
print(f"{tif_url} does not exist.")
if return_url:
return links
else:
for index, link in enumerate(links):
print(f"Downloading {index + 1} of {len(links)}: {os.path.basename(link)}")
download_file(link, filepaths[index], **download_args)
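A minimal usage sketch (illustrative, not part of the source; the bounds are placeholder values in the western United States):
from geemap.common import download_ned

# Bounds are given as [minx, miny, maxx, maxy].
bounds = [-122.5, 37.5, -122.0, 38.0]
urls = download_ned(bounds, return_url=True)   # list the 1x1-degree tile URLs only
# download_ned(bounds, out_dir="ned_tiles")    # or download the GeoTIFF tiles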
dynamic_world(region=None, start_date='2020-01-01', end_date='2021-01-01', clip=False, reducer=None, projection='EPSG:3857', scale=10, return_type='hillshade')
¶
Create 10-m land cover composite based on Dynamic World. The source code is adapted from the following tutorial by Spatial Thoughts: https://developers.google.com/earth-engine/tutorials/community/introduction-to-dynamic-world-pt-1
Parameters:
Name | Type | Description | Default |
---|---|---|---|
region | ee.Geometry or ee.FeatureCollection | The region of interest. | None |
start_date | str or ee.Date | The start date of the query. Defaults to "2020-01-01". | '2020-01-01' |
end_date | str or ee.Date | The end date of the query. Defaults to "2021-01-01". | '2021-01-01' |
clip | bool | Whether to clip the image to the region. Defaults to False. | False |
reducer | ee.Reducer | The reducer to be used. Defaults to None. | None |
projection | str | The projection to be used for creating hillshade. Defaults to "EPSG:3857". | 'EPSG:3857' |
scale | int | The scale to be used for creating hillshade. Defaults to 10. | 10 |
return_type | str | The type of image to be returned. Can be one of 'hillshade', 'visualize', 'class', or 'probability'. Defaults to "hillshade". | 'hillshade' |
Returns:
Type | Description |
---|---|
ee.Image | The image with the specified return_type. |
Source code in geemap/common.py
def dynamic_world(
region=None,
start_date="2020-01-01",
end_date="2021-01-01",
clip=False,
reducer=None,
projection="EPSG:3857",
scale=10,
return_type="hillshade",
):
"""Create 10-m land cover composite based on Dynamic World. The source code is adapted from the following tutorial by Spatial Thoughts:
https://developers.google.com/earth-engine/tutorials/community/introduction-to-dynamic-world-pt-1
Args:
region (ee.Geometry | ee.FeatureCollection): The region of interest.
start_date (str | ee.Date): The start date of the query. Defaults to "2020-01-01".
end_date (str | ee.Date): The end date of the query. Defaults to "2021-01-01".
clip (bool, optional): Whether to clip the image to the region. Defaults to False.
reducer (ee.Reducer, optional): The reducer to be used. Defaults to None.
projection (str, optional): The projection to be used for creating hillshade. Defaults to "EPSG:3857".
scale (int, optional): The scale to be used for creating hillshade. Defaults to 10.
return_type (str, optional): The type of image to be returned. Can be one of 'hillshade', 'visualize', 'class', or 'probability'. Defaults to "hillshade".
Returns:
ee.Image: The image with the specified return_type.
"""
if return_type not in ["hillshade", "visualize", "class", "probability"]:
raise ValueError(
f"{return_type} must be one of 'hillshade', 'visualize', 'class', or 'probability'."
)
if reducer is None:
reducer = ee.Reducer.mode()
dw = ee.ImageCollection("GOOGLE/DYNAMICWORLD/V1").filter(
ee.Filter.date(start_date, end_date)
)
if isinstance(region, ee.FeatureCollection) or isinstance(region, ee.Geometry):
dw = dw.filterBounds(region)
else:
raise ValueError("region must be an ee.FeatureCollection or ee.Geometry.")
# Create a Mode Composite
classification = dw.select("label")
dwComposite = classification.reduce(reducer)
if clip and (region is not None):
if isinstance(region, ee.Geometry):
dwComposite = dwComposite.clip(region)
elif isinstance(region, ee.FeatureCollection):
dwComposite = dwComposite.clipToCollection(region)
elif isinstance(region, ee.Feature):
dwComposite = dwComposite.clip(region.geometry())
dwVisParams = {
"min": 0,
"max": 8,
"palette": [
"#419BDF",
"#397D49",
"#88B053",
"#7A87C6",
"#E49635",
"#DFC35A",
"#C4281B",
"#A59B8F",
"#B39FE1",
],
}
if return_type == "class":
return dwComposite
elif return_type == "visualize":
return dwComposite.visualize(**dwVisParams)
else:
# Create a Top-1 Probability Hillshade Visualization
probabilityBands = [
"water",
"trees",
"grass",
"flooded_vegetation",
"crops",
"shrub_and_scrub",
"built",
"bare",
"snow_and_ice",
]
# Select probability bands
probabilityCol = dw.select(probabilityBands)
# Create a multi-band image with the average pixel-wise probability
# for each band across the time-period
meanProbability = probabilityCol.reduce(ee.Reducer.mean())
# Composites have a default projection that is not suitable
# for hillshade computation.
# Set a EPSG:3857 projection with 10m scale
proj = ee.Projection(projection).atScale(scale)
meanProbability = meanProbability.setDefaultProjection(proj)
# Create the Top1 Probability Hillshade
top1Probability = meanProbability.reduce(ee.Reducer.max())
if clip and (region is not None):
if isinstance(region, ee.Geometry):
top1Probability = top1Probability.clip(region)
elif isinstance(region, ee.FeatureCollection):
top1Probability = top1Probability.clipToCollection(region)
elif isinstance(region, ee.Feature):
top1Probability = top1Probability.clip(region.geometry())
if return_type == "probability":
return top1Probability
else:
top1Confidence = top1Probability.multiply(100).int()
hillshade = ee.Terrain.hillshade(top1Confidence).divide(255)
rgbImage = dwComposite.visualize(**dwVisParams).divide(255)
probabilityHillshade = rgbImage.multiply(hillshade)
return probabilityHillshade
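A minimal usage sketch (illustrative, not part of the source; the bounding box coordinates are placeholders):
import ee
from geemap.common import dynamic_world

ee.Initialize()
# Illustrative bounding box given as (west, south, east, north).
region = ee.Geometry.BBox(-89.7, 42.9, -89.0, 43.2)
composite = dynamic_world(
    region, start_date="2021-01-01", end_date="2022-01-01", return_type="hillshade"
)
# With a geemap Map: Map.addLayer(composite, {}, "Dynamic World")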
dynamic_world_s2(region=None, start_date='2020-01-01', end_date='2021-01-01', clip=False, cloud_pct=0.35, reducer=None)
¶
Create Sentinel-2 composite for the Dynamic World Land Cover product.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
region | ee.Geometry or ee.FeatureCollection | The region of interest. Defaults to None. | None |
start_date | str or ee.Date | The start date of the query. Defaults to "2020-01-01". | '2020-01-01' |
end_date | str or ee.Date | The end date of the query. Defaults to "2021-01-01". | '2021-01-01' |
clip | bool | Whether to clip the image to the region. Defaults to False. | False |
cloud_pct | float | The percentage of cloud cover to be used for filtering. Defaults to 0.35. | 0.35 |
reducer | ee.Reducer | The reducer to be used for creating image composite. Defaults to None. | None |
Returns:
Type | Description |
---|---|
ee.Image | The Sentinel-2 composite. |
Source code in geemap/common.py
def dynamic_world_s2(
region=None,
start_date="2020-01-01",
end_date="2021-01-01",
clip=False,
cloud_pct=0.35,
reducer=None,
):
"""Create Sentinel-2 composite for the Dynamic World Land Cover product.
Args:
region (ee.Geometry | ee.FeatureCollection): The region of interest. Defaults to None.
start_date (str | ee.Date): The start date of the query. Defaults to "2020-01-01".
end_date (str | ee.Date): The end date of the query. Defaults to "2021-01-01".
clip (bool, optional): Whether to clip the image to the region. Defaults to False.
cloud_pct (float, optional): The percentage of cloud cover to be used for filtering. Defaults to 0.35.
reducer (ee.Reducer, optional): The reducer to be used for creating image composite. Defaults to None.
Returns:
ee.Image: The Sentinel-2 composite.
"""
s2 = (
ee.ImageCollection("COPERNICUS/S2_HARMONIZED")
.filterDate(start_date, end_date)
.filter(ee.Filter.lt("CLOUDY_PIXEL_PERCENTAGE", cloud_pct * 100))
)
if isinstance(region, ee.FeatureCollection) or isinstance(region, ee.Geometry):
s2 = s2.filterBounds(region)
else:
raise ValueError("region must be an ee.FeatureCollection or ee.Geometry.")
if reducer is None:
reducer = ee.Reducer.median()
image = s2.reduce(reducer).rename(s2.first().bandNames())
if clip and (region is not None):
if isinstance(region, ee.Geometry):
image = image.clip(region)
elif isinstance(region, ee.FeatureCollection):
image = image.clipToCollection(region)
return image
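A minimal usage sketch (illustrative, not part of the source; the bounds and visualization parameters are placeholders):
import ee
from geemap.common import dynamic_world_s2

ee.Initialize()
region = ee.Geometry.BBox(-89.7, 42.9, -89.0, 43.2)  # illustrative bounds
image = dynamic_world_s2(region, start_date="2021-01-01", end_date="2022-01-01")
# The composite keeps the original Sentinel-2 band names (see the rename call above).
vis = {"bands": ["B4", "B3", "B2"], "min": 0, "max": 3000}
# With a geemap Map: Map.addLayer(image, vis, "Sentinel-2 composite")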
edit_download_html(htmlWidget, filename, title='Click here to download: ')
¶
Creates a download link that lets a file be downloaded from a Voila app. Adapted from https://github.com/voila-dashboards/voila/issues/578#issuecomment-617668058
Parameters:
Name | Type | Description | Default |
---|---|---|---|
htmlWidget | object | The HTML widget to display the URL. | required |
filename | str | File path to download. | required |
title | str | Download description. Defaults to "Click here to download: ". | 'Click here to download: ' |
Source code in geemap/common.py
def edit_download_html(htmlWidget, filename, title="Click here to download: "):
"""Downloads a file from voila. Adopted from https://github.com/voila-dashboards/voila/issues/578#issuecomment-617668058
Args:
htmlWidget (object): The HTML widget to display the URL.
filename (str): File path to download.
title (str, optional): Download description. Defaults to "Click here to download: ".
"""
# from IPython.display import HTML
# import ipywidgets as widgets
import base64
# Change widget html temporarily to a font-awesome spinner
htmlWidget.value = '<i class="fa fa-spinner fa-spin fa-2x fa-fw"></i><span class="sr-only">Loading...</span>'
# Process raw data
data = open(filename, "rb").read()
b64 = base64.b64encode(data)
payload = b64.decode()
basename = os.path.basename(filename)
# Create and assign html to widget
html = '<a download="{filename}" href="data:text/csv;base64,{payload}" target="_blank">{title}</a>'
htmlWidget.value = html.format(
payload=payload, title=title + basename, filename=basename
)
# htmlWidget = widgets.HTML(value = '')
# htmlWidget
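A minimal usage sketch (illustrative, not part of the source; "results.csv" is a placeholder path to a file that already exists):
import ipywidgets as widgets
from geemap.common import edit_download_html

link_widget = widgets.HTML(value="")
# display(link_widget) in the notebook, then turn it into a download link.
edit_download_html(link_widget, "results.csv", title="Click here to download: ")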
ee_api_to_csv(outfile=None, timeout=300, proxies=None)
¶
Extracts Earth Engine API documentation from https://developers.google.com/earth-engine/api_docs as a csv file.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
outfile | str | The output file path to a csv file. Defaults to None. | None |
timeout | int | Timeout in seconds. Defaults to 300. | 300 |
proxies | dict | Proxy settings. Defaults to None. | None |
Source code in geemap/common.py
def ee_api_to_csv(outfile=None, timeout=300, proxies=None):
"""Extracts Earth Engine API documentation from https://developers.google.com/earth-engine/api_docs as a csv file.
Args:
outfile (str, optional): The output file path to a csv file. Defaults to None.
timeout (int, optional): Timeout in seconds. Defaults to 300.
proxies (dict, optional): Proxy settings. Defaults to None.
"""
import pkg_resources
from bs4 import BeautifulSoup
pkg_dir = os.path.dirname(pkg_resources.resource_filename("geemap", "geemap.py"))
data_dir = os.path.join(pkg_dir, "data")
template_dir = os.path.join(data_dir, "template")
csv_file = os.path.join(template_dir, "ee_api_docs.csv")
if outfile is None:
outfile = csv_file
else:
if not outfile.endswith(".csv"):
print("The output file must end with .csv")
return
else:
out_dir = os.path.dirname(outfile)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
url = "https://developers.google.com/earth-engine/api_docs"
try:
r = requests.get(url, timeout=timeout, proxies=proxies)
soup = BeautifulSoup(r.content, "html.parser")
names = []
descriptions = []
functions = []
returns = []
arguments = []
types = []
details = []
names = [h2.text for h2 in soup.find_all("h2")]
descriptions = [h2.next_sibling.next_sibling.text for h2 in soup.find_all("h2")]
func_tables = soup.find_all("table", class_="blue")
functions = [func_table.find("code").text for func_table in func_tables]
returns = [func_table.find_all("td")[1].text for func_table in func_tables]
detail_tables = []
tables = soup.find_all("table", class_="blue")
for table in tables:
item = table.next_sibling
if item.attrs == {"class": ["details"]}:
detail_tables.append(item)
else:
detail_tables.append("")
for detail_table in detail_tables:
if detail_table != "":
items = [item.text for item in detail_table.find_all("code")]
else:
items = ""
arguments.append(items)
for detail_table in detail_tables:
if detail_table != "":
items = [item.text for item in detail_table.find_all("td")]
items = items[1::3]
else:
items = ""
types.append(items)
for detail_table in detail_tables:
if detail_table != "":
items = [item.text for item in detail_table.find_all("p")]
else:
items = ""
details.append(items)
with open(outfile, "w", encoding="utf-8") as csv_file:
csv_writer = csv.writer(csv_file, delimiter="\t")
csv_writer.writerow(
[
"name",
"description",
"function",
"returns",
"argument",
"type",
"details",
]
)
for i in range(len(names)):
name = names[i]
description = descriptions[i]
function = functions[i]
return_type = returns[i]
argument = "|".join(arguments[i])
argu_type = "|".join(types[i])
detail = "|".join(details[i])
csv_writer.writerow(
[
name,
description,
function,
return_type,
argument,
argu_type,
detail,
]
)
except Exception as e:
print(e)
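A minimal usage sketch (illustrative, not part of the source; the output path is a placeholder and includes a directory because the source creates the parent folder of outfile):
from geemap.common import ee_api_to_csv

# Writes the scraped API documentation as a tab-delimited CSV file.
ee_api_to_csv(outfile="data/ee_api_docs.csv")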
ee_data_html(asset)
¶
Generates HTML from an asset to be used in the HTML widget.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
asset | dict | A dictionary containing an Earth Engine asset. | required |
Returns:
Type | Description |
---|---|
str | A string containing HTML. |
Source code in geemap/common.py
def ee_data_html(asset):
"""Generates HTML from an asset to be used in the HTML widget.
Args:
asset (dict): A dictionary containing an Earth Engine asset.
Returns:
str: A string containing HTML.
"""
try:
asset_title = asset.get("title", "Unknown")
asset_dates = asset.get("dates", "Unknown")
ee_id_snippet = asset.get("id", "Unknown")
asset_uid = asset.get("uid", None)
asset_url = asset.get("asset_url", "")
code_url = asset.get("sample_code", None)
thumbnail_url = asset.get("thumbnail_url", None)
asset_type = asset.get("type", "Unknown")
if asset_type == "image":
ee_id_snippet = "ee.Image('{}')".format(ee_id_snippet)
elif asset_type == "image_collection":
ee_id_snippet = "ee.ImageCollection('{}')".format(ee_id_snippet)
elif asset_type == "table":
ee_id_snippet = "ee.FeatureCollection('{}')".format(ee_id_snippet)
if not code_url and asset_uid:
coder_url = f"""https://code.earthengine.google.com/?scriptPath=Examples%3ADatasets%2F{asset_uid}"""
else:
coder_url = code_url
## ee datasets always have an asset_url, and should have a thumbnail
catalog = (
bool(asset_url)
* f"""
<h4>Data Catalog</h4>
<p style="margin-left: 40px"><a href="{asset_url.replace('terms-of-use','description')}" target="_blank">Description</a></p>
<p style="margin-left: 40px"><a href="{asset_url.replace('terms-of-use','bands')}" target="_blank">Bands</a></p>
<p style="margin-left: 40px"><a href="{asset_url.replace('terms-of-use','image-properties')}" target="_blank">Properties</a></p>
<p style="margin-left: 40px"><a href="{coder_url}" target="_blank">Example</a></p>
"""
)
thumbnail = (
bool(thumbnail_url)
* f"""
<h4>Dataset Thumbnail</h4>
<img src="{thumbnail_url}">
"""
)
## only community datasets have a code_url
alternative = (
bool(code_url)
* f"""
<h4>Community Catalog</h4>
<p style="margin-left: 40px">{asset.get('provider','Provider unknown')}</p>
<p style="margin-left: 40px">{asset.get('tags','Tags unknown')}</p>
<p style="margin-left: 40px"><a href="{coder_url}" target="_blank">Example</a></p>
"""
)
template = f"""
<html>
<body>
<h3>{asset_title}</h3>
<h4>Dataset Availability</h4>
<p style="margin-left: 40px">{asset_dates}</p>
<h4>Earth Engine Snippet</h4>
<p style="margin-left: 40px">{ee_id_snippet}</p>
{catalog}
{alternative}
{thumbnail}
</body>
</html>
"""
return template
except Exception as e:
print(e)
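A minimal usage sketch (illustrative, not part of the source). The dictionary keys mirror those read in the source above; the values are placeholders:
from geemap.common import ee_data_html

asset = {
    "title": "SRTM Digital Elevation Data 30m",
    "dates": "2000-02-11 - 2000-02-22",
    "id": "USGS/SRTMGL1_003",
    "uid": "USGS_SRTMGL1_003",
    "type": "image",
    "asset_url": "https://developers.google.com/earth-engine/datasets/catalog/USGS_SRTMGL1_003",
    "thumbnail_url": "",
}
html = ee_data_html(asset)
# ipywidgets.HTML(value=html) renders the result in a notebook.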
ee_data_thumbnail(asset_id, timeout=300, proxies=None)
¶
Retrieves the thumbnail URL of an Earth Engine asset.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
asset_id | str | An Earth Engine asset id. | required |
timeout | int | Timeout in seconds. Defaults to 300. | 300 |
proxies | dict | Proxy settings. Defaults to None. | None |
Returns:
Type | Description |
---|---|
str | An http url of the thumbnail. |
Source code in geemap/common.py
def ee_data_thumbnail(asset_id, timeout=300, proxies=None):
"""Retrieves the thumbnail URL of an Earth Engine asset.
Args:
asset_id (str): An Earth Engine asset id.
timeout (int, optional): Timeout in seconds. Defaults to 300.
proxies (dict, optional): Proxy settings. Defaults to None.
Returns:
str: An http url of the thumbnail.
"""
import urllib.request
from bs4 import BeautifulSoup
asset_uid = asset_id.replace("/", "_")
asset_url = "https://developers.google.com/earth-engine/datasets/catalog/{}".format(
asset_uid
)
thumbnail_url = "https://mw1.google.com/ges/dd/images/{}_sample.png".format(
asset_uid
)
r = requests.get(thumbnail_url, timeout=timeout, proxies=proxies)
try:
if r.status_code != 200:
html_page = urllib.request.urlopen(asset_url)
soup = BeautifulSoup(html_page, features="html.parser")
for img in soup.findAll("img"):
if "sample.png" in img.get("src"):
thumbnail_url = img.get("src")
return thumbnail_url
return thumbnail_url
except Exception as e:
print(e)
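A minimal usage sketch (illustrative, not part of the source):
from geemap.common import ee_data_thumbnail

url = ee_data_thumbnail("USGS/SRTMGL1_003")
print(url)  # URL of the dataset's sample thumbnail image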
ee_export_geojson(ee_object, filename=None, selectors=None, timeout=300, proxies=None)
¶
Exports Earth Engine FeatureCollection to geojson.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
ee_object | object | ee.FeatureCollection to export. | required |
filename | str | Output file name. Defaults to None. | None |
selectors | list | A list of attributes to export. Defaults to None. | None |
timeout | int | Timeout in seconds. Defaults to 300 seconds. | 300 |
proxies | dict | Proxy settings. Defaults to None. | None |
Source code in geemap/common.py
def ee_export_geojson(
ee_object, filename=None, selectors=None, timeout=300, proxies=None
):
"""Exports Earth Engine FeatureCollection to geojson.
Args:
ee_object (object): ee.FeatureCollection to export.
filename (str): Output file name. Defaults to None.
selectors (list, optional): A list of attributes to export. Defaults to None.
timeout (int, optional): Timeout in seconds. Defaults to 300 seconds.
proxies (dict, optional): Proxy settings. Defaults to None.
"""
if not isinstance(ee_object, ee.FeatureCollection):
print("The ee_object must be an ee.FeatureCollection.")
return
if filename is None:
out_dir = os.path.join(os.path.expanduser("~"), "Downloads")
filename = os.path.join(out_dir, random_string(6) + ".geojson")
allowed_formats = ["geojson"]
filename = os.path.abspath(filename)
basename = os.path.basename(filename)
name = os.path.splitext(basename)[0]
filetype = os.path.splitext(basename)[1][1:].lower()
if not (filetype.lower() in allowed_formats):
print("The output file type must be geojson.")
return
if selectors is None:
selectors = ee_object.first().propertyNames().getInfo()
selectors = [".geo"] + selectors
elif not isinstance(selectors, list):
print("selectors must be a list, such as ['attribute1', 'attribute2']")
return
else:
allowed_attributes = ee_object.first().propertyNames().getInfo()
for attribute in selectors:
if not (attribute in allowed_attributes):
print(
"Attributes must be one chosen from: {} ".format(
", ".join(allowed_attributes)
)
)
return
try:
# print('Generating URL ...')
url = ee_object.getDownloadURL(
filetype=filetype, selectors=selectors, filename=name
)
# print('Downloading data from {}\nPlease wait ...'.format(url))
r = None
r = requests.get(url, stream=True, timeout=timeout, proxies=proxies)
if r.status_code != 200:
print("An error occurred while downloading. \n Retrying ...")
try:
new_ee_object = ee_object.map(filter_polygons)
print("Generating URL ...")
url = new_ee_object.getDownloadURL(
filetype=filetype, selectors=selectors, filename=name
)
print(f"Downloading data from {url}\nPlease wait ...")
r = requests.get(url, stream=True, timeout=timeout, proxies=proxies)
except Exception as e:
print(e)
with open(filename, "wb") as fd:
for chunk in r.iter_content(chunk_