autotest/test_binaryfile.py (1 addition, 1 deletion)
@@ -275,7 +275,7 @@ def test_load_binary_head_file(example_data_path):

 def test_plot_binary_head_file(example_data_path):
     hf = HeadFile(example_data_path / "freyberg" / "freyberg.githds")
-    hf.mg.set_coord_info(xoff=1000.0, yoff=200.0, angrot=15.0)
+    hf.modelgrid.set_coord_info(xoff=1000.0, yoff=200.0, angrot=15.0)

     assert isinstance(hf.plot(), Axes)
     plt.close()
autotest/test_formattedfile.py (1 addition, 1 deletion)
@@ -69,7 +69,7 @@ def test_headfile_build_index(example_data_path):
 def test_formattedfile_reference(example_data_path):
     h = FormattedHeadFile(example_data_path / "mf2005_test" / "test1tr.githds")
     assert isinstance(h, FormattedHeadFile)
-    h.mg.set_coord_info(xoff=1000.0, yoff=200.0, angrot=15.0)
+    h.modelgrid.set_coord_info(xoff=1000.0, yoff=200.0, angrot=15.0)

     assert isinstance(h.plot(masked_values=[6999.000]), Axes)
     plt.close()
autotest/test_modpathfile.py (8 additions, 16 deletions)
@@ -306,11 +306,10 @@ def test_get_destination_endpoint_data(
     )


-@pytest.mark.parametrize("longfieldname", [True, False])
 @requires_exe("mf6", "mp7")
-@requires_pkg("pyshp", "shapely", name_map={"pyshp": "shapefile"})
-def test_write_shapefile(function_tmpdir, mp7_small, longfieldname):
-    from shapefile import Reader
+@requires_pkg("geopandas", "shapely")
+def test_write_shapefile(function_tmpdir, mp7_small):
+    import geopandas as gpd

     # setup and run model, then copy outputs to function_tmpdir
     sim, forward_model_name, _, _, _ = mp7_small
@@ -330,13 +329,7 @@ def test_write_shapefile(function_tmpdir, mp7_small, longfieldname):
     # define shapefile path
     shp_file = ws / "pathlines.shp"

-    # add a column to the pathline recarray
-    fieldname = "newfield" + ("longname" if longfieldname else "")
-    fieldval = "x"
-    pathlines = [
-        rfn.append_fields(pl, fieldname, list(repeat(fieldval, len(pl))), dtypes="|S1")
-        for pl in pathlines
-    ]
+    pline_names = [name[:10] for name in pathlines[0].dtype.names]

     # write the pathline recarray to shapefile
     pathline_file.write_shapefile(
@@ -350,8 +343,7 @@ def test_write_shapefile(function_tmpdir, mp7_small, longfieldname):
     assert shp_file.is_file()

     # load shapefile
-    with Reader(shp_file) as reader:
-        fieldnames = [f[0] for f in reader.fields[1:]]
-        fieldname = "newfiname_" if longfieldname else fieldname
-        assert fieldname in fieldnames
-        assert all(r[fieldname] == fieldval for r in reader.iterRecords())
+    gdf = gpd.read_file(shp_file)
+    fieldnames = list(gdf)
+    for fname in pline_names:
+        assert fname in fieldnames
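
The new pline_names line works around the ESRI shapefile limit of 10 characters per attribute name: whatever writes the shapefile has to truncate longer recarray field names, so the test checks for the truncated names among the columns geopandas reads back. A minimal sketch of that round trip, independent of FloPy and using a made-up field name (the exact truncation/renaming is handled by the underlying GDAL/OGR shapefile driver):

import tempfile
from pathlib import Path

import geopandas as gpd
from shapely.geometry import Point

with tempfile.TemporaryDirectory() as tmp:
    shp_file = Path(tmp) / "demo.shp"

    # a field name longer than the 10-character shapefile limit (toy data)
    gdf = gpd.GeoDataFrame(
        {"averylongfieldname": [1, 2]},
        geometry=[Point(0.0, 0.0), Point(1.0, 1.0)],
        crs="EPSG:4326",
    )
    gdf.to_file(shp_file)  # the shapefile driver truncates ("launders") long names

    read_back = gpd.read_file(shp_file)
    assert "averylongf" in read_back.columns  # only the first 10 characters survive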
flopy/mbase.py (33 additions, 0 deletions)
@@ -759,6 +759,39 @@ def _output_msg(self, i, add=True):
f"{txt2} the output list."
)

def to_geo_dataframe(self, gdf=None, kper=0):
"""
Method to build a Geodataframe from model inputs. Note: transient data
will only be exported for a single stress period.

Parameters
----------
gdf : GeoDataFrame
optional geopandas geodataframe object to add data to. Default is None
kper : int
stress period to get transient data from

Returns
-------
gdf : GeoDataFrame
"""
if gdf is None:
modelgrid = self.modelgrid
if modelgrid is not None:
gdf = modelgrid.geo_dataframe
else:
raise AttributeError(
"model does not have a grid instance, please supply a geodataframe"
)

for package in self.packagelist:
if package.package_type in ("hfb6",):
continue
if callable(getattr(package, "to_geo_dataframe", None)):
gdf = package.to_geo_dataframe(gdf, kper=kper, sparse=False)

return gdf

def add_output_file(
self,
unit,
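A possible usage sketch for the new BaseModel.to_geo_dataframe: the model name, workspace, and output filename below are placeholders, and whether individual packages contribute columns depends on the package-level to_geo_dataframe implementations elsewhere in this change set (packages without the method are simply skipped by the getattr guard above):

import flopy

# load an existing, georeferenced model (placeholder name/workspace)
ml = flopy.modflow.Modflow.load("freyberg.nam", model_ws="path/to/freyberg")

# one row per grid cell, one column per exported attribute;
# transient inputs are taken from stress period kper only
gdf = ml.to_geo_dataframe(kper=0)

# write to any vector format geopandas supports, e.g. a GeoPackage
gdf.to_file("freyberg_inputs.gpkg", driver="GPKG")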
flopy/mf6/data/mfdataarray.py (102 additions, 0 deletions)
@@ -323,6 +323,64 @@ def data(self):
"""Returns array data. Calls get_data with default parameters."""
return self._get_data()

def to_geo_dataframe(self, gdf=None, name=None, forgive=False, **kwargs):
"""
Method to add an input array to a geopandas GeoDataFrame

Parameters
----------
gdf : GeoDataFrame
optional GeoDataFrame object
name : str
optional attribute name, default uses util2d name
forgive : bool
optional flag to continue if data shape not compatible with GeoDataFrame

Returns
-------
geopandas GeoDataFrame
"""
if self.model is None:
return gdf
else:
modelgrid = self.model.modelgrid
if gdf is None:
if modelgrid is None:
return gdf
gdf = modelgrid.geo_dataframe

if modelgrid is not None:
if modelgrid.grid_type != "unstructured":
ncpl = modelgrid.ncpl
else:
ncpl = modelgrid.nnodes
else:
ncpl = len(gdf)

if name is None:
name = self.name

data = self.array
if data is None:
return gdf

if data.size == ncpl:
gdf[name] = data.ravel()

elif data.size % ncpl == 0:
data = data.reshape((-1, ncpl))
for ix, arr in enumerate(data):
aname = f"{name}_{ix}"
gdf[aname] = arr
elif forgive:
return gdf
else:
raise ValueError(
f"Data size {data.size} not compatible with dataframe length {ncpl}"
)

return gdf

def new_simulation(self, sim_data):
"""Initialize MFArray object for a new simulation

Expand Down Expand Up @@ -1890,6 +1948,50 @@ def _build_period_data(
             output[sp] = data
         return output

+    def to_geo_dataframe(self, gdf=None, kper=0, forgive=False, **kwargs):
+        """
+        Method to add transient array data to a geopandas GeoDataFrame
+
+        Parameters match MFArray.to_geo_dataframe, with kper selecting the
+        stress period to export. Returns the (possibly updated) GeoDataFrame.
+        """
+        if self.model is None:
+            return gdf
+        else:
+            modelgrid = self.model.modelgrid
+            if gdf is None:
+                if modelgrid is None:
+                    return gdf
+                gdf = modelgrid.geo_dataframe
+
+        if modelgrid is not None:
+            if modelgrid.grid_type != "unstructured":
+                ncpl = modelgrid.ncpl
+            else:
+                ncpl = modelgrid.nnodes
+        else:
+            ncpl = len(gdf)
+
+        if self.array is None:
+            return gdf
+
+        name = f"{self.path[1]}_{self.name}"
+
+        data = self.get_data(key=kper, apply_mult=True)
+        if data.size == ncpl:
+            gdf[name] = data.ravel()
+
+        elif data.size % ncpl == 0:
+            data = data.reshape((-1, ncpl))
+            for ix, arr in enumerate(data):
+                aname = f"{name}_{ix}"
+                gdf[aname] = arr
+        elif forgive:
+            return gdf
+        else:
+            raise ValueError(
+                f"Data size {data.size} not compatible with dataframe length {ncpl}"
+            )
+
+        return gdf
+
     def set_record(self, data_record):
         """Sets data and metadata at layer `layer` and time `key` to
         `data_record`. For unlayered data do not pass in `layer`.
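Both to_geo_dataframe variants above rely on the same bookkeeping: an array whose size is a multiple of the number of cells per layer (ncpl) is split into one column per ncpl-sized slice, named with an appended index. A standalone sketch of that branch with made-up dimensions and a hypothetical array name ("botm"):

import numpy as np

ncpl = 6                      # cells per layer (made-up)
data = np.arange(3 * ncpl)    # stand-in for a flattened (nlay, nrow, ncol) array, 3 layers
name = "botm"                 # hypothetical array name

if data.size == ncpl:
    columns = {name: data.ravel()}
elif data.size % ncpl == 0:
    layered = data.reshape((-1, ncpl))  # one row per ncpl-sized slice (layer)
    columns = {f"{name}_{ix}": arr for ix, arr in enumerate(layered)}
else:
    raise ValueError(f"Data size {data.size} not compatible with dataframe length {ncpl}")

assert list(columns) == ["botm_0", "botm_1", "botm_2"]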
flopy/mf6/data/mfdatalist.py (100 additions, 0 deletions)
@@ -145,6 +145,55 @@ def to_array(self, kper=0, mask=False):
         model_grid = self.data_dimensions.get_model_grid()
         return list_to_array(sarr, model_grid, kper, mask)

+    def to_geo_dataframe(self, gdf=None, sparse=False, **kwargs):
+        """
+        Method to add data to a GeoDataFrame for exporting as a geospatial file
+
+        Parameters
+        ----------
+        gdf : GeoDataFrame
+            optional GeoDataFrame instance. If GeoDataFrame is None, one will be
+            constructed from modelgrid information
+        sparse : bool
+            boolean flag for sparse dataframe construction. Default is False
+
+        Returns
+        -------
+        GeoDataFrame
+        """
+        if self.model is None:
+            return gdf
+        else:
+            modelgrid = self.model.modelgrid
+            if modelgrid is None:
+                return gdf
+
+        if gdf is None:
+            gdf = modelgrid.geo_dataframe
+
+        data = self.to_array(mask=True)
+        if data is None:
+            return gdf
+
+        col_names = []
+        for name, array3d in data.items():
+            aname = f"{self.path[1].lower()}_{name}"
+            if modelgrid.grid_type == "unstructured":
+                array = array3d.ravel()
+                gdf[aname] = array
+                col_names.append(aname)
+            else:
+                for lay in range(modelgrid.nlay):
+                    arr = array3d[lay].ravel()
+                    gdf[f"{aname}_{lay}"] = arr
+                    col_names.append(f"{aname}_{lay}")
+
+        if sparse:
+            gdf = gdf.dropna(subset=col_names, how="all")
+            gdf = gdf.dropna(axis="columns", how="all")
+
+        return gdf
+
     def new_simulation(self, sim_data):
         """Initialize MFList object for a new simulation.

Expand Down Expand Up @@ -1596,6 +1645,57 @@ def to_array(self, kper=0, mask=False):
"""Returns list data as an array."""
return super().to_array(kper, mask)

def to_geo_dataframe(self, gdf=None, kper=0, sparse=False, **kwargs):
"""
Method to add data to a GeoDataFrame for exporting as a geospatial file

Parameters
----------
gdf : GeoDataFrame
optional GeoDataFrame instance. If GeoDataFrame is None, one will be
constructed from modelgrid information
kper : int
stress period to export
sparse : bool
boolean flag for sparse dataframe construction. Default is False

Returns
-------
GeoDataFrame
"""
if self.model is None:
return gdf
else:
modelgrid = self.model.modelgrid
if modelgrid is None:
return gdf

if gdf is None:
gdf = modelgrid.geo_dataframe

data = self.to_array(kper=kper, mask=True)
if data is None:
return gdf

col_names = []
for name, array3d in data.items():
aname = f"{self.path[1].lower()}_{name}"
if modelgrid.grid_type == "unstructured":
array = array3d.ravel()
gdf[aname] = array
col_names.append(aname)
else:
for lay in range(modelgrid.nlay):
arr = array3d[lay].ravel()
gdf[f"{aname}_{lay}"] = arr.ravel()
col_names.append(f"{aname}_{lay}")

if sparse:
gdf = gdf.dropna(subset=col_names, how="all")
gdf = gdf.dropna(axis="columns", how="all")

return gdf

def remove_transient_key(self, transient_key):
"""Remove transient stress period key. Method is used
internally by FloPy and is not intended to the end user.
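The sparse=True option in the two list exporters above is just two pandas dropna calls: rows where every exported stress column is NaN are removed, then columns that end up entirely NaN are removed. A sketch with a plain DataFrame standing in for the grid GeoDataFrame and hypothetical WEL column names built the same way as f"{aname}_{lay}":

import numpy as np
import pandas as pd

# toy stand-in for the grid GeoDataFrame: 4 cells, a WEL flux defined in two of them
gdf = pd.DataFrame(
    {
        "node": [0, 1, 2, 3],
        "wel_0_q_0": [np.nan, -500.0, np.nan, -250.0],  # layer-0 column from to_array()
        "wel_0_q_1": [np.nan, np.nan, np.nan, np.nan],  # layer-1 column, no wells
    }
)
col_names = ["wel_0_q_0", "wel_0_q_1"]

# sparse=True behavior: keep only cells with any stress data, then drop empty columns
sparse = gdf.dropna(subset=col_names, how="all")
sparse = sparse.dropna(axis="columns", how="all")

assert len(sparse) == 2
assert "wel_0_q_1" not in sparse.columns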