0.8.0 #174

3 changes: 2 additions & 1 deletion .gitignore
@@ -2,4 +2,5 @@
*.pyc
_dumpster
test.py
pyProm.egg*
pyProm.egg*
docs/_build
3 changes: 3 additions & 0 deletions HISTORY.rst
@@ -1,5 +1,8 @@
Release History

0.8.0 ( ?????????????? )
++++++++++++++++++++++++

0.7.3 (October, 6, 2020)
++++++++++++++++++++++++
* update to python 3.8 and change tests for compatibility
2 changes: 1 addition & 1 deletion pyprom/__init__.py
@@ -6,7 +6,7 @@

PyProm: This library includes tools for surface network analysis.
"""
version_info = (0, 7, 3)
version_info = (0, 8, 0)
__name__ = 'pyProm'
__doc__ = 'A python surface network analysis script'
__author__ = 'Marc Howes'
5 changes: 3 additions & 2 deletions pyprom/dataload.py
@@ -8,8 +8,8 @@
import os
import numpy
import logging
import gdal
import osr
from osgeo import gdal
from osgeo import osr

from .lib.datamap import ProjectionDataMap

@@ -87,6 +87,7 @@ def __init__(self, filename, epsg_alias="WGS84"):
if spatialRef.IsProjected:
# Create target Spatial Reference for converting coordinates.
target = osr.SpatialReference()
target.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
target.ImportFromEPSG(epsg_code)
transform = osr.CoordinateTransformation(spatialRef, target)
# create a reverse transform for translating back
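Note on this hunk for reviewers who have not hit the GDAL 3 changes: from osgeo import gdal/osr is the supported import path for the bindings, and GDAL 3 follows the EPSG authority axis order (latitude first for EPSG:4326), so OAMS_TRADITIONAL_GIS_ORDER is used to keep the familiar x/y (longitude/latitude) ordering. A minimal sketch outside pyProm; the EPSG codes and coordinates below are illustrative, not taken from this diff:

    from osgeo import osr

    # WGS84 geographic coordinates; EPSG codes here are illustrative.
    source = osr.SpatialReference()
    source.ImportFromEPSG(4326)
    # Keep x=longitude, y=latitude ordering under GDAL 3.
    source.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)

    # WGS84 / UTM zone 19N as a projected target (also illustrative).
    target = osr.SpatialReference()
    target.ImportFromEPSG(32619)
    target.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)

    transform = osr.CoordinateTransformation(source, target)
    # With the traditional strategy, points are passed as (x/lon, y/lat).
    print(transform.TransformPoint(-71.0, 44.0))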
15 changes: 9 additions & 6 deletions pyprom/domain_map.py
@@ -26,6 +26,7 @@
from .lib.containers.summit_domain import SummitDomain
from .lib.logic.basin_saddle_finder import BasinSaddleFinder
from .lib.logic.summit_domain_walk import Walk
from .lib.contexts.manager import FeatureContextManager
from .lib.constants import DOMAIN_EXTENSION
from . import version_info

@@ -39,12 +39,15 @@ class DomainMap:
required to calculate the surface network.
"""

def __init__(self, data,
def __init__(self,
data,
summits=SummitsContainer([]),
saddles=SaddlesContainer([]),
runoffs=RunoffsContainer([]),
summit_domains=[],
linkers=[]):
linkers=[],
context=None
):
"""
A DomainMap consumes either a :class:`pyprom.lib.datamap.DataMap` object or
a :class:`pyprom.dataload.Loader` child object.
@@ -73,6 +77,7 @@ def __init__(self, data,
self.runoffs = runoffs
self.linkers = linkers
self.summit_domains = summit_domains
self.context = context
self.extent = 'LL: {}\n LR: {}\n UL: {}\n UR: {}\n'.format(
self.datamap.lower_left,
self.datamap.lower_right,
@@ -93,10 +98,11 @@ def run(self, sparse=False, superSparse=False, rebuildSaddles=False):
:param bool superSparse: just do feature discovery
:param bool rebuildSaddles: command AnalyzeData to rebuild saddles
"""
# Expunge any existing saddles, runoffs, summits, and linkers
# Expunge any existing saddles, runoffs, summits, and contexts
self.saddles = SaddlesContainer([])
self.summits = SummitsContainer([])
self.runoffs = RunoffsContainer([])
self.context = None
self.linkers = list()
# Find Features
self.summits, self.saddles, self.runoffs =\
@@ -223,9 +229,6 @@ def to_dict(self):
"""
Returns dict representation of this :class:`DomainMap`

:param bool noWalkPath: exclude
:class:`pyprom.lib.containers.walkpath.WalkPath` from member
:class:`pyprom.lib.containers.linker.Linker`
:return: dict() representation of :class:`DomainMap`
:rtype: dict()
"""
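A hedged sketch of how the new context keyword could be supplied when constructing a DomainMap by hand. GDALLoader and the DEM path are assumptions for illustration only; the empty FeatureContextManager mirrors the default built in feature_discovery.py, and run() expunges and rebuilds self.context as the hunk above shows:

    from pyprom.dataload import GDALLoader              # assumed loader class
    from pyprom.domain_map import DomainMap
    from pyprom.lib.contexts.manager import FeatureContextManager

    data = GDALLoader("some_dem.tif")                   # placeholder DEM path

    # Pass an explicit (empty) context; run() resets and repopulates it.
    domain = DomainMap(data, context=FeatureContextManager([], [], []))
    domain.run(superSparse=True)                        # feature discovery only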
22 changes: 16 additions & 6 deletions pyprom/feature_discovery.py
@@ -14,11 +14,9 @@
from timeit import default_timer
from datetime import timedelta
from math import floor
from .lib.locations.gridpoint import GridPoint
from .lib.locations.saddle import Saddle
from .lib.locations.summit import Summit
from .lib.locations.runoff import Runoff
from .lib.locations.base_gridpoint import BaseGridPoint
from .lib.containers.saddles import SaddlesContainer
from .lib.containers.summits import SummitsContainer
from .lib.containers.runoffs import RunoffsContainer
@@ -27,6 +25,7 @@
from .lib.logic.contiguous_neighbors import contiguous_neighbors, touching_neighborhoods
from .lib.logic.shortest_path_by_points import high_perimeter_neighborhood_shortest_path
from .lib.logic.tuple_funcs import highest
from .lib.contexts.manager import FeatureContextManager

from .lib.constants import METERS_TO_FEET

@@ -50,6 +49,7 @@ def __init__(self, datamap):
self.max_y = self.datamap.max_y
self.max_x = self.datamap.max_x
self.explored = defaultdict(dict)
self.context = FeatureContextManager([], [], [])

def run(self, rebuildSaddles=True):
"""
@@ -66,7 +66,7 @@ def run(self, rebuildSaddles=True):
"""
_, _, _ = self.analyze()
# Corners also are runoffs.
self.runoffObjects.extend(make_corner_runoffs(self.datamap))
self.runoffObjects.extend(make_corner_runoffs(self.datamap, self.context))

if rebuildSaddles:
self.logger.info("Rebuilding Saddles")
@@ -93,10 +93,14 @@ def analyze(self):
index = 0
start = default_timer()
then = start
current_x = 0
# Iterate through numpy grid, and keep track of GridPoint coordinates.
while not iterator.finished:
x, y = iterator.multi_index
# core storage is always in metric.
if current_x != x:
del self.explored[current_x]
current_x = x
if self.datamap.unit == "FEET":
self.elevation = float(METERS_TO_FEET * iterator[0])
else:
@@ -127,10 +131,13 @@
for result in results:
if isinstance(result, Summit):
self.summitObjects.append(result)
self.context.add_summit(result)
if isinstance(result, Runoff):
self.runoffObjects.append(result)
self.context.add_saddle(result)
elif isinstance(result, Saddle):
self.saddleObjects.append(result)
self.context.add_saddle(result)
# Reset variables, and go to next gridpoint.
iterator.iternext()
# Free some memory.
@@ -378,7 +385,7 @@ def edge_feature_analysis(self, x, y, perimeter,
if multipoint:
pts = multipoint.points
# this gets the closest single highPerimeterNeighborhood point to our midpoint
highPerimeterNeighborhoods.append([high_perimeter_neighborhood_shortest_path(mid, pts, highPerimeter, self.datamap)])
highPerimeterNeighborhoods.append((high_perimeter_neighborhood_shortest_path(mid, pts, highPerimeter, self.datamap),))
else:
# just use the regular highPerimeterNeighborhoods if not a multipoint
highPerimeterNeighborhoods = highPerimeter
@@ -426,7 +433,7 @@ def edge_feature_analysis(self, x, y, perimeter,
returnable_features.append(saddle)
return returnable_features

def make_corner_runoffs(datamap):
def make_corner_runoffs(datamap, ctx):
"""
Dumb function for generating single point corner runoffs.

@@ -448,4 +455,7 @@ def make_corner_runoffs(datamap):
datamap.get(0, datamap.max_y))
rur.edgePoints.append((0, datamap.max_y, datamap.get(0, datamap.max_y)))

return [rll, rlr, rul, rur]
corners = [rll, rlr, rul, rur]
for corner in corners:
ctx.add_saddle(corner)
return corners
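Two behaviors in this file are easy to miss in review. The current_x bookkeeping in analyze() deletes each row's entries from self.explored as soon as the nditer moves to the next row, which bounds memory on large rasters; and make_corner_runoffs() now registers every corner runoff with the passed-in context. A self-contained sketch of the row-expiry pattern on a small numpy array (shape and values are illustrative):

    from collections import defaultdict
    import numpy

    grid = numpy.arange(12).reshape(3, 4)
    explored = defaultdict(dict)
    iterator = numpy.nditer(grid, flags=['multi_index'])

    current_x = 0
    while not iterator.finished:
        x, y = iterator.multi_index
        # Once the iterator leaves row current_x, its explored entries are
        # never consulted again, so drop them to free memory.
        if current_x != x:
            del explored[current_x]
            current_x = x
        explored[x][y] = True
        iterator.iternext()

    # Only the final row's bookkeeping survives the scan.
    assert list(explored.keys()) == [2]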
12 changes: 12 additions & 0 deletions pyprom/lib/containers/linker.py
@@ -151,6 +151,18 @@ def linkers_to_summits_connected_via_saddle(self, excludeSelf=True,
if _linker_ok(linker, skipDisqualified, {}) and
self._help_exclude_self(linker, excludeSelf)]

def linker_other_side_of_saddle(self):
"""
Much faster, but less robust, than linkers_to_summits_connected_via_saddle.
Uses linker ids.

:return: list of linkers to summits connected to the saddle this
linker links
:rtype: list(:class:`Linker`)
"""

return [x for x in self.saddle.summits if x.id != self.id and not x.disqualified]

def add_to_remote_saddle_and_summit(self, ignoreDuplicates=True):
"""
Safely adds this linker to the remote
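A stand-alone illustration of the id-based filtering the new linker_other_side_of_saddle() performs; the classes below are stand-ins, not pyProm's real Saddle/Linker constructors:

    class StubSaddle:
        def __init__(self):
            self.summits = []  # linkers to summits, as in pyProm

    class StubLinker:
        def __init__(self, id, saddle, disqualified=False):
            self.id = id
            self.saddle = saddle
            self.disqualified = disqualified
            saddle.summits.append(self)

        def other_side_of_saddle(self):
            # Same filter as the new method: every linker hanging off this
            # saddle except ourselves, skipping disqualified linkers.
            return [x for x in self.saddle.summits
                    if x.id != self.id and not x.disqualified]

    saddle = StubSaddle()
    a = StubLinker("a", saddle)
    b = StubLinker("b", saddle)
    c = StubLinker("c", saddle, disqualified=True)
    assert [linker.id for linker in a.other_side_of_saddle()] == ["b"]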
10 changes: 9 additions & 1 deletion pyprom/lib/containers/spot_elevation.py
@@ -37,7 +37,15 @@ def __init__(self, spotElevationList):
"""
super(SpotElevationContainer, self).__init__()
self.points = spotElevationList
self.fast_lookup = {point.id: point for point in self.points}
self.fast_lookup = self.generate_fast_lookup()

def generate_fast_lookup(self):
"""
Produces a fast lookup dict of this Container.

:return: {id: SpotElevation} fast lookup dict.
"""
return {point.id: point for point in self.points}

@property
def lowest(self):
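The refactor above splits the id-to-point lookup into generate_fast_lookup() so callers can rebuild it after mutating points directly. A minimal sketch of that intent with a stand-in point type (not the real SpotElevation constructor):

    class StubPoint:
        def __init__(self, id, elevation):
            self.id = id
            self.elevation = elevation

    class StubContainer:
        def __init__(self, points):
            self.points = points
            self.fast_lookup = self.generate_fast_lookup()

        def generate_fast_lookup(self):
            # id -> point mapping, rebuilt on demand.
            return {point.id: point for point in self.points}

    container = StubContainer([StubPoint("p1", 100.0)])
    container.points.append(StubPoint("p2", 200.0))
    # points was mutated in place, so regenerate the lookup.
    container.fast_lookup = container.generate_fast_lookup()
    assert "p2" in container.fast_lookup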
Empty file added pyprom/lib/contexts/__init__.py
26 changes: 26 additions & 0 deletions pyprom/lib/contexts/exceptions.py
@@ -0,0 +1,26 @@
"""
pyProm: Copyright 2020.

This software is distributed under a license that is described in
the LICENSE file that accompanies it.
"""

class SaddleContextException(Exception):
pass

class SummitContextException(Exception):
pass

class LinkageException(Exception):

def __init__(self, added_saddle, added_summit, recorded_saddle, recorded_summit, duplicate):
self.added_saddle = added_saddle
self.added_summit = added_summit
self.recorded_saddle = recorded_saddle
self.recorded_summit = recorded_summit
self.duplicate = duplicate
msg = lambda x: f"{'OK' if x else 'FAILED'},"
super().__init__(f"added_saddle: {msg(self.added_saddle)} added_summit: {msg(self.added_summit)} recorded_saddle: {msg(self.recorded_saddle)} recorded_summit: {msg(self.recorded_summit)} duplicate: {self.duplicate}")



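A hedged example of raising and reading the new LinkageException; the particular failure shown (saddle side recorded, summit side not) is hypothetical:

    from pyprom.lib.contexts.exceptions import LinkageException

    try:
        raise LinkageException(added_saddle=True, added_summit=False,
                               recorded_saddle=True, recorded_summit=False,
                               duplicate=None)
    except LinkageException as exc:
        # Message reads "added_saddle: OK, added_summit: FAILED, ..."
        print(exc)
        print(exc.duplicate)  # None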
41 changes: 41 additions & 0 deletions pyprom/lib/contexts/feature_context.py
@@ -0,0 +1,41 @@
"""
pyProm: Copyright 2020.

This software is distributed under a license that is described in
the LICENSE file that accompanies it.
"""

from ..util import randomString

class FeatureContext:

def __init__(self, feature, manager, id=None, disabled=False):
"""
A Base Feature Context. Basically sets up id, disabled.
"""

self.manager = manager
self.id = id
if not self.id:
self.id = randomString()
self._feature = feature
self._disabled = disabled

@property
def disabled(self):
return self._disabled

def disable(self):
"""
Disable this feature
"""
self._disabled = True
self.manager.disabled_tracker[self._feature.id] = True

def enable(self):
"""
Enable this feature
"""
self._disabled = False
if self.manager.disabled_tracker.get(self._feature.id):
del self.manager.disabled_tracker[self._feature.id]
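A stand-alone sketch of the disable/enable bookkeeping in FeatureContext, using stub feature and manager objects; the real manager is FeatureContextManager, and its disabled_tracker dict is the only attribute assumed here:

    from pyprom.lib.contexts.feature_context import FeatureContext

    class StubManager:
        """Stand-in exposing the disabled_tracker dict the context uses."""
        def __init__(self):
            self.disabled_tracker = {}

    class StubFeature:
        def __init__(self, id):
            self.id = id

    manager = StubManager()
    feature = StubFeature("summit-1")
    ctx = FeatureContext(feature, manager)

    ctx.disable()
    assert ctx.disabled
    assert manager.disabled_tracker == {"summit-1": True}

    ctx.enable()
    assert not ctx.disabled
    assert manager.disabled_tracker == {}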