Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

[Archive] replace column 'bbox' with 'geometry' #296

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions environment-dev.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ dependencies:
- pillow
- lxml
- packaging
- libspatialite>=5.1.0
- coveralls
- coverage
- pytest
1 change: 1 addition & 0 deletions environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,3 +17,4 @@ dependencies:
- pillow
- lxml
- packaging
- libspatialite>=5.1.0
40 changes: 24 additions & 16 deletions pyroSAR/drivers.py
Original file line number Diff line number Diff line change
Expand Up @@ -2285,10 +2285,14 @@ class Archive(object):
def __init__(self, dbfile, custom_fields=None, postgres=False, user='postgres',
password='1234', host='localhost', port=5432, cleanup=True,
legacy=False):

if dbfile.endswith('.csv'):
raise RuntimeError("Please create a new Archive database and import the"
"CSV file using db.import_outdated('<file>.csv').")
# check for driver, if postgres then check if server is reachable
if not postgres:
self.driver = 'sqlite'
dirname = os.path.dirname(dbfile)
dirname = os.path.dirname(os.path.abspath(dbfile))
w_ok = os.access(dirname, os.W_OK)
if not w_ok:
raise RuntimeError('cannot write to directory {}'.format(dirname))
Expand Down Expand Up @@ -2328,7 +2332,7 @@ def __init__(self, dbfile, custom_fields=None, postgres=False, user='postgres',
self.engine = create_engine(url=self.url, echo=False,
connect_args=connect_args)

# call to ____load_spatialite() for sqlite, to load mod_spatialite via event handler listen()
# call to __load_spatialite() for sqlite, to load mod_spatialite via event handler listen()
if self.driver == 'sqlite':
log.debug('loading spatialite extension')
listen(target=self.engine, identifier='connect', fn=self.__load_spatialite)
Expand Down Expand Up @@ -2362,11 +2366,15 @@ def __init__(self, dbfile, custom_fields=None, postgres=False, user='postgres',
self.__init_data_table()
self.__init_duplicates_table()

msg = ("the 'data' table is missing {}. Please create a new database "
"and import the old one opened in legacy mode using "
"Archive.import_outdated.")
pk = sql_inspect(self.data_schema).primary_key
if 'product' not in pk.columns.keys() and not legacy:
raise RuntimeError("the 'data' table is missing a primary key 'product'. "
"Please create a new database and import the old one "
"opened in legacy mode using Archive.import_outdated.")
raise RuntimeError(msg.format("a primary key 'product'"))

if 'geometry' not in self.get_colnames() and not legacy:
raise RuntimeError(msg.format("the 'geometry' column"))

self.Base = automap_base(metadata=self.meta)
self.Base.prepare(self.engine, reflect=True)
Expand All @@ -2387,7 +2395,7 @@ def add_tables(self, tables):
.. note::

Columns using Geometry must have setting management=True for SQLite,
for example: ``bbox = Column(Geometry('POLYGON', management=True, srid=4326))``
for example: ``geometry = Column(Geometry('POLYGON', management=True, srid=4326))``

Parameters
----------
Expand Down Expand Up @@ -2437,8 +2445,8 @@ def __init_data_table(self):
Column('vv', Integer),
Column('hv', Integer),
Column('vh', Integer),
Column('bbox', Geometry(geometry_type='POLYGON',
management=True, srid=4326)))
Column('geometry', Geometry(geometry_type='POLYGON',
management=True, srid=4326)))
# add custom fields
if self.custom_fields is not None:
for key, val in self.custom_fields.items():
Expand Down Expand Up @@ -2513,13 +2521,13 @@ def __prepare_insertion(self, scene):
insertion = self.Data()
colnames = self.get_colnames()
for attribute in colnames:
if attribute == 'bbox':
geom = id.bbox()
if attribute == 'geometry':
geom = id.geometry()
geom.reproject(4326)
geom = geom.convert2wkt(set3D=False)[0]
geom = 'SRID=4326;' + str(geom)
# set attributes of the Data object according to input
setattr(insertion, 'bbox', geom)
setattr(insertion, 'geometry', geom)
elif attribute in ['hh', 'vv', 'hv', 'vh']:
setattr(insertion, attribute, int(attribute in pols))
else:
Expand Down Expand Up @@ -2873,9 +2881,9 @@ def import_outdated(self, dbfile):
scenes.append(row['scene'])
self.insert(scenes)
elif isinstance(dbfile, Archive):
select = dbfile.conn.execute('SELECT * from data')
self.conn.execute(insert(self.Data).values(*select))
# duplicates in older databases may fit into the new data table
scenes = dbfile.conn.execute('SELECT scene from data')
scenes = [s.scene for s in scenes]
self.insert(scenes)
reinsert = dbfile.select_duplicates(value='scene')
if reinsert is not None:
self.insert(reinsert)
Expand Down Expand Up @@ -3030,11 +3038,11 @@ def select(self, vectorobject=None, mindate=None, maxdate=None, date_strict=True
site_geom = vec.convert2wkt(set3D=False)[0]
# postgres has a different way to store geometries
if self.driver == 'postgresql':
arg_format.append("st_intersects(bbox, 'SRID=4326; {}')".format(
arg_format.append("st_intersects(geometry, 'SRID=4326; {}')".format(
site_geom
))
else:
arg_format.append('st_intersects(GeomFromText(?, 4326), bbox) = 1')
arg_format.append('st_intersects(GeomFromText(?, 4326), geometry) = 1')
vals.append(site_geom)
else:
log.info('WARNING: argument vectorobject is ignored, must be of type spatialist.vector.Vector')
Expand Down
3 changes: 2 additions & 1 deletion tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,8 @@ def testdata(testdir):
# used in test_osv
's1_orbit': os.path.join(testdir, 'S1A_IW_GRDH_1SDV_20210119T031653_20210119T031718_036201_043ED0_8255.zip'),
'tif': os.path.join(testdir, 'S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif'),
'archive_old': os.path.join(testdir, 'archive_outdated.csv'),
'archive_old_csv': os.path.join(testdir, 'archive_outdated.csv'),
'archive_old_bbox': os.path.join(testdir, 'archive_outdated_bbox.db'),
'dempar': os.path.join(testdir, 'dem.par'),
'mlipar': os.path.join(testdir, 'mli.par')
}
Expand Down
Binary file added tests/data/archive_outdated_bbox.db
Binary file not shown.
47 changes: 43 additions & 4 deletions tests/test_drivers.py
Original file line number Diff line number Diff line change
Expand Up @@ -201,9 +201,26 @@ def test_archive2(tmpdir, testdata):
os.remove(dbfile)
assert not os.path.isfile(dbfile)
assert Vector(shp).nfeatures == 1

with pytest.raises(OSError):
with pyroSAR.Archive(dbfile) as db:
db.import_outdated(testdata['archive_old'])
db.import_outdated(testdata['archive_old_csv'])

# the archive_old_bbox database contains a relative file name for the scene
# so that it can be reimported into the new database. The working directory
# is changed temporarily so that the scene can be found.
cwd = os.getcwd()
folder = os.path.dirname(os.path.realpath(__file__))
os.chdir(os.path.join(folder, 'data'))
with pyroSAR.Archive(dbfile) as db:
with pyroSAR.Archive(testdata['archive_old_bbox'], legacy=True) as db_old:
db.import_outdated(db_old)
os.chdir(cwd)

with pytest.raises(RuntimeError):
db = pyroSAR.Archive(testdata['archive_old_csv'])
with pytest.raises(RuntimeError):
db = pyroSAR.Archive(testdata['archive_old_bbox'])


def test_archive_postgres(tmpdir, testdata):
Expand Down Expand Up @@ -237,16 +254,38 @@ def test_archive_postgres(tmpdir, testdata):
with pytest.raises(TypeError):
db.filter_scenelist([1])
db.close()
with pyroSAR.Archive('test', postgres=True, port=pgport, user=pguser, password=pgpassword) as db:
with pyroSAR.Archive('test', postgres=True, port=pgport,
user=pguser, password=pgpassword) as db:
assert db.size == (1, 0)
shp = os.path.join(str(tmpdir), 'db.shp')
db.export2shp(shp)
pyroSAR.drop_archive(db)
assert Vector(shp).nfeatures == 1

with pyroSAR.Archive('test', postgres=True, port=pgport, user=pguser, password=pgpassword) as db:
with pyroSAR.Archive('test', postgres=True, port=pgport,
user=pguser, password=pgpassword) as db:
with pytest.raises(OSError):
db.import_outdated(testdata['archive_old'])
db.import_outdated(testdata['archive_old_csv'])
pyroSAR.drop_archive(db)

# the archive_old_bbox database contains a relative file name for the scene
# so that it can be reimported into the new database. The working directory
# is changed temporarily so that the scene can be found.
cwd = os.getcwd()
folder = os.path.dirname(os.path.realpath(__file__))
os.chdir(os.path.join(folder, 'data'))
with pyroSAR.Archive('test', postgres=True, port=pgport,
user=pguser, password=pgpassword) as db:
with pyroSAR.Archive(testdata['archive_old_bbox'], legacy=True) as db_old:
db.import_outdated(db_old)
pyroSAR.drop_archive(db)
os.chdir(cwd)

dbfile = os.path.join(str(tmpdir), 'scenes.db')
with pyroSAR.Archive('test', postgres=True, port=pgport,
user=pguser, password=pgpassword) as db:
with pyroSAR.Archive(dbfile, legacy=True) as db_sqlite:
db.import_outdated(db_sqlite)
pyroSAR.drop_archive(db)

with pytest.raises(SystemExit) as pytest_wrapped_e:
Expand Down