1 change: 1 addition & 0 deletions .gitignore
@@ -2,5 +2,6 @@
.env
.venv
*.geojson
*.egg-info
coords.txt
__pycache__
5 changes: 5 additions & 0 deletions pyproject.toml
@@ -17,6 +17,7 @@ dependencies = [
"numpy==1.26.4",
"packaging==24.0",
"pandas==2.2.1",
"psycopg[binary]>=3.2.7",
"pyproj==3.6.1",
"python-dateutil==2.9.0.post0",
"python-dotenv>=1.1.0",
@@ -35,3 +36,7 @@ dev = [
"pytest>=8.3.5",
"pytest-cov>=6.1.1",
]
[project.scripts]
coord-buffer = "coord_buffer.cli:main"
[tool.uv]
package = true
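
The new [project.scripts] entry exposes the CLI as a coord-buffer console script once the package is installed, and [tool.uv] package = true tells uv to build and install the project as a package so that entry point exists. A minimal sketch of what the generated script roughly does, assuming only the entry point declared above:

# Rough equivalent of the generated coord-buffer console script; illustration only.
import sys

from coord_buffer.cli import main

if __name__ == "__main__":
    sys.exit(main())  # main() parses arguments and performs its own sys.exit() calls on the check path
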
File renamed without changes.
44 changes: 36 additions & 8 deletions src/cli.py → src/coord_buffer/cli.py
@@ -5,11 +5,17 @@

import geopandas as gpd

from .config import OUTPUT_FOLDER
from .coords import to_dms_coords, to_wgs84
from .fetcher import fetch_tmas
from .processor import buffer_polygon, create_geojson_files, read_coords
from .utils import logger
from coord_buffer.config import DB_PARAMS, OUTPUT_FOLDER
from coord_buffer.coords import to_dms_coords, to_wgs84
from coord_buffer.fetcher import fetch_tmas
from coord_buffer.processor import (
buffer_polygon,
create_geojson_files,
insert_tmas_to_db,
is_airac_current,
read_coords,
)
from coord_buffer.utils import logger


def parse_args():
@@ -23,25 +29,47 @@ def parse_args():
parser.add_argument(
"--buffer", type=float, default=0, help="Buffer size in NM (default: 0)"
)
parser.add_argument(
"--check-airac",
type=str,
help="Check if the provided AIRAC date (YYYY-MM-DD) is the latest in the database",
)
return parser.parse_args()


def main():
args = parse_args()
# Create output folder if it doesn't exist

if args.check_airac:
try:
is_current = is_airac_current(DB_PARAMS, args.check_airac)
if is_current:
logger.info(
f"AIRAC date {args.check_airac} is current or newer than the latest in the database"
)
else:
logger.warning(
f"AIRAC date {args.check_airac} is older than the latest in the database"
)
sys.exit(0 if is_current else 1)
except Exception as e:
logger.error(f"Error checking AIRAC date: {e}")
sys.exit(1)

if not os.path.exists(OUTPUT_FOLDER):
logger.info(f"Creating {OUTPUT_FOLDER} folder")
os.makedirs(OUTPUT_FOLDER)

# If no input is provided, fetch TMAs
# If no input is provided, fetch TMAs & save to DB
if args.input_file is None:
logger.info("No input file provided, fetching TMAs")
try:
tmas = fetch_tmas()
gdf = gpd.read_file(BytesIO(tmas))
gdf = to_wgs84(gdf)
create_geojson_files(gdf, OUTPUT_FOLDER)
logger.info(f"TMAs saved to {OUTPUT_FOLDER}")
insert_tmas_to_db(gdf, DB_PARAMS)
logger.info(f"TMAs saved to {OUTPUT_FOLDER} & inserted into database")
except Exception as e:
logger.info(f"Error fetching or processing TMA data: {e}")
sys.exit(1)
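
For reference, the new --check-airac path exits with status 0 when the supplied date is current or newer than the latest wef in the database, and 1 when it is older or the check fails, so it can be consumed by automation. A minimal sketch of that usage, assuming the coord-buffer console script from [project.scripts] is installed and on PATH (the date is only an example):

import subprocess

# Exit code 0: date is current or newer; 1: date is stale or the check failed.
result = subprocess.run(["coord-buffer", "--check-airac", "2024-05-16"], check=False)
if result.returncode == 0:
    print("AIRAC date is current")
else:
    print("AIRAC date is stale, or the check could not be completed")
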
7 changes: 7 additions & 0 deletions src/config.py → src/coord_buffer/config.py
@@ -10,3 +10,10 @@
METRIC_EPSG = 3006
TMA_URL = os.getenv("TMA_URL", "https://daim.lfv.se/geoserver/wfs")
OUTPUT_FOLDER = os.getenv("OUTPUT_FOLDER", "POLYGONES")
DB_PARAMS = {
"dbname": os.getenv("POSTGRES_DB"),
"user": os.getenv("POSTGRES_USER"),
"password": os.getenv("POSTGRES_PASSWORD"),
"host": os.getenv("POSTGRES_HOST"),
"port": os.getenv("5432"),
}
4 changes: 2 additions & 2 deletions src/coords.py → src/coord_buffer/coords.py
@@ -1,6 +1,6 @@
import re

from .config import DEFAULT_EPSG
from coord_buffer.config import DEFAULT_EPSG


def dms_to_dd_coords(coord):
@@ -23,7 +23,7 @@ def to_dms_coords(coord):
lat, lon = coord
lat_dir = "N" if lat >= 0 else "S"
lon_dir = "E" if lon >= 0 else "W"
return f"{dd_to_dms(lat)}{lat_dir} {dd_to_dms(lon)}{lon_dir}"
return f"{dd_to_dms(lat)}{lat_dir} 0{dd_to_dms(lon)}{lon_dir}"


def to_wgs84(geo_df):
44 changes: 44 additions & 0 deletions src/coord_buffer/db.py
@@ -0,0 +1,44 @@
import psycopg

from coord_buffer.config import DB_PARAMS


def create_tmas_table():
with psycopg.connect(**DB_PARAMS) as conn:
cursor = conn.cursor()
cursor.execute("CREATE EXTENSION IF NOT EXISTS postgis;")
cursor.execute("""
CREATE TABLE tmas (
id SERIAL PRIMARY KEY,
name_of_area VARCHAR(255) NOT NULL,
geometry GEOMETRY(POLYGON, 4326) NOT NULL,
wef DATE NOT NULL,
type_of_area VARCHAR(50),
position_indicator VARCHAR(50),
date_time_of_chg TIMESTAMP,
name_of_operator VARCHAR(255),
origin VARCHAR(50),
location VARCHAR(255),
upper_limit VARCHAR(50),
lower_limit VARCHAR(50),
comment_1 TEXT,
comment_2 TEXT,
quality VARCHAR(50),
crc_id VARCHAR(50),
crc_pos VARCHAR(50),
crc_tot VARCHAR(50),
msid INTEGER,
idnr INTEGER,
mi_style TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT unique_name_wef UNIQUE (name_of_area, wef)
);
CREATE INDEX tmas_geometry_idx ON tmas USING GIST (geometry);
CREATE INDEX tmas_wef_idx ON tmas (wef);
""")
conn.commit()


if __name__ == "__main__":
create_tmas_table()
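
Setup note: create_tmas_table() only runs under the __main__ guard, so the schema is presumably bootstrapped once (for example with python -m coord_buffer.db) after the POSTGRES_* variables read by config.py are set; since the CREATE TABLE statement has no IF NOT EXISTS, rerunning it against an existing tmas table will raise an error.
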
7 changes: 4 additions & 3 deletions src/fetcher.py → src/coord_buffer/fetcher.py
@@ -1,18 +1,19 @@
import requests
from tenacity import retry, stop_after_attempt, wait_fixed

from .config import TMA_URL
from coord_buffer.config import DEFAULT_EPSG, TMA_URL


def fetch_tmas(EPSG=3857, url=None):
def fetch_tmas(epsg=DEFAULT_EPSG, url=None):
"""Fetch TMAs from WFS service."""
url = url or TMA_URL
params = {
"service": "WFS",
"version": "1.1.0",
"request": "GetFeature",
"typename": "mais:TMAS,mais:TMAW",
"outputFormat": "application/json",
"srsName": f"EPSG:{EPSG}",
"srsName": f"EPSG:{epsg}",
}

@retry(stop=stop_after_attempt(3), wait=wait_fixed(2))
181 changes: 181 additions & 0 deletions src/coord_buffer/processor.py
@@ -0,0 +1,181 @@
import json
import os
from datetime import datetime

import geopandas as gpd
import psycopg
from shapely.geometry import Polygon

from coord_buffer.config import BUFFER_MULTIPLIER, DB_PARAMS, DEFAULT_EPSG, METRIC_EPSG
from coord_buffer.utils import clean_file_name


def create_geojson_files(geo_df, folder_name):
"""Create individual GeoJSON files for each TMA."""
if not os.path.exists(folder_name):
os.makedirs(folder_name)
for _, row in geo_df.iterrows():
name = clean_file_name(row["NAMEOFAREA"])
if "TMA_" in name:
continue
single_gdf = gpd.GeoDataFrame(
{"NAMEOFAREA": [name], "geometry": [row["geometry"]]},
crs=f"EPSG:{DEFAULT_EPSG}",
)
filename = f"{folder_name}/{name}.geojson"
single_gdf.to_file(filename, driver="GeoJSON")


def buffer_polygon(coords, buffer_size_nm):
"""Buffer a polygon by a distance in nautical miles."""
gdf = gpd.GeoDataFrame(geometry=[Polygon(coords)], crs=f"EPSG:{DEFAULT_EPSG}")
gdf = gdf.to_crs(epsg=METRIC_EPSG)
buffered = gdf.buffer(
distance=buffer_size_nm * BUFFER_MULTIPLIER, single_sided=True, join_style=2
)
return buffered.to_crs(epsg=DEFAULT_EPSG)


def read_coords(filename):
"""Read coordinates from a GeoJSON file."""
with open(filename, "r") as file:
geojson_data = json.load(file)
if not geojson_data.get("features"):
raise ValueError("GeoJSON file has no features")
coords = []
for feature in geojson_data["features"]:
if feature["geometry"]["type"] != "Polygon":
raise ValueError(
f"Unsupported geometry type: {feature['geometry']['type']}"
)
for polygon in feature["geometry"]["coordinates"]:
for coord in polygon:
if not isinstance(coord, list) or len(coord) != 2:
raise ValueError(f"Invalid coordinate format: {coord}")
coords.append(coord)
return coords


def insert_tmas_to_db(geo_df, conn_params=DB_PARAMS):
"""Insert TMA GeoJSON features into PostgreSQL database."""
conn = None
try:
conn = psycopg.connect(**conn_params)
cursor = conn.cursor()

insert_query = """
INSERT INTO tmas (
name_of_area, geometry, wef, type_of_area, position_indicator,
date_time_of_chg, name_of_operator, origin, location,
upper_limit, lower_limit, comment_1, comment_2, quality,
crc_id, crc_pos, crc_tot, msid, idnr, mi_style, updated_at
) VALUES (
%s, ST_SetSRID(ST_GeomFromGeoJSON(%s), 4326), %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, CURRENT_TIMESTAMP
)
ON CONFLICT (name_of_area, wef)
DO UPDATE SET
geometry = EXCLUDED.geometry,
type_of_area = EXCLUDED.type_of_area,
position_indicator = EXCLUDED.position_indicator,
date_time_of_chg = EXCLUDED.date_time_of_chg,
name_of_operator = EXCLUDED.name_of_operator,
origin = EXCLUDED.origin,
location = EXCLUDED.location,
upper_limit = EXCLUDED.upper_limit,
lower_limit = EXCLUDED.lower_limit,
comment_1 = EXCLUDED.comment_1,
comment_2 = EXCLUDED.comment_2,
quality = EXCLUDED.quality,
crc_id = EXCLUDED.crc_id,
crc_pos = EXCLUDED.crc_pos,
crc_tot = EXCLUDED.crc_tot,
msid = EXCLUDED.msid,
idnr = EXCLUDED.idnr,
mi_style = EXCLUDED.mi_style,
updated_at = CURRENT_TIMESTAMP
"""

for _, row in geo_df.iterrows():
name = clean_file_name(row["NAMEOFAREA"])
if "TMA_" in name:
continue

geometry_series = gpd.GeoSeries(
[row["geometry"]], crs=f"EPSG:{DEFAULT_EPSG}"
)
geometry_json = geometry_series.to_json()
feature_geometry = json.loads(geometry_json)["features"][0]["geometry"]

properties = row.to_dict()
wef = properties.get("WEF")
if wef:
try:
wef = datetime.strptime(wef, "%Y-%m-%d").date()
except ValueError:
wef = None

cursor.execute(
insert_query,
(
name,
json.dumps(feature_geometry),
wef,
properties.get("TYPEOFAREA"),
properties.get("POSITIONINDICATOR"),
properties.get("DATETIMEOFCHG"),
properties.get("NAMEOFOPERATOR"),
properties.get("ORIGIN"),
properties.get("LOCATION"),
properties.get("UPPER"),
properties.get("LOWER"),
properties.get("COMMENT_1"),
properties.get("COMMENT_2"),
properties.get("QUALITY"),
properties.get("CRC_ID"),
properties.get("CRC_POS"),
properties.get("CRC_TOT"),
properties.get("MSID"),
properties.get("IDNR"),
properties.get("MI_STYLE"),
),
)

conn.commit()
cursor.close()
except Exception as e:
if conn:
conn.rollback()
raise RuntimeError(f"Failed to insert TMAs into database: {e}")
finally:
if conn:
conn.close()


def get_latest_airac_date(conn_params):
"""Get the latest AIRAC effective date from the database."""
conn = None
try:
conn = psycopg.connect(**conn_params)
cursor = conn.cursor()
cursor.execute("SELECT MAX(wef) FROM tmas")
latest_wef = cursor.fetchone()[0]
cursor.close()
return latest_wef
except Exception as e:
raise RuntimeError(f"Failed to fetch latest AIRAC date: {e}")
finally:
if conn:
conn.close()


def is_airac_current(conn_params, airac_date):
"""Check if the provided AIRAC date is the latest in the database."""
latest_wef = get_latest_airac_date(conn_params)
if not latest_wef:
return False
try:
airac_date = datetime.strptime(airac_date, "%Y-%m-%d").date()
return airac_date >= latest_wef
except ValueError:
raise ValueError("Invalid AIRAC date format, expected YYYY-MM-DD")
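
A minimal sketch of calling the new AIRAC helpers directly, assuming a reachable PostGIS database already populated via insert_tmas_to_db() (the date below is only an example):

from coord_buffer.config import DB_PARAMS
from coord_buffer.processor import get_latest_airac_date, is_airac_current

latest = get_latest_airac_date(DB_PARAMS)  # latest wef as a date, or None if the table is empty
print(f"Latest AIRAC wef in the database: {latest}")
print(is_airac_current(DB_PARAMS, "2024-05-16"))  # True when the supplied date is >= the latest wef
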
File renamed without changes.