# aprs_tool/api_app.py
from flask import Flask, request, render_template
2023-07-09 22:06:57 -05:00
from flask_restful import Resource, Api, reqparse, url_for
from datetime import date, timedelta
import configparser
2023-04-06 14:34:59 -05:00
import csv
2023-07-09 22:06:57 -05:00
import datetime
2023-08-26 16:05:09 -05:00
import timeago
2023-04-05 16:28:39 -05:00
import ast
import glob
2023-04-16 16:59:09 -05:00
import json, operator
2023-06-24 11:17:53 -05:00
import requests
2023-04-15 13:27:00 -05:00
import sqlite3
2023-04-08 17:07:09 -05:00
api_app = Flask(__name__)
api = Api(api_app)
2023-04-05 16:58:35 -05:00
# TODO this is duplicated from kiss_and_db.py, can I avoid that?
import constants
def read_config():
    """Load config.ini from the working directory and return the parser."""
    parser = configparser.ConfigParser()
    parser.read('config.ini')
    return parser
2023-04-15 13:27:00 -05:00
def dict_factory(cursor, row):
    """sqlite3 row factory: return each row as a {column_name: value} dict."""
    columns = [description[0] for description in cursor.description]
    return dict(zip(columns, row))
def get_db_connection():
    """Open database.db and return a connection that yields dict rows."""
    connection = sqlite3.connect('database.db')
    connection.row_factory = dict_factory
    return connection
def select_all_frames(conn):
    """
    Query all rows in the frames table
    :param conn: the Connection object
    :return: list of all frame rows
    """
    cursor = conn.cursor()
    return cursor.execute("SELECT * FROM frames").fetchall()
def select_all_stations(conn):
    """
    Query all rows in the stations table, most recently heard first.
    :param conn: the Connection object
    :return: list of station rows
    """
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM stations ORDER BY last_heard_unix DESC")
    return cursor.fetchall()
def unique_stations(conn):
    """
    Return one frames row per distinct "from" callsign, with MAX(id) and
    COUNT(id) appended, ordered newest-first.
    :param conn: the Connection object
    :return: list of grouped frame rows
    """
    cursor = conn.cursor()
    query = 'SELECT *, MAX(id), COUNT(id) FROM frames GROUP BY "from" ORDER BY MAX(id) DESC'
    cursor.execute(query)
    return cursor.fetchall()
def select_frames(conn, n, url_params):
    """
    Query up to n rows from the frames table, newest first, filtered by URL
    parameters.

    :param conn: the Connection object
    :param n: maximum number of rows to return
    :param url_params: dict of field/value filters (request.args); keys that
        are not frames-table columns are ignored
    :return: list of frame rows
    """
    # Keep only keys that match known frames-table columns (whitelist), so
    # arbitrary query-string keys can never reach the SQL.
    allowed = set(constants.db_frames_fields)
    field_where = {k: v for k, v in url_params.items() if k in allowed}
    # Build a parameterized WHERE clause: values are bound with "?" so user
    # input is never interpreted as SQL (the old string-formatted query was
    # injectable). Column names come from the whitelist above.
    where_parts = [f'"{k}" LIKE ?' for k in field_where]
    field_where_query = "" if not where_parts else "WHERE " + " AND ".join(where_parts)
    sql = 'SELECT * FROM frames {field_where_query} ORDER BY id DESC LIMIT ?'.format(field_where_query=field_where_query)
    print(sql)
    cur = conn.cursor()
    # Bind filter values plus the LIMIT in one parameter tuple
    cur.execute(sql, (*field_where.values(), n))
    rows = cur.fetchall()
    return rows
def select_stations(conn, n):
    """
    Query the n most recently heard rows in the stations table.

    :param conn: the Connection object
    :param n: maximum number of rows to return
    :return: list of station rows
    """
    cur = conn.cursor()
    # Bind the limit as a parameter instead of formatting it into the SQL,
    # so a non-integer n cannot alter the statement.
    sql = 'SELECT * FROM stations ORDER BY last_heard_unix DESC LIMIT ?'
    print(sql)
    cur.execute(sql, (n,))
    rows = cur.fetchall()
    return rows
@api_app.route('/')
def index():
    """Render the home page: recent packets, station list, and map GeoJSON."""
    # Get list of recent packets using our own API
    # TODO use a relative path (url_for) instead of the configured base_url
    frames = requests.get(config['Settings']['base_url'] + "/packets", timeout=30).json()['data']

    # Human-friendly relative timestamps for display
    for frame in frames:
        if frame['created'] is not None:
            frame['time_ago'] = timeago.format(frame['created_unix'], datetime.datetime.now())

    # Station list from the API
    stations = requests.get(config['Settings']['base_url'] + "/stations", timeout=30).json()['data']
    # Convert unix time to datetime on the fly because I'm lazy right now
    for station in stations:
        if station['last_heard_unix'] is not None:
            # NOTE(review): utcfromtimestamp is deprecated in Python 3.12;
            # kept for behavior compatibility (naive UTC datetime)
            station['last_heard'] = datetime.datetime.utcfromtimestamp(station['last_heard_unix'])
            station['time_ago'] = timeago.format(station['last_heard_unix'], datetime.datetime.now())

    # Map stuff: only frames that carry a position
    frames_locs = [frame for frame in frames if frame['latitude'] is not None]
    # Make a GeoJSON FeatureCollection of the located frames
    geojs = json.dumps({
        "type": "FeatureCollection",
        "features": [
            {
                "type": "Feature",
                "geometry": {
                    "type": "Point",
                    "coordinates": [frame['longitude'], frame['latitude']],
                },
                "properties": frame,
            } for frame in frames_locs
        ]
    })

    return render_template('index.html',
                           station_call=config['Settings']['station_call'],
                           station_lat=config['Settings']['station_lat'],
                           station_lon=config['Settings']['station_lon'],
                           frames=frames,
                           stations=stations,
                           geojs=geojs)
# NOTE(review): the name "map" shadows the builtin; kept because Flask derives
# the endpoint name from the function name and templates may use url_for('map').
@api_app.route('/map')
def map():
    """Render the full-page map of recently heard frames."""
    # Get the default list of frames from the API
    frames = requests.get(config['Settings']['base_url'] + "/packets", timeout=30).json()['data']

    # Keep only frames that carry a position
    frames_locs = [frame for frame in frames if frame['latitude'] is not None]
    # Make a GeoJSON FeatureCollection of the located frames
    geojs = json.dumps({
        "type": "FeatureCollection",
        "features": [
            {
                "type": "Feature",
                "geometry": {
                    "type": "Point",
                    "coordinates": [frame['longitude'], frame['latitude']],
                },
                "properties": frame,
            } for frame in frames_locs
        ]
    })

    return render_template('map.html',
                           station_lat=config['Settings']['station_lat'],
                           station_lon=config['Settings']['station_lon'],
                           station_call=config['Settings']['station_call'],
                           geojs=geojs)
class Packets(Resource):
    """REST resource: GET /packets returns recent frames as JSON."""

    def get(self):
        """Return up to n frames (default 10), filtered by other query args."""
        # Handle the optional "n" argument; fall back to 10 when it is
        # missing (TypeError from int(None)) or not numeric (ValueError).
        # Narrowed from a bare except so real bugs are not swallowed.
        try:
            n = int(request.args.get('n'))
        except (TypeError, ValueError):
            n = 10

        conn = get_db_connection()
        try:
            # Limit to number of records requested; remaining query args
            # become field filters inside select_frames
            data = select_frames(conn, n=n, url_params=request.args.to_dict())
        finally:
            # Always release the sqlite connection (was previously leaked)
            conn.close()
        return {'data': data}, 200  # return data and 200 OK code
class Stations(Resource):
    """REST resource: GET /stations returns recently heard stations as JSON."""

    def get(self):
        """Return up to n stations (default 10), newest-heard first."""
        # Handle the optional "n" argument; fall back to 10 when it is
        # missing (TypeError from int(None)) or not numeric (ValueError).
        # Narrowed from a bare except so real bugs are not swallowed.
        try:
            n = int(request.args.get('n'))
        except (TypeError, ValueError):
            n = 10

        conn = get_db_connection()
        try:
            # Limit to number of records requested
            data = select_stations(conn, n=n)
        finally:
            # Always release the sqlite connection (was previously leaked)
            conn.close()
        return {'data': data}, 200  # return data and 200 OK code
# Read config
config = read_config()

# Start subprocess to watch KISS connection
import subprocess
# Spawn with an argv list and no shell: the old shell=True + "exec ..." trick
# only existed so proc.pid would belong to python3 itself (for clean kills);
# a direct argv spawn gives that without involving a shell at all.
proc = subprocess.Popen(["python3", "kiss_and_db.py"], stdout=subprocess.PIPE)
print("kiss_and_db.py as subprocess pid " + str(proc.pid))

# The packets endpoint
api.add_resource(Packets, '/packets')
# The stations endpoint
api.add_resource(Stations, '/stations')
2023-04-05 16:58:35 -05:00
# Run the Flask development server when executed directly.
# NOTE(review): debug=True on 0.0.0.0 exposes the Werkzeug debugger to the
# network — do not use this entry point in production.
if __name__ == '__main__':
    api_app.run(debug=True, host='0.0.0.0', port=5001) # run our Flask app