2023-06-24 11:17:53 -05:00
|
|
|
from flask import Flask, request, render_template
|
2023-07-09 22:06:57 -05:00
|
|
|
from flask_restful import Resource, Api, reqparse, url_for
|
2023-04-05 20:36:19 -05:00
|
|
|
from datetime import date, timedelta
|
|
|
|
import configparser
|
2023-04-06 14:34:59 -05:00
|
|
|
import csv
|
2023-07-09 22:06:57 -05:00
|
|
|
import datetime
|
2023-08-26 16:05:09 -05:00
|
|
|
import timeago
|
2023-04-05 16:28:39 -05:00
|
|
|
import ast
|
2023-04-05 18:21:37 -05:00
|
|
|
import glob
|
2023-04-16 16:59:09 -05:00
|
|
|
import json, operator
|
2023-06-24 11:17:53 -05:00
|
|
|
import requests
|
2023-04-15 13:27:00 -05:00
|
|
|
import sqlite3
|
2023-04-08 17:07:09 -05:00
|
|
|
# Create the Flask application and wrap it with a flask_restful Api object
# so Resource classes can be registered as endpoints below.
api_app = Flask(__name__)
api = Api(api_app)
|
2023-04-05 16:58:35 -05:00
|
|
|
|
2023-05-13 11:09:41 -05:00
|
|
|
# TODO this is duplicated from kiss_and_db.py, can I avoid that?
|
2023-05-13 17:26:38 -05:00
|
|
|
import constants
|
2023-05-13 11:09:41 -05:00
|
|
|
|
2023-04-05 20:36:19 -05:00
|
|
|
def read_config():
    """Load application settings from 'config.ini' and return the parser.

    :return: a configparser.ConfigParser populated from config.ini
    """
    parser = configparser.ConfigParser()
    parser.read('config.ini')
    return parser
|
|
|
|
|
2023-04-15 13:27:00 -05:00
|
|
|
def dict_factory(cursor, row):
    """sqlite3 row factory: map each column name to its value in the row.

    :param cursor: sqlite3 cursor whose .description names the columns
    :param row: the raw row tuple
    :return: dict of {column_name: value}
    """
    return {desc[0]: value for desc, value in zip(cursor.description, row)}
|
|
|
|
|
|
|
|
def get_db_connection():
    """Open database.db and return a connection whose rows come back as dicts.

    :return: sqlite3.Connection with dict_factory installed as row_factory
    """
    connection = sqlite3.connect('database.db')
    connection.row_factory = dict_factory
    return connection
|
|
|
|
|
|
|
|
def select_all_frames(conn):
    """
    Return every row of the frames table.

    :param conn: the Connection object
    :return: list of all rows (shape depends on conn.row_factory)
    """
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM frames")
    return cursor.fetchall()
|
|
|
|
|
2023-07-09 11:22:23 -05:00
|
|
|
def select_all_stations(conn):
    """
    Return every row of the stations table, most recently heard first.

    :param conn: the Connection object
    :return: list of all rows ordered by last_heard_unix descending
    """
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM stations ORDER BY last_heard_unix DESC")
    return cursor.fetchall()
|
|
|
|
|
2023-06-24 19:06:37 -05:00
|
|
|
def unique_stations(conn):
    """
    Return one row per unique "from" callsign in the frames table.

    Each row carries the station's columns plus MAX(id) (its newest frame)
    and COUNT(id) (how many frames it sent), ordered newest-station first.

    :param conn: the Connection object
    :return: list of grouped rows
    """
    cursor = conn.cursor()
    cursor.execute('SELECT *, MAX(id), COUNT(id) FROM frames GROUP BY "from" ORDER BY MAX(id) DESC')
    return cursor.fetchall()
|
|
|
|
|
2023-06-24 18:44:08 -05:00
|
|
|
def select_frames(conn, n, url_params):
    """
    Query the most recent rows of the frames table, optionally filtered.

    :param conn: the Connection object
    :param n: maximum number of rows to return
    :param url_params: dict of request args (request.args); only keys that
        are real frames-table columns (constants.db_frames_fields) are used
        as LIKE filters, everything else is ignored
    :return: list of matching rows, newest (by created) first
    """
    # Whitelist: drop any key that is not a known frames-table column, so the
    # column names interpolated below always come from our own constant list.
    allowed = set(constants.db_frames_fields)
    field_where = {k: v for k, v in url_params.items() if k in allowed}

    # Build a parameterized WHERE clause. Values are bound via "?" instead of
    # being formatted into the SQL string, which prevents SQL injection from
    # the (user-supplied) query-string values.
    clauses = ['"{}" LIKE ?'.format(k) for k in field_where]
    field_where_query = '' if not clauses else ' WHERE ' + ' AND '.join(clauses)

    sql = 'SELECT * FROM frames{} ORDER BY created DESC LIMIT ?'.format(field_where_query)
    print(sql)  # debug: show the generated statement (values are bound, not shown)

    cur = conn.cursor()
    cur.execute(sql, [*field_where.values(), int(n)])
    rows = cur.fetchall()
    return rows
|
|
|
|
|
2023-06-24 11:17:53 -05:00
|
|
|
@api_app.route('/')
def index():
    """Render the home page with recent packets and the station list."""
    base_url = config['Settings']['base_url']

    # Fetch the recent-packet list from our own REST API.
    # TODO use a relative path / url_for instead of the configured base URL.
    frames = json.loads(requests.get(base_url + "/packets").text)['data']
    for frame in frames:
        if frame['created'] is not None:
            frame['time_ago'] = timeago.format(frame['created_unix'], datetime.datetime.now())

    # Fetch the station list the same way.
    stations = json.loads(requests.get(base_url + "/stations").text)['data']
    # Convert unix time to datetime on the fly for display.
    for station in stations:
        if station['last_heard_unix'] is not None:
            station['last_heard'] = datetime.datetime.utcfromtimestamp(station['last_heard_unix'])
            station['time_ago'] = timeago.format(station['last_heard_unix'], datetime.datetime.now())

    return render_template('index.html',
                           station_call=config['Settings']['station_call'],
                           station_lat=config['Settings']['station_lat'],
                           station_lon=config['Settings']['station_lon'],
                           frames=frames,
                           stations=stations)
|
2023-06-24 11:17:53 -05:00
|
|
|
|
2024-01-15 10:17:02 -06:00
|
|
|
@api_app.route('/map')
def map():
    """Render a Leaflet map with one marker per frame that has a position."""
    # Get the default list of frames from the API.
    frames = json.loads(requests.get(config['Settings']['base_url'] + "/packets").text)['data']

    # Build one Leaflet marker statement per positioned frame. Collect the
    # pieces in a list and join once instead of growing a string with +=.
    marker_parts = []
    id_counter = 0
    for frame in frames:
        if frame['latitude'] is None:
            continue
        # Unique JS identifier for each marker.
        idd = 'frame' + str(id_counter)
        id_counter += 1
        # frame['from'] is untrusted over-the-air APRS data; json.dumps
        # produces a quoted, escaped JS string literal so a quote in a
        # callsign/comment cannot break out of the popup string.
        popup = json.dumps(str(frame['from']))
        marker_parts.append(
            "var {idd} = L.marker([{latitude}, {longitude}]);"
            "{idd}.addTo(map).bindPopup({popup});".format(
                idd=idd,
                latitude=frame['latitude'],
                longitude=frame['longitude'],
                popup=popup))
    markers = ''.join(marker_parts)

    return render_template('map.html',
                           station_lat=config['Settings']['station_lat'],
                           station_lon=config['Settings']['station_lon'],
                           markers=markers)
|
2024-01-15 10:17:02 -06:00
|
|
|
|
2023-04-05 18:21:37 -05:00
|
|
|
class Packets(Resource):
    """REST resource returning recent frames from the database."""

    def get(self):
        """Return up to ``n`` recent frames (default 10), filtered by query args.

        :return: ({'data': rows}, 200)
        """
        # ?n= may be absent (TypeError from int(None)) or non-numeric
        # (ValueError); fall back to 10 in either case. Narrow except
        # instead of a bare one so real bugs still surface.
        try:
            n = int(request.args.get('n'))
        except (TypeError, ValueError):
            n = 10

        conn = get_db_connection()
        try:
            # Limit to the number of records requested; remaining query args
            # are passed through as column filters.
            data = select_frames(conn, n=n, url_params=request.args.to_dict())
        finally:
            # Always release the connection, even if the query fails.
            conn.close()

        return {'data': data}, 200  # return data and 200 OK code
|
2023-04-05 18:21:37 -05:00
|
|
|
|
2023-07-09 22:06:57 -05:00
|
|
|
class Stations(Resource):
    """REST resource returning the station list from the database."""

    def get(self):
        """Return all stations, most recently heard first.

        :return: ({'data': rows}, 200)

        NOTE(review): ``n`` is parsed from the query string but never used —
        select_all_stations has no limit parameter; kept for parity with
        Packets until a limit is implemented.
        """
        # ?n= may be absent (TypeError) or non-numeric (ValueError).
        try:
            n = int(request.args.get('n'))
        except (TypeError, ValueError):
            n = 10

        conn = get_db_connection()
        try:
            data = select_all_stations(conn)
        finally:
            # Always release the connection, even if the query fails.
            conn.close()

        return {'data': data}, 200  # return data and 200 OK code
|
|
|
|
|
2023-04-05 20:36:19 -05:00
|
|
|
# Read config once at import time; route handlers read this module global.
config = read_config()

# Start subprocess to watch the KISS connection.
import subprocess

# Run the watcher directly with an argv list (shell=False). This has the same
# effect as the old 'exec python3 ...' shell trick — proc.pid is the python
# process itself, so terminating proc kills the watcher — without the risks
# of string-based shell invocation.
proc = subprocess.Popen(["python3", "kiss_and_db.py"], stdout=subprocess.PIPE)
print("kiss_and_db.py as subprocess pid " + str(proc.pid))

# The packets endpoint
api.add_resource(Packets, '/packets')
# The stations endpoint
api.add_resource(Stations, '/stations')

if __name__ == '__main__':
    api_app.run(debug=True, host='0.0.0.0', port=5001)  # run our Flask app
|