# aprs_tool/api_app.py
from flask import Flask, request, render_template
from flask_restful import Resource, Api, reqparse, url_for
from datetime import date, timedelta
import configparser
import csv
import datetime
import ast
import glob
import json, operator
import requests
import sqlite3
# Flask application object and its flask_restful API wrapper, shared module-wide.
api_app = Flask(__name__)
api = Api(api_app)
# TODO this is duplicated from kiss_and_db.py, can I avoid that?
import constants
def read_config():
    """Load settings from config.ini in the working directory.

    :return: a populated configparser.ConfigParser
    """
    parser = configparser.ConfigParser()
    parser.read('config.ini')
    return parser
def read_logs(log_folder):
    """Collect packet rows from the CSV log files around today's date.

    Log timestamps are UTC, so yesterday's, today's and tomorrow's
    files are all scanned to cover the timezone overlap.

    :param log_folder: folder prefix where date-named CSV logs live
    :return: list of dicts, one per CSV row, each annotated with this
        station's call sign and coordinates from config.ini
    """
    today = date.today()
    # UTC time, so let's look at tomorrow, today, and yesterday.
    offsets = (timedelta(days=-1), timedelta(days=0), timedelta(days=1))
    file_list = []
    for offset in offsets:
        file_list += glob.glob(log_folder + str(today + offset) + "*")

    # https://stackoverflow.com/a/66071962
    json_array = []
    for path in file_list:
        with open(path, encoding='utf-8') as csvf:
            json_array.extend(csv.DictReader(csvf))

    # Add the call and location of this station to the packet info
    config = read_config()
    for item in json_array:
        item['station_name'] = config['Settings']['station_call']
        item['station_lat'] = config['Settings']['station_lat']
        item['station_lon'] = config['Settings']['station_lon']
    return json_array
def dict_factory(cursor, row):
    """sqlite3 row_factory: turn a row tuple into {column_name: value}."""
    return {col[0]: row[i] for i, col in enumerate(cursor.description)}
def get_db_connection():
    """Open database.db with rows returned as dicts (see dict_factory)."""
    connection = sqlite3.connect('database.db')
    connection.row_factory = dict_factory
    return connection
def select_all_frames(conn):
    """
    Query all rows in the frames table
    :param conn: the Connection object
    :return: every row of frames, in table order
    """
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM frames")
    return cursor.fetchall()
def select_all_stations(conn):
    """
    Query all rows in the stations table
    :param conn: the Connection object
    :return: all stations, most recently heard first
    """
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM stations ORDER BY last_heard_unix DESC")
    return cursor.fetchall()
def unique_stations(conn):
    """
    Query the frames table for one row per sending station
    :param conn: the Connection object
    :return: latest frame per "from" call, with MAX(id) and COUNT(id)
        appended, ordered newest station first
    """
    query = (
        'SELECT *, MAX(id), COUNT(id) FROM frames '
        'GROUP BY "from" ORDER BY MAX(id) DESC'
    )
    cursor = conn.cursor()
    cursor.execute(query)
    return cursor.fetchall()
def select_frames(conn, n, url_params):
    """Return up to n rows from frames, filtered by URL query parameters.

    :param conn: the Connection object
    :param n: maximum number of rows to return
    :param url_params: dict of query args (request.args); keys that are
        not real frames columns are ignored
    :return: matching rows, newest (highest created) first
    """
    # Keep only keys that are known frames columns, so an arbitrary URL
    # parameter can never name a column in the SQL below.
    # From https://stackoverflow.com/a/20256491
    allowed = set(constants.db_frames_fields)
    field_where = {k: v for k, v in url_params.items() if k in allowed}
    # Build "col LIKE ?" clauses; the VALUES are bound as parameters
    # rather than interpolated into the SQL string, which closes the
    # SQL-injection hole the old f-string WHERE clause had.
    clauses = [f'"{k}" LIKE ?' for k in field_where]
    # Workaround to deal with missing value in WHERE
    where_sql = ("WHERE " + " AND ".join(clauses)) if clauses else ""
    sql = f'SELECT * FROM frames {where_sql} ORDER BY created DESC LIMIT ?'
    print(sql)
    cur = conn.cursor()
    cur.execute(sql, [*field_where.values(), n])
    return cur.fetchall()
@api_app.route('/')
def index():
    """Render the dashboard with recent frames and the station list.

    Both datasets are fetched through this app's own REST endpoints.
    """
    # Get list of recent packets using API
    # TODO use relative path
    frames = json.loads(requests.get(url_for("packets", _external=True)).text)['data']
    stations = json.loads(requests.get(url_for("stations", _external=True)).text)['data']
    # Convert unix time to datetime on the fly because I'm lazy right now
    for station in stations:
        # Use 'is not None' (identity) rather than '!= None'
        if station['last_heard_unix'] is not None:
            station['last_heard'] = datetime.datetime.utcfromtimestamp(station['last_heard_unix'])
    # 'config' is the module-level ConfigParser loaded at startup
    return render_template('index.html',
                           station_call=config['Settings']['station_call'],
                           station_lat=config['Settings']['station_lat'],
                           station_lon=config['Settings']['station_lon'],
                           frames=frames,
                           stations=stations)
class Packets(Resource):
    """REST resource for recent APRS frames (GET /packets)."""

    def get(self):
        """Return the n most recent frames, optionally filtered.

        ?n=<int> limits the row count (default 10); any other query args
        are matched against frames columns by select_frames().
        """
        # Handle arguments that may or may not exist: 'n' may be absent
        # (int(None) -> TypeError) or malformed (ValueError). Catch only
        # those instead of a bare except that would hide real bugs.
        try:
            n = int(request.args.get('n'))
        except (TypeError, ValueError):
            n = 10

        conn = get_db_connection()
        try:
            # Limit to number of records requested
            data = select_frames(conn, n=n, url_params=request.args.to_dict())
        finally:
            # Release the sqlite connection even if the query fails.
            conn.close()
        return {'data': data}, 200  # return data and 200 OK code
class Stations(Resource):
    """REST resource for the stations table (GET /stations)."""

    def get(self):
        """Return all stations, most recently heard first."""
        # Handle arguments that may or may not exist; catch only the two
        # errors int() can raise instead of a bare except.
        # NOTE(review): n is parsed but not used — select_all_stations
        # returns every row; kept for parity with Packets.
        try:
            n = int(request.args.get('n'))
        except (TypeError, ValueError):
            n = 10
        conn = get_db_connection()
        try:
            data = select_all_stations(conn)
        finally:
            # Release the sqlite connection even if the query fails.
            conn.close()
        return {'data': data}, 200  # return data and 200 OK code
# Read config
config = read_config()
log_folder = config['Settings']['log_folder']

# Start subprocess to watch KISS connection.
# Launch with an argument list and no shell: proc.pid is then the python
# process itself (same effect as the old "exec ..." shell trick), so
# terminate()/kill() reach it directly. Also drops stdout=PIPE — a pipe
# that is never read can eventually fill and block the child.
import subprocess
proc = subprocess.Popen(["python3", "kiss_and_db.py"])
print("kiss_and_db.py as subprocess pid " + str(proc.pid))
# Register the REST endpoints with flask_restful
api.add_resource(Packets, '/packets')    # recent frames
api.add_resource(Stations, '/stations')  # station list
# Dev-server entry point: listen on all interfaces so other LAN hosts can
# reach it. NOTE(review): debug=True should be off outside development.
if __name__ == '__main__':
    api_app.run(debug=True, host='0.0.0.0', port=5001) # run our Flask app