Add status page #30

Merged
W1CDN merged 36 commits from dashboard-page into main 2023-08-26 19:05:45 -05:00
10 changed files with 242 additions and 157 deletions

3 .gitignore vendored
View File

@@ -1 +1,4 @@
/logs/*
config.ini
*.db
*.log

View File

@@ -1,11 +1,14 @@
from flask import Flask, request
from flask_restful import Resource, Api, reqparse
from flask import Flask, request, render_template
from flask_restful import Resource, Api, reqparse, url_for
from datetime import date, timedelta
import configparser
import csv
import datetime
import timeago
import ast
import glob
import json, operator
import requests
import sqlite3
api_app = Flask(__name__)
api = Api(api_app)
@@ -18,34 +21,6 @@ def read_config():
config.read('config.ini')
return config
def read_logs(log_folder):
# Read some log files
# UTC time, so let's look at tomorrow, today, and yesterday.
today = date.today()
yesterday = today - timedelta(days = 1)
tomorrow = today + timedelta(days = 1)
file_list = glob.glob(log_folder+str(yesterday)+"*") + \
glob.glob(log_folder+str(today)+"*") + \
glob.glob(log_folder+str(tomorrow)+"*")
# https://stackoverflow.com/a/66071962
json_array = []
for file in file_list:
with open(file, encoding='utf-8') as csvf:
csvReader = csv.DictReader(csvf)
for row in csvReader:
#add this python dict to json array
json_array.append(row)
# Add the call and location of this station to the packet info
config = read_config()
for item in json_array:
item['station_name'] = config['Settings']['station_call']
item['station_lat'] = config['Settings']['station_lat']
item['station_lon'] = config['Settings']['station_lon']
return(json_array)
def dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
@@ -68,7 +43,29 @@ def select_all_frames(conn):
rows = cur.fetchall()
return rows
def select_frames(conn, n, from_, url_params):
def select_all_stations(conn):
"""
Query all rows in the stations table
:param conn: the Connection object
:return:
"""
cur = conn.cursor()
cur.execute("SELECT * FROM stations ORDER BY last_heard_unix DESC")
rows = cur.fetchall()
return rows
def unique_stations(conn):
"""
Query the latest frame and frame count for each unique station in the frames table
:param conn: the Connection object
:return:
"""
cur = conn.cursor()
cur.execute('SELECT *, MAX(id), COUNT(id) FROM frames GROUP BY "from" ORDER BY MAX(id) DESC')
rows = cur.fetchall()
return rows
def select_frames(conn, n, url_params):
# Should pass this a dict of fields and values (request.args)
# TODO clean data before sending to DB
@@ -89,6 +86,40 @@ def select_frames(conn, n, from_, url_params):
rows = cur.fetchall()
return rows
@api_app.route('/')
def index():
# Get list of recent packets using API
# TODO use relative path
#frames = json.loads(requests.get(url_for("packets", _external=True)).text)['data']
#frames = json.loads(requests.get("https://digi.w1cdn.net/aprs_api/packets").text)['data']
frames = json.loads(requests.get(config['Settings']['base_url']+"/packets").text)['data']
for frame in frames:
if frame['created'] != None:
frame['time_ago'] = timeago.format(frame['created_unix'], datetime.datetime.now())
# Play with function to create station list
#stations = select_all_stations(get_db_connection())
#print(url_for("static", filename="test.txt", _external=True))
# this should work: stations = json.loads(requests.get(url_for("stations", _external=True)).text)['data']
#stations = json.loads(requests.get(url_for("stations", _external=True)).text)['data']
#stations = json.loads(requests.get("https://digi.w1cdn.net/aprs_api/stations").text)['data']
stations = json.loads(requests.get(config['Settings']['base_url']+"/stations").text)['data']
# Convert unix time to datetime on the fly because I'm lazy right now
for station in stations:
if station['last_heard_unix'] != None:
station['last_heard'] = datetime.datetime.utcfromtimestamp(station['last_heard_unix'])
station['time_ago'] = timeago.format(station['last_heard_unix'], datetime.datetime.now())
return render_template('index.html',
station_call = config['Settings']['station_call'],
station_lat = config['Settings']['station_lat'],
station_lon = config['Settings']['station_lon'],
frames = frames,
stations = stations)
class Packets(Resource):
def get(self):
# Handle arguments that may or may not exist
@@ -96,24 +127,43 @@ class Packets(Resource):
n = int(request.args.get('n'))
except:
n = 10
from_ = None if request.args.get('from') == None else request.args.get('from')
conn = get_db_connection()
# Limit to number of records requested
data = select_frames(conn, n = n, from_ = from_, url_params = request.args.to_dict())
data = select_frames(conn, n = n, url_params = request.args.to_dict())
# Sort by created date, descending (https://stackoverflow.com/a/45266808)
#data.sort(key=operator.itemgetter('created'), reverse=True)
return {'data':data}, 200 # return data and 200 OK code
class Stations(Resource):
def get(self):
# Handle arguments that may or may not exist
try:
n = int(request.args.get('n'))
except:
n = 10
conn = get_db_connection()
# Return all stations (the n limit is not applied here yet)
data = select_all_stations(conn)
# Sort by created date, descending (https://stackoverflow.com/a/45266808)
#data.sort(key=operator.itemgetter('created'), reverse=True)
return {'data':data}, 200 # return data and 200 OK code
# Read config
config = read_config()
log_folder = config['Settings']['log_folder']
# Start subprocess to watch KISS connection
import subprocess
subprocess.Popen(["python3","kiss_and_db.py"])
#proc = subprocess.Popen(["python3","kiss_and_db.py"])
# Combine under one process https://stackoverflow.com/a/13143013/2152245
proc = subprocess.Popen("exec " + "python3 kiss_and_db.py", stdout=subprocess.PIPE, shell=True)
print("kiss_and_db.py as subprocess pid "+str(proc.pid))
api.add_resource(Packets, '/packets') # and '/locations' is our entry point for Locations
# The packets endpoint
api.add_resource(Packets, '/packets')
# The stations endpoint
api.add_resource(Stations, '/stations')
if __name__ == '__main__':
api_app.run(debug=True, host='0.0.0.0', port=5001) # run our Flask app
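A minimal client sketch for the two endpoints this file exposes, assuming the base_url from config.ini and the query parameters (n, from) used elsewhere in this PR:

import requests

base = "https://digi.w1cdn.net/aprs_api"
recent = requests.get(base + "/packets", params={"n": 10}).json()["data"]              # ten newest frames
one_call = requests.get(base + "/packets", params={"from": "W1CDN-1"}).json()["data"]  # frames from one station
stations = requests.get(base + "/stations").json()["data"]                             # station summary rows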

View File

@@ -1,13 +1,12 @@
[Settings]
# Path to direwolf log folder, include trailing slash
log_folder = logs/
#log_folder = /home/pi/logs/direwolf/
# Name and location of this station, for inclusion in the API
station_call = W1CDN-1
station_lat = 47.941500
station_lon = -97.027000
# Base URL for application (no trailing slash)
base_url = https://digi.w1cdn.net/aprs_api
# How long to keep packets (frames) e.g., "2 days", "5 minutes"
keep_time = "2 days"
@@ -17,3 +16,4 @@ kiss_port = 8001
# Development settings (not operational yet)
mycall = W1CDN-15
log_path = aprs_api.log
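A minimal sketch, mirroring read_config() and index() in the API code, of how the new base_url setting is consumed:

import configparser

config = configparser.ConfigParser()
config.read('config.ini')
base_url = config['Settings']['base_url']   # no trailing slash
packets_url = base_url + "/packets"         # requested by index() for the frames table
stations_url = base_url + "/stations"       # requested by index() for the stations table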

View File

@@ -6,6 +6,7 @@ import json
import aprslib
import configparser
import time
import logging
def read_config():
config = configparser.ConfigParser()
@@ -25,10 +26,14 @@ def main():
# KISS_HOST = os.environ.get("KISS_HOST", "192.168.0.30")
# KISS_PORT = os.environ.get("KISS_PORT", "8001")
logging.basicConfig(filename=config['Settings']['log_path'], level=logging.DEBUG, \
format='%(asctime)s - %(message)s')
logging.debug('kiss_and_db.py running')
ki = aprs.TCPKISS(host=config['Settings']['kiss_host'], port=int(config['Settings']['kiss_port']))
ki.start()
# Make a simple frame and send it
frame = aprs.APRSFrame.ui(
destination="APZ001",
@@ -60,20 +65,44 @@ def main():
# Build an INSERT statement based on the fields we have from the frame
attrib_names = ', '.join('"%s"' % w for w in a.keys())
attrib_values = ", ".join("?" * len(a.keys()))
sql = "INSERT INTO frames ("+attrib_names+") VALUES ("+attrib_values+")"
try:
# Insert data
sql = "INSERT INTO frames ("+attrib_names+") VALUES ("+attrib_values+")"
conn.execute(sql, list(a.values()))
logging.debug("Frames table updated")
# TODO update stations table here
# Original intent was to include the id from the frames table,
# but that would mean making another query.
# It's not immediately needed, so I'm skipping it.
# Build query
# "from" is wrappedin [] because it is a reserved word and using '' doesn't work.
# https://www.sqlite.org/lang_keywords.html
#try:
station_update = "'"+a['from'] +"', '"+ str(a['created_unix']) +"', '1'"
query3 = "INSERT INTO stations ([from], last_heard_unix, count) \
VALUES("+station_update+") \
ON CONFLICT([from]) \
DO UPDATE SET count = count + 1,\
last_heard_unix = excluded.last_heard_unix;"
# Insert/update data
conn.execute(query3)
logging.debug("Station table updated")
conn.commit()
#except:
# print("Stations table couldn't be updated.")
# TODO remove packets that are older ('created') than a limit set in config.ini
# "5 minutes" also works
conn.execute("DELETE FROM frames WHERE created < DATETIME('now', '"+config['Settings']['keep_time']+"')")
conn.commit()
#conn.execute("DELETE FROM frames WHERE created < DATETIME('now', '"+config['Settings']['keep_time']+"')")
#conn.commit()
except:
print("Error with SQLite!")
#print("Error with SQLite!")
logging.error("Error with SQLite!")
except:
print("Frame could not be parsed.")
#print("Frame could not be parsed.")
logging.error("Frame could not be parsed.")
conn.close()
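A minimal alternative sketch of the same stations upsert using bound parameters instead of string concatenation (an assumption, not what this PR ships):

station_upsert = (
    "INSERT INTO stations ([from], last_heard_unix, count) VALUES (?, ?, 1) "
    "ON CONFLICT([from]) DO UPDATE SET count = count + 1, "
    "last_heard_unix = excluded.last_heard_unix;"
)
conn.execute(station_upsert, (a['from'], a['created_unix']))
conn.commit()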

View File

@@ -1,6 +1,9 @@
flask
flask_restful
aprs
aprs3
kiss3
kiss
aprslib
sqlite3
json
timeago

View File

@@ -50,3 +50,12 @@ CREATE TABLE frames (
weather TEXT,
wx_raw_timestamp TIMESTAMP
);
CREATE TABLE "stations" (
"id" INTEGER NOT NULL UNIQUE,
"from" TEXT UNIQUE,
"frames_id" INTEGER,
"last_heard_unix" INTEGER,
"count" INTEGER,
PRIMARY KEY("id" AUTOINCREMENT)
);

View File

@@ -1,4 +1,4 @@
#!/bin/bash
# Run `chmod +x start-aprs_api.sh` so this can be run
screen -dmS aprs_api python3 /home/pi/aprs_tools/api_waitress.py
screen -dmS aprs_api python3 /home/pi/aprs_tool/api_waitress.py

View File

@@ -1,109 +0,0 @@
#!/usr/bin/env python3
import os
import sqlite3
import aprs
import json
import aprslib
import configparser
MYCALL = os.environ.get("MYCALL", "W1CDN")
KISS_HOST = os.environ.get("KISS_HOST", "192.168.0.30")
KISS_PORT = os.environ.get("KISS_PORT", "8001")
db_fields = ("id",
"addresse",
"alive",
"altitude",
"comment",
"course",
"created",
"format",
"frame",
"from",
"gpsfixstatus",
"latitude",
"longitude",
"mbits",
"messagecapable",
"message_text",
"mtype",
"object_format",
"object_name",
"path",
"posambiguity",
"raw",
"raw_timestamp",
"speed",
"station_call",
"station_lat",
"station_lon",
"status",
"symbol",
"symbol_table",
"telemetry",
"timestamp",
"to",
"tEQNS",
"tPARM",
"tUNIT",
"via",
"weather",
"wx_raw_timestamp")
def read_config():
config = configparser.ConfigParser()
config.read('config.ini')
return config
def get_db_connection():
conn = sqlite3.connect('database.db')
conn.row_factory = sqlite3.Row
return conn
def main():
# Add the call and location of this station to the packet info
config = read_config()
ki = aprs.TCPKISS(host=KISS_HOST, port=int(KISS_PORT))
ki.start()
# Make a simple frame and send it
frame = aprs.APRSFrame.ui(
destination="APZ001",
source=MYCALL,
path=["WIDE1-1"],
info=b">Hello World!",
)
#ki.write(frame)
# Watch for new packets to come in
while True:
conn = get_db_connection()
for frame in ki.read(min_frames=1):
a = aprslib.parse(str(frame))
a['station_call'] = config['Settings']['station_call']
a['station_lat'] = config['Settings']['station_lat']
a['station_lon'] = config['Settings']['station_lon']
print(a)
# Make this a string and deal with it later (probably a mistake)
a['path'] = str(a['path'])
# Build an INSERT statement based on the fields we have from the frame
attrib_names = ', '.join(f'"{w}"' for w in a.keys())
attrib_values = ", ".join("?" * len(a.keys()))
sql = f"INSERT INTO frames ({attrib_names}) VALUES ({attrib_values})"
# Insert data
conn.execute(sql, list(a.values()))
conn.commit()
# TODO remove packets that are older ('created') than a limit set in config.ini
# "5 minutes" also works
conn.execute("DELETE FROM frames WHERE created < DATETIME('now', '"+config['Settings']['keep_time']+"')")
conn.commit()
conn.close()
if __name__ == "__main__":
main()

64 templates/index.html Normal file
View File

@@ -0,0 +1,64 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>{{station_call}} Status</title>
<style>
table, th, td {
border: 1px solid black;
}
</style>
</head>
<body>
<h1>{{station_call}} Status</h1>
Station location: {{station_lat}}, {{station_lon}}
<h2> About </h2>
This is a work in progress. See <a href="https://amiok.net/gitea/W1CDN/aprs_tool">https://amiok.net/gitea/W1CDN/aprs_tool</a> for usage.
<h2> Recent RF Packets </h2>
<table>
<tr>
<th> from </th>
<th> object_name </th>
<th> raw </th>
<th> created (utc) </th>
<th> relative </th>
<th> more </th>
</tr>
{% for i in frames %}
<tr>
<td> <a href="https://digi.w1cdn.net/aprs_api/packets?from={{ i['from'] }}">{{ i['from'] }}</a> </td>
<td> {{ i['object_name'] }} </td>
<td> {{ i['raw'] }} </td>
<td> {{ i['created'] }} </td>
<td> {{ i['time_ago'] }} </td>
<td> <a href="https://digi.w1cdn.net/aprs_api/packets?id={{ i['id'] }}">query</a>,
<a href="https://aprs.fi/#!mt=roadmap&z=12&call=a%2F{{ i['from'] }}">aprs.fi</a></td>
</tr>
{% endfor %}
</table>
<h2> Recent Stations </h2>
<table>
<tr>
<th> from </th>
<th> last heard (utc) </th>
<th> relative </th>
<th> count </th>
<th> more </th>
</tr>
{% for i in stations %}
<tr>
<td> <a href="https://digi.w1cdn.net/aprs_api/packets?from={{ i['from'] }}">{{ i['from'] }}</a> </td>
<td> {{ i['last_heard'] }} </td>
<td> {{ i['time_ago'] }} </td>
<td> {{ i['count']}} </td>
<td> <a href="https://aprs.fi/#!mt=roadmap&z=12&call=a%2F{{ i['from'] }}">aprs.fi</a></td>
</tr>
{% endfor %}
</table>
</body>
</html>

36 test_db.py Normal file
View File

@@ -0,0 +1,36 @@
# Learn how to update the database
import sqlite3
def get_db_connection():
conn = sqlite3.connect('database.db')
conn.row_factory = sqlite3.Row
return conn
conn = get_db_connection()
# Grab a random row from the frames table and pretend it is new
cur = conn.cursor()
cur.execute("SELECT [from], id, created_unix FROM frames ORDER BY RANDOM() LIMIT 1;")
rows = cur.fetchall()
results = dict(rows[0])
values = ', '.join('"%s"' % w for w in results.values())
# Build query
# "from" is wrappedin [] because it is a reserved word and using '' doesn't work.
query3 = "INSERT INTO stations ([from], frames_id, last_heard_unix, count) \
VALUES("+values+", 1) \
ON CONFLICT([from]) \
DO UPDATE SET count = count + 1;"
# example https://stackoverflow.com/a/50718957/2152245
# query2 = "INSERT INTO stations ([from], frames_id, last_heard_unix, count) \
# VALUES('KC9TZN-8', 4068, 1687623864, 1) \
# ON CONFLICT([from]) \
# DO UPDATE SET count = count + 1;"
conn.execute(query3)
conn.commit()
conn.close()
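A quick follow-up sketch, assuming database.db sits in the working directory, to confirm the upsert created or incremented a row:

conn = get_db_connection()
for row in conn.execute("SELECT [from], last_heard_unix, count FROM stations;"):
    print(dict(row))   # e.g. {'from': 'KC9TZN-8', 'last_heard_unix': 1687623864, 'count': 2}
conn.close()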