Merge pull request 'Sort, filter, and limit results at /packets' (#23) from sort-filter into main

Reviewed-on: #23
W1CDN 2023-05-14 10:18:08 -05:00
commit 4203af5b81
6 changed files with 129 additions and 83 deletions

README.md · View File

@@ -10,15 +10,19 @@ direwolf logs into a REST API in JSON format.
1. Set up database file with `python init_db.py`.
2. Run `app.py` with either a Python call or a real WSGI server.
   You can use `screen` to detach the session.
   - Default URL is http://127.0.0.1:5000
   - Default URL is http://127.0.0.1:5001
   - Example `waitress` and `screen` scripts are included; see `api_waitress.py`
     and `start-aprs_api.sh` (a minimal `waitress` sketch follows this list).
3. Access the API from whatever other system you want.
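For reference, here is a minimal `waitress` launcher in the spirit of `api_waitress.py` (a sketch, not the file's actual contents; it assumes the `api_app` Flask object that `app.py` exposes in this diff, and the default port above):

```
# Sketch of a waitress launcher (cf. api_waitress.py).
# Assumes app.py exposes the api_app Flask object and the default port above.
from waitress import serve

from app import api_app

serve(api_app, host="127.0.0.1", port=5001)
```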
## Endpoints:
- `/packets` - gives the most recent packets, with the fields from the Dire Wolf User Guide.
- `/packets` - gives the most recent packets, sorted descending by time received.
  - argument `n` returns a specific number of packets (default 10). E.g.,
    `https://digi.w1cdn.net/aprs_api/packets?n=1` returns one packet.
  - argument `from` returns only packets from the named station-SSID (no wildcards).
    E.g., `https://digi.w1cdn.net/aprs_api/packets?n=1&from=W1CDN-1` returns one
    packet from W1CDN-1.
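Both arguments combine in one query string; a sketch using Python `requests` against a local instance (the exact fields returned depend on what has been heard):

```
import requests

# Fetch the 5 most recent packets heard from W1CDN-1 (local instance assumed)
r = requests.get("http://127.0.0.1:5001/packets",
                 params={"n": 5, "from": "W1CDN-1"})
packets = r.json()["data"]  # list of frame dicts, newest first
```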
Example of an object packet sent by W1CDN-1 and digipeated by K0UND-2:
```

app.py · View File

@@ -1,15 +1,18 @@
from flask import Flask
from flask import Flask, request
from flask_restful import Resource, Api, reqparse
from datetime import date, timedelta
import configparser
import csv
import ast
import glob
import json
import json, operator
import sqlite3
api_app = Flask(__name__)
api = Api(api_app)
# TODO this is duplicated from kiss_and_db.py, can I avoid that?
import constants
def read_config():
    config = configparser.ConfigParser()
    config.read('config.ini')
@@ -51,7 +54,6 @@ def dict_factory(cursor, row):
def get_db_connection():
    conn = sqlite3.connect('database.db')
    #conn.row_factory = sqlite3.Row
    conn.row_factory = dict_factory
    return conn
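`dict_factory` is referenced here but defined outside this hunk; the conventional sqlite3 recipe it presumably follows is:

```
def dict_factory(cursor, row):
    # Conventional sqlite3 recipe: map column names to values so each row
    # serializes to a JSON object instead of a bare tuple
    return {col[0]: value for col, value in zip(cursor.description, row)}
```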
@@ -66,18 +68,46 @@ def select_all_frames(conn):
    rows = cur.fetchall()
    return rows
def select_frames(conn, n, from_, url_params):
    # Should pass this a dict of fields and values (request.args)
    # TODO clean data before sending to DB
    # Note: from_ is not used directly; a "from" filter arrives through url_params

    # Filter out any keys that don't match db fields
    # From https://stackoverflow.com/a/20256491
    dictfilt = lambda x, y: dict([(i, x[i]) for i in x if i in set(y)])
    field_where = dictfilt(url_params, constants.db_frames_fields)
    # Then loop through fields to create query parts
    # From https://stackoverflow.com/a/73512269/2152245
    field_where_str = ' AND '.join([f'"{k}" LIKE \'{v}\'' for k, v in field_where.items()])
    cur = conn.cursor()
    # Workaround to deal with missing value in WHERE
    field_where_query = "" if field_where_str == "" else "WHERE "+field_where_str
    sql = 'SELECT * FROM frames {field_where_query} ORDER BY created DESC LIMIT {n}'.format(field_where_query=field_where_query, n=n)
    print(sql)
    cur.execute(sql)
    rows = cur.fetchall()
    return rows
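The interpolated `LIKE '{v}'` values are what the `TODO clean data` comment is about: the keys are whitelisted through `constants.db_frames_fields`, but the values go into the SQL string verbatim. A parameterized variant (a sketch, not the committed code) would let sqlite3 handle the quoting; column names cannot be bound as parameters, which is exactly why the key whitelist still matters:

```
# Sketch: same query, but with values and the limit bound as parameters
field_where_str = ' AND '.join([f'"{k}" LIKE ?' for k in field_where])
field_where_query = "" if field_where_str == "" else "WHERE " + field_where_str
sql = 'SELECT * FROM frames {w} ORDER BY created DESC LIMIT ?'.format(w=field_where_query)
cur.execute(sql, list(field_where.values()) + [n])
```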
class Packets(Resource):
    def get(self):
        #data = read_logs(log_folder)
        # Handle arguments that may or may not exist
        try:
            n = int(request.args.get('n'))
        except:
            n = 10
        from_ = None if request.args.get('from') == None else request.args.get('from')

        conn = get_db_connection()
        data = select_all_frames(conn)
        # Limit to number of records requested
        data = select_frames(conn, n = n, from_ = from_, url_params = request.args.to_dict())
        # Sort by created date, descending (https://stackoverflow.com/a/45266808)
        #data.sort(key=operator.itemgetter('created'), reverse=True)
        return {'data': data}, 200 # return data and 200 OK code
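`reqparse` is imported at the top of the file but unused; a sketch of how it could replace the manual `try`/`except` argument handling (an alternative, not what this commit does; dict access is used because `from` is a Python keyword):

```
# Sketch: flask_restful's reqparse instead of manual try/except
parser = reqparse.RequestParser()
parser.add_argument('n', type=int, default=10, location='args')
parser.add_argument('from', type=str, location='args')
args = parser.parse_args()  # args['n'], args['from']
```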
# Read config
config = read_config()
log_folder = config['Settings']['log_folder']
# Load logs first (just to check for errors before page loads)
#data = read_logs(log_folder)
# Start subprocess to watch KISS connection
import subprocess

constants.py (new file, 49 lines)
View File

@@ -0,0 +1,49 @@
# Tuple of frames table fields
# ("addresse" [sic] is the spelling used by aprslib's parser)
db_frames_fields = ("id",
                    "addresse",
                    "alive",
                    "altitude",
                    "comment",
                    "course",
                    "created",
                    "created_unix",
                    "format",
                    "frame",
                    "from",
                    "gpsfixstatus",
                    "latitude",
                    "longitude",
                    "mbits",
                    "messagecapable",
                    "message_text",
                    "msgNo",
                    "mtype",
                    "object_format",
                    "object_name",
                    "path",
                    "phg",
                    "phg_dir",
                    "phg_gain",
                    "phg_height",
                    "phg_power",
                    "phg_range",
                    "posambiguity",
                    "raw",
                    "raw_timestamp",
                    "speed",
                    "station_call",
                    "station_lat",
                    "station_lon",
                    "status",
                    "subpacket",
                    "symbol",
                    "symbol_table",
                    "telemetry",
                    "timestamp",
                    "to",
                    "tEQNS",
                    "tPARM",
                    "tUNIT",
                    "via",
                    "weather",
                    "wx_raw_timestamp")

kiss_and_db.py · View File

@@ -5,53 +5,7 @@ import aprs
import json
import aprslib
import configparser
db_fields = ("id",
"addresse",
"alive",
"altitude",
"comment",
"course",
"created",
"format",
"frame",
"from",
"gpsfixstatus",
"latitude",
"longitude",
"mbits",
"messagecapable",
"message_text",
"mtype",
"object_format",
"object_name",
"path",
"phg",
"phg_dir",
"phg_gain",
"phg_height",
"phg_power",
"phg_range",
"posambiguity",
"raw",
"raw_timestamp",
"speed",
"station_call",
"station_lat",
"station_lon",
"status",
"subpacket",
"symbol",
"symbol_table",
"telemetry",
"timestamp",
"to",
"tEQNS",
"tPARM",
"tUNIT",
"via",
"weather",
"wx_raw_timestamp")
import time
def read_config():
    config = configparser.ConfigParser()
@@ -82,40 +36,45 @@ def main():
path=["WIDE1-1"],
info=b">Hello World!",
)
ki.write(frame)
#ki.write(frame)
# Watch for new packets to come in
while True:
conn = get_db_connection()
for frame in ki.read(min_frames=1):
a = aprslib.parse(str(frame))
a['station_call'] = config['Settings']['station_call']
a['station_lat'] = config['Settings']['station_lat']
a['station_lon'] = config['Settings']['station_lon']
print(a)
# Make this a string and deal with it later (probably a mistake)
a['path'] = str(a['path'])
# Store true/false as 1/0
if 'alive' in a:
if a['alive'] == True:
a['alive'] = 1
else:
a['alive'] = 0
# Build an INSERT statement based on the fields we have from the frame
attrib_names = ', '.join('"%s"' % w for w in a.keys())
attrib_values = ", ".join("?" * len(a.keys()))
sql = "INSERT INTO frames ("+attrib_names+") VALUES ("+attrib_values+")"
try:
# Insert data
conn.execute(sql, list(a.values()))
conn.commit()
a = aprslib.parse(str(frame))
a['station_call'] = config['Settings']['station_call']
a['station_lat'] = config['Settings']['station_lat']
a['station_lon'] = config['Settings']['station_lon']
a['created_unix'] = int(time.time())
print(a)
# Make this a string and deal with it later (probably a mistake)
a['path'] = str(a['path'])
# Store true/false as 1/0
if 'alive' in a:
if a['alive'] == True:
a['alive'] = 1
else:
a['alive'] = 0
# Build an INSERT statement based on the fields we have from the frame
attrib_names = ', '.join('"%s"' % w for w in a.keys())
attrib_values = ", ".join("?" * len(a.keys()))
sql = "INSERT INTO frames ("+attrib_names+") VALUES ("+attrib_values+")"
try:
# Insert data
conn.execute(sql, list(a.values()))
conn.commit()
# TODO remove packets that are older ('created') than a limit set in config.ini
# "5 minutes" also works
conn.execute("DELETE FROM frames WHERE created < DATETIME('now', '"+config['Settings']['keep_time']+"')")
conn.commit()
# TODO remove packets that are older ('created') than a limit set in config.ini
# "5 minutes" also works
conn.execute("DELETE FROM frames WHERE created < DATETIME('now', '"+config['Settings']['keep_time']+"')")
conn.commit()
except:
print("Error with SQLite!")
except:
print("Error with SQLite!")
print("Frame could not be parsed.")
conn.close()
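For a sense of what lands in `a`, `aprslib.parse` returns a plain dict whose keys vary by packet type; a sketch using the example packet from the aprslib documentation:

```
import aprslib

# Position beacon adapted from the aprslib documentation
a = aprslib.parse("M0XER-4>APRS64,TF3RPF,WIDE2*,qAR,TF3SUT-2:!/.(M4I^C,O `DXa/A=040849|#B>@\"v90!+|")
print(a['from'], a['latitude'], a['longitude'])  # among the keys the INSERT above picks up
```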

View File

@@ -2,3 +2,5 @@ flask
flask_restful
aprs
aprslib
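# Note: sqlite3 and json below are in the Python standard library and need no pip install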
sqlite3
json

View File

@@ -8,6 +8,7 @@ CREATE TABLE frames (
    comment TEXT,
    course REAL,
    created TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    created_unix INT,
    format TEXT,
    frame TEXT,
    "from" TEXT,
@@ -17,6 +18,7 @@ CREATE TABLE frames (
    mbits INT,
    messagecapable INT,
    message_text TEXT,
    msgNo INT,
    mtype TEXT,
    object_format TEXT,
    object_name TEXT,