Compare commits
71 Commits: add-kiss ... 4642f45adc
(commit list spans 4642f45adc, newest, through 467ec11522, oldest)
3  .gitignore (vendored)
@@ -1 +1,4 @@
 /logs/*
+config.ini
+*.db
+*.log
80  README.md
@@ -7,44 +7,72 @@ direwolf logs into a REST API in JSON format.
 ## Setup
 1. Run direwolf with logging to CSV on by using `-l`. (`-L` not yet implemented).
 1. Install requirements using `pip install -r requirements.txt`.
+1. Set up database file with `python init_db.py`.
 2. Run `app.py` with either a Python call or a real WSGI server.
    You can use screen to detach the session.
-   - Default URL is http://127.0.0.1:5000
+   - Default URL is http://127.0.0.1:5001
    - Example `waitress` and `screen` scripts are included, see
      - `api_waitress.py` and
      - `start-aprs_api.sh`
 3. Access the API from whatever other system you want.
 
 ## Endpoints:
--`/packets` - gives the most recent packets, with the fields from the Dire Wolf
- User Guide.
+-`/packets` - gives the most recent packets, sorted descending by time received.
+  - argument `n` will return a specific number of packets, default 10. E.g.,
+    `https://digi.w1cdn.net/aprs_api/packets?n=1` returns one packet.
+  - argument `from` will return packets from the named station-SSID (no wildcards).
+    E.g., `https://digi.w1cdn.net/aprs_api/packets?n=1&from=W1CDN-1` returns
+    one packet from W1CDN-1.
 
 Example of an object packet sent by W1CDN-1 and digipeated by K0UND-2:
 ```
 {
-  "chan": 0,
-  "utime": 1680566406,
-  "isotime": "2023-04-04T00:00:06Z",
-  "source": "W1CDN-1",
-  "heard": "K0UND-2",
-  "level": "113(71/42)",
-  "error": 0,
-  "dti": ";",
-  "name": "147.390GF",
-  "symbol": "/r",
-  "latitude": 47.924167,
-  "longitude": -97.009667,
-  "speed": 0.0,
-  "course": 0.0,
-  "altitude": 0.0,
-  "frequency": 147.39,
-  "offset": 600.0,
-  "tone": 0.0,
-  "system": "DireWolf, WB2OSZ",
-  "status": 0,
-  "telemetry": 0.0,
-  "comment": " https://www.wa0jxt.org/"
-},
+  "id": 1,
+  "addresse": null,
+  "alive": null,
+  "altitude": null,
+  "comment": "Leave a message to say hi!",
+  "course": null,
+  "created": "2023-04-16 15:04:03",
+  "format": "uncompressed",
+  "frame": null,
+  "from": "W1CDN-2",
+  "gpsfixstatus": null,
+  "latitude": 47.94133333333333,
+  "longitude": -97.02683333333333,
+  "mbits": null,
+  "messagecapable": 1,
+  "message_text": null,
+  "mtype": null,
+  "object_format": null,
+  "object_name": null,
+  "path": "['K0UND-2', 'WIDE2-2']",
+  "phg": null,
+  "phg_dir": null,
+  "phg_gain": null,
+  "phg_height": null,
+  "phg_power": null,
+  "phg_range": null,
+  "posambiguity": 0,
+  "raw": "W1CDN-2>APQTH1,K0UND-2,WIDE2-2:@150321h4756.48N/09701.61W-Leave a message to say hi!",
+  "raw_timestamp": "150321h",
+  "speed": null,
+  "station_call": "W1CDN-1",
+  "station_lat": 47.9415,
+  "station_lon": -97.027,
+  "status": null,
+  "symbol": "-",
+  "symbol_table": "/",
+  "telemetry": null,
+  "timestamp": 1681657401,
+  "to": "APQTH1",
+  "tEQNS": null,
+  "tPARM": null,
+  "tUNIT": null,
+  "via": "",
+  "weather": null,
+  "wx_raw_timestamp": null
+}
 ```
 
 # Contributing
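As a quick illustration of the `/packets` arguments documented above, a minimal sketch using Python `requests` (the base URL is the one mentioned in the README; a local instance would use http://127.0.0.1:5001 instead):

```
# Minimal sketch: query the /packets endpoint with the n and from arguments.
import requests

BASE_URL = "https://digi.w1cdn.net/aprs_api"

# Ask for the single most recent packet heard from W1CDN-1.
resp = requests.get(f"{BASE_URL}/packets", params={"n": 1, "from": "W1CDN-1"})
resp.raise_for_status()

# The API wraps results in a "data" list; each packet has the fields shown above.
for packet in resp.json()["data"]:
    print(packet["from"], packet["created"], packet["raw"])
```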
160  api_app.py
@@ -1,48 +1,26 @@
-from flask import Flask
-from flask_restful import Resource, Api, reqparse
+from flask import Flask, request, render_template
+from flask_restful import Resource, Api, reqparse, url_for
 from datetime import date, timedelta
 import configparser
 import csv
+import datetime
+import timeago
 import ast
 import glob
-import json
+import json, operator
+import requests
 import sqlite3
 api_app = Flask(__name__)
 api = Api(api_app)
 
+# TODO this is duplicated from kiss_and_db.py, can I avoid that?
+import constants
+
 def read_config():
     config = configparser.ConfigParser()
     config.read('config.ini')
     return config
 
-def read_logs(log_folder):
-    # Read some log files
-    # UTC time, so let's look at tomorrow, today, and yesterday.
-    today = date.today()
-    yesterday = today - timedelta(days = 1)
-    tomorrow = today + timedelta(days = 1)
-    file_list = glob.glob(log_folder+str(yesterday)+"*") + \
-                glob.glob(log_folder+str(today)+"*") + \
-                glob.glob(log_folder+str(tomorrow)+"*")
-
-    # https://stackoverflow.com/a/66071962
-    json_array = []
-    for file in file_list:
-        with open(file, encoding='utf-8') as csvf:
-            csvReader = csv.DictReader(csvf)
-            for row in csvReader:
-                #add this python dict to json array
-                json_array.append(row)
-
-    # Add the call and location of this station to the packet info
-    config = read_config()
-    for item in json_array:
-        item['station_name'] = config['Settings']['station_call']
-        item['station_lat'] = config['Settings']['station_lat']
-        item['station_lon'] = config['Settings']['station_lon']
-
-    return(json_array)
-
 def dict_factory(cursor, row):
     d = {}
     for idx, col in enumerate(cursor.description):
@@ -51,7 +29,6 @@ def dict_factory(cursor, row):
 
 def get_db_connection():
     conn = sqlite3.connect('database.db')
-    #conn.row_factory = sqlite3.Row
     conn.row_factory = dict_factory
     return conn
 
@@ -66,24 +43,127 @@ def select_all_frames(conn):
     rows = cur.fetchall()
     return rows
 
+def select_all_stations(conn):
+    """
+    Query all rows in the stations table
+    :param conn: the Connection object
+    :return:
+    """
+    cur = conn.cursor()
+    cur.execute("SELECT * FROM stations ORDER BY last_heard_unix DESC")
+    rows = cur.fetchall()
+    return rows
+
+def unique_stations(conn):
+    """
+    Query all rows in the frames table
+    :param conn: the Connection object
+    :return:
+    """
+    cur = conn.cursor()
+    cur.execute('SELECT *, MAX(id), COUNT(id) FROM frames GROUP BY "from" ORDER BY MAX(id) DESC')
+    rows = cur.fetchall()
+    return rows
+
+def select_frames(conn, n, url_params):
+
+    # Should pass this a dict of fields and values (request.args)
+    # TODO clean data before sending to DB
+    # Filter out any keys that don't match db fields
+    # From https://stackoverflow.com/a/20256491
+    dictfilt = lambda x, y: dict([ (i,x[i]) for i in x if i in set(y) ])
+    field_where = dictfilt(url_params, constants.db_frames_fields)
+    # Then loop through fields to create query parts
+    # From https://stackoverflow.com/a/73512269/2152245
+    field_where_str = ' AND '.join([f'"{k}" LIKE \'{v}\'' for k,v in field_where.items()])
+
+    cur = conn.cursor()
+    # Workaround to deal with missing value in WHERE
+    field_where_query = "" if field_where_str == "" else "WHERE "+field_where_str
+    sql = 'SELECT * FROM frames {field_where_query} ORDER BY created DESC LIMIT {n}'.format(field_where_query=field_where_query, n=n)
+    print(sql)
+    cur.execute(sql)
+    rows = cur.fetchall()
+    return rows
+
+@api_app.route('/')
+def index():
+
+    # Get list of recent packets using API
+    # TODO use relative path
+    #frames = json.loads(requests.get(url_for("packets", _external=True)).text)['data']
+    #frames = json.loads(requests.get("https://digi.w1cdn.net/aprs_api/packets").text)['data']
+    frames = json.loads(requests.get(config['Settings']['base_url']+"/packets").text)['data']
+    for frame in frames:
+        if frame['created'] != None:
+            frame['time_ago'] = timeago.format(frame['created_unix'], datetime.datetime.now())
+
+    # Play with function to create station list
+    #stations = select_all_stations(get_db_connection())
+    #print(url_for("static", filename="test.txt", _external=True))
+    # this should work: stations = json.loads(requests.get(url_for("stations", _external=True)).text)['data']
+    #stations = json.loads(requests.get(url_for("stations", _external=True)).text)['data']
+    #stations = json.loads(requests.get("https://digi.w1cdn.net/aprs_api/stations").text)['data']
+    stations = json.loads(requests.get(config['Settings']['base_url']+"/stations").text)['data']
+    # Convert unix time to datetime on the fly because I'm lazy right now
+    for station in stations:
+        if station['last_heard_unix'] != None:
+            station['last_heard'] = datetime.datetime.utcfromtimestamp(station['last_heard_unix'])
+            station['time_ago'] = timeago.format(station['last_heard_unix'], datetime.datetime.now())
+
+    return render_template('index.html',
+                           station_call = config['Settings']['station_call'],
+                           station_lat = config['Settings']['station_lat'],
+                           station_lon = config['Settings']['station_lon'],
+                           frames = frames,
+                           stations = stations)
+
 class Packets(Resource):
     def get(self):
-        #data = read_logs(log_folder)
+        # Handle arguments that may or may not exist
+        try:
+            n = int(request.args.get('n'))
+        except:
+            n = 10
+
         conn = get_db_connection()
-        data = select_all_frames(conn)
-        return {'data': data}, 200 # return data and 200 OK code
+        # Limit to number of records requested
+        data = select_frames(conn, n = n, url_params = request.args.to_dict())
+        # Sort by created date, descending (https://stackoverflow.com/a/45266808)
+        #data.sort(key=operator.itemgetter('created'), reverse=True)
+        return {'data':data}, 200 # return data and 200 OK code
+
+class Stations(Resource):
+    def get(self):
+        # Handle arguments that may or may not exist
+        try:
+            n = int(request.args.get('n'))
+        except:
+            n = 10
+
+        conn = get_db_connection()
+        # Limit to number of records requested
+        data = select_all_stations(conn)
+        # Sort by created date, descending (https://stackoverflow.com/a/45266808)
+        #data.sort(key=operator.itemgetter('created'), reverse=True)
+        return {'data':data}, 200 # return data and 200 OK code
+
 # Read config
 config = read_config()
-log_folder = config['Settings']['log_folder']
-# Load logs first (just to check for errors before page loads)
-#data = read_logs(log_folder)
 
 # Start subprocess to watch KISS connection
 import subprocess
-subprocess.Popen(["python3","kiss_and_db.py"])
+#proc = subprocess.Popen(["python3","kiss_and_db.py"])
+# Combine under one process https://stackoverflow.com/a/13143013/2152245
+proc = subprocess.Popen("exec " + "python3 kiss_and_db.py", stdout=subprocess.PIPE, shell=True)
+print("kiss_and_db.py as subprocess pid "+str(proc.pid))
 
-api.add_resource(Packets, '/packets') # and '/locations' is our entry point for Locations
+# The packets endpoint
+api.add_resource(Packets, '/packets')
+# The stations endpoint
+api.add_resource(Stations, '/stations')
 
 if __name__ == '__main__':
     api_app.run(debug=True, host='0.0.0.0', port=5001) # run our Flask app
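For orientation, the new `select_frames()` keeps only URL parameters whose names appear in `constants.db_frames_fields` and splices them into the SQL string. A self-contained sketch of the same whitelist-then-filter idea, written here with `?` placeholders instead of string interpolation (the helper name and the parameter binding are this sketch's choices, not code from the diff):

```
import sqlite3

# Small subset of the field names defined in constants.db_frames_fields.
DB_FRAMES_FIELDS = ("from", "object_name", "symbol", "id")

def build_frames_query(url_params, n=10):
    """Keep only whitelisted keys and build a parameterized query."""
    filtered = {k: v for k, v in url_params.items() if k in DB_FRAMES_FIELDS}
    where = " AND ".join(f'"{k}" LIKE ?' for k in filtered)
    sql = "SELECT * FROM frames"
    if where:
        sql += " WHERE " + where
    sql += " ORDER BY created DESC LIMIT ?"
    return sql, list(filtered.values()) + [n]

# Example: the equivalent of /packets?n=1&from=W1CDN-1 (unknown keys are dropped).
sql, params = build_frames_query({"from": "W1CDN-1", "bogus": "ignored"}, n=1)
conn = sqlite3.connect("database.db")
rows = conn.execute(sql, params).fetchall()
```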
(configuration file)
@@ -1,13 +1,12 @@
 [Settings]
-# Path to direwolf log folder, include trailing slash
-log_folder = logs/
-#log_folder = /home/pi/logs/direwolf/
 
 # Name and location of this station, for inclusion in the API
 station_call = W1CDN-1
 station_lat = 47.941500
 station_lon = -97.027000
 
+# Base URL for application (no trailing slash)
+base_url = https://digi.w1cdn.net/aprs_api
+
 # How long to keep packets (frames) e.g., "2 days", "5 minutes"
 keep_time = "2 days"
 
@@ -17,3 +16,4 @@ kiss_port = 8001
 
 # Development settings (not operational yet)
 mycall = W1CDN-15
+log_path = aprs_api.log
54  constants.py (new file)
@@ -0,0 +1,54 @@
+# Tuple of frames table fields
+db_frames_fields = ("id",
+                    "addresse",
+                    "alive",
+                    "altitude",
+                    "body",
+                    "comment",
+                    "course",
+                    "created",
+                    "created_unix",
+                    "format",
+                    "frame",
+                    "from",
+                    "gpsfixstatus",
+                    "header_raw",
+                    "latitude",
+                    "longitude",
+                    "mbits",
+                    "messagecapable",
+                    "message_text",
+                    "msgNo",
+                    "mtype",
+                    "object_format",
+                    "object_name",
+                    "party",
+                    "path",
+                    "phg",
+                    "phg_dir",
+                    "phg_gain",
+                    "phg_height",
+                    "phg_power",
+                    "phg_range",
+                    "posambiguity",
+                    "raw",
+                    "raw_timestamp",
+                    "rng",
+                    "speed",
+                    "station_call",
+                    "station_lat",
+                    "station_lon",
+                    "status",
+                    "subpacket",
+                    "symbol",
+                    "symbol_table",
+                    "telemetry",
+                    "timestamp",
+                    "to",
+                    "tEQNS",
+                    "tPARM",
+                    "tUNIT",
+                    "type",
+                    "via",
+                    "weather",
+                    "wx_raw_timestamp")
201  kiss_and_db.py
@@ -5,53 +5,10 @@ import aprs
 import json
 import aprslib
 import configparser
-
-db_fields = ("id",
-             "addresse",
-             "alive",
-             "altitude",
-             "comment",
-             "course",
-             "created",
-             "format",
-             "frame",
-             "from",
-             "gpsfixstatus",
-             "latitude",
-             "longitude",
-             "mbits",
-             "messagecapable",
-             "message_text",
-             "mtype",
-             "object_format",
-             "object_name",
-             "path",
-             "phg",
-             "phg_dir",
-             "phg_gain",
-             "phg_height",
-             "phg_power",
-             "phg_range",
-             "posambiguity",
-             "raw",
-             "raw_timestamp",
-             "speed",
-             "station_call",
-             "station_lat",
-             "station_lon",
-             "status",
-             "subpacket",
-             "symbol",
-             "symbol_table",
-             "telemetry",
-             "timestamp",
-             "to",
-             "tEQNS",
-             "tPARM",
-             "tUNIT",
-             "via",
-             "weather",
-             "wx_raw_timestamp")
+import time
+import logging
+from apscheduler.schedulers.asyncio import AsyncIOScheduler
+import time
 
 def read_config():
     config = configparser.ConfigParser()
@@ -63,6 +20,15 @@ def get_db_connection():
     conn.row_factory = sqlite3.Row
     return conn
 
+def refresh_kiss_connection(kiss_conn):
+    logging.debug("Restarting KISS connection on schedule")
+    logging.debug("Stopping current connection")
+    kiss_conn.stop()
+    #logging.debug("Waiting 5 seconds")
+    #time.sleep(5)
+    logging.debug("Starting new connection")
+    kiss_conn.start()
+
 def main():
 
     # Add the call and location of this station to the packet info
@@ -71,51 +37,126 @@ def main():
     # KISS_HOST = os.environ.get("KISS_HOST", "192.168.0.30")
     # KISS_PORT = os.environ.get("KISS_PORT", "8001")
 
+    logging.basicConfig(filename=config['Settings']['log_path'], level=logging.DEBUG, \
+                        format='%(asctime)s - %(message)s')
+    logging.debug('============= kiss_and_db.py running =============')
+
     ki = aprs.TCPKISS(host=config['Settings']['kiss_host'], port=int(config['Settings']['kiss_port']))
     ki.start()
 
+    #scheduler = AsyncIOScheduler()
+    #scheduler.add_job(refresh_kiss_connection, 'interval', hours = 1, args = [ki])
+    #scheduler.start()
+
     # Make a simple frame and send it
-    frame = aprs.APRSFrame.ui(
-        destination="APZ001",
-        source=config['Settings']['mycall'],
-        path=["WIDE1-1"],
-        info=b">Hello World!",
-    )
-    ki.write(frame)
+    # frame = aprs.APRSFrame.ui(
+    #     destination="APZ001",
+    #     source=config['Settings']['mycall'],
+    #     path=["WIDE1-1"],
+    #     info=b">Hello World!",
+    # )
+    #ki.write(frame)
 
     # Watch for new packets to come in
     while True:
         conn = get_db_connection()
         for frame in ki.read(min_frames=1):
-            a = aprslib.parse(str(frame))
-            a['station_call'] = config['Settings']['station_call']
-            a['station_lat'] = config['Settings']['station_lat']
-            a['station_lon'] = config['Settings']['station_lon']
-            print(a)
-            # Make this a string and deal with it later (probably a mistake)
-            a['path'] = str(a['path'])
-            # Store true/false as 1/0
-            if 'alive' in a:
-                if a['alive'] == True:
-                    a['alive'] = 1
-                else:
-                    a['alive'] = 0
-            # Build an INSERT statement based on the fields we have from the frame
-            attrib_names = ', '.join('"%s"' % w for w in a.keys())
-            attrib_values = ", ".join("?" * len(a.keys()))
-            sql = "INSERT INTO frames ("+attrib_names+") VALUES ("+attrib_values+")"
+            logging.debug("New packet, trying to parse")
+            logging.debug(str(frame))
             try:
-                # Insert data
-                conn.execute(sql, list(a.values()))
-                conn.commit()
-
-                # TODO remove packets that are older ('created') than a limit set in config.ini
-                # "5 minutes" also works
-                conn.execute("DELETE FROM frames WHERE created < DATETIME('now', '"+config['Settings']['keep_time']+"')")
-                conn.commit()
-            except:
-                print("Error with SQLite!")
+                try:
+                    a = aprslib.parse(str(frame))
+                except Exception as error:
+                    logging.error("Error with aprslib:", exc_info = error)
+                else:
+                    a['station_call'] = config['Settings']['station_call']
+                    a['station_lat'] = config['Settings']['station_lat']
+                    a['station_lon'] = config['Settings']['station_lon']
+                    a['created_unix'] = int(time.time())
+                    a['party'] = 1
+
+                    # Make this a string and deal with it later (probably a mistake)
+                    a['path'] = str(a['path'])
+                    # Process 3rd-party data
+                    if 'subpacket' in a:
+                        b = a['subpacket'] # make a copy
+                        a['subpacket'] = str(a['subpacket']) # turn the original to a string
+                        b['party'] = 3
+                        b['path'] = str(b['path'])
+                        b['header_raw'] = a['raw'].split("}", 1)[0] # just the first part
+                        b['station_call'] = config['Settings']['station_call']
+                        b['station_lat'] = config['Settings']['station_lat']
+                        b['station_lon'] = config['Settings']['station_lon']
+                        b['created_unix'] = int(time.time())
+                    #logging.debug(a['path'])
+                    # Store true/false as 1/0
+                    if 'alive' in a:
+                        if a['alive'] == True:
+                            a['alive'] = 1
+                        else:
+                            a['alive'] = 0
+                    # Build an INSERT statement based on the fields we have from the frame
+                    attrib_names = ', '.join('"%s"' % w for w in a.keys())
+                    attrib_values = ", ".join("?" * len(a.keys()))
+                    logging.debug(attrib_names)
+                    logging.debug(a.values())
+                    if 'subpacket' in a:
+                        # 3rd-party
+                        b_attrib_names = ', '.join('"%s"' % w for w in b.keys())
+                        b_attrib_values = ", ".join("?" * len(b.keys()))
+                        logging.debug(b_attrib_names)
+                        logging.debug(b.values())
+
+                    try:
+                        logging.debug("Inserting into database")
+                        # Insert data
+                        sql = "INSERT INTO frames ("+attrib_names+") VALUES ("+attrib_values+")"
+                        conn.execute(sql, list(a.values()))
+                        if 'subpacket' in a:
+                            b_sql = "INSERT INTO frames ("+b_attrib_names+") VALUES ("+b_attrib_values+")"
+                            conn.execute(b_sql, list(b.values()))
+                        logging.debug("Frames table updated")
+                        # TODO update stations table here
+                        # Original intent was to include the id from the frames table,
+                        # but that would mean making another query.
+                        # It's not immediately needed, so I'm skipping it.
+                        # Build query
+                        # "from" is wrappedin [] because it is a reserved word and using '' doesn't work.
+                        # https://www.sqlite.org/lang_keywords.html
+                        #try:
+                        station_update = "'"+a['from'] +"', '"+ str(a['created_unix']) +"', '1'"
+                        query3 = "INSERT INTO stations ([from], last_heard_unix, count) \
+                            VALUES("+station_update+") \
+                            ON CONFLICT([from]) \
+                            DO UPDATE SET count = count + 1,\
+                            last_heard_unix = excluded.last_heard_unix;"
+                        # Insert/update data
+                        conn.execute(query3)
+                        if 'subpacket' in a:
+                            b_station_update = "'"+b['from'] +"', '"+ str(b['created_unix']) +"', '1'"
+                            b_query3 = "INSERT INTO stations ([from], last_heard_unix, count) \
+                                VALUES("+b_station_update+") \
+                                ON CONFLICT([from]) \
+                                DO UPDATE SET count = count + 1,\
+                                last_heard_unix = excluded.last_heard_unix;"
+                            # Insert/update data
+                            conn.execute(b_query3)
+                        logging.debug("Station table updated")
+                        conn.commit()
+                        #except:
+                        #    print("Stations table couldn't be updated.")
+
+                        # TODO remove packets that are older ('created') than a limit set in config.ini
+                        # "5 minutes" also works
+                        #conn.execute("DELETE FROM frames WHERE created < DATETIME('now', '"+config['Settings']['keep_time']+"')")
+                        #conn.commit()
+                    except Exception as error:
+                        #print("Error with SQLite!")
+                        logging.error("Error with SQLite!", exc_info = error)
+            except Exception as error:
+                #print("Frame could not be parsed.")
+                logging.error("Frame could not be parsed:", exc_info = error)
 
         conn.close()
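The stations bookkeeping above relies on SQLite's UPSERT. A minimal sketch of the same `INSERT ... ON CONFLICT ... DO UPDATE` pattern against the `stations` table defined in schema.sql, using bound parameters rather than concatenated strings (the binding style is this sketch's choice, not code from the diff):

```
import sqlite3, time

conn = sqlite3.connect("database.db")

# Record that a station was just heard: insert it the first time,
# otherwise bump its packet count and refresh last_heard_unix.
conn.execute(
    """INSERT INTO stations ([from], last_heard_unix, count)
       VALUES (?, ?, 1)
       ON CONFLICT([from])
       DO UPDATE SET count = count + 1,
                     last_heard_unix = excluded.last_heard_unix;""",
    ("W1CDN-1", int(time.time())),
)
conn.commit()
conn.close()
```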
(requirements file)
@@ -1,4 +1,9 @@
 flask
 flask_restful
-aprs
+aprs3
+kiss3
+kiss
 aprslib
+sqlite3
+json
+timeago
16  schema.sql
@@ -5,21 +5,26 @@ CREATE TABLE frames (
     addresse TEXT,
     alive INT,
     altitude REAL,
+    body TEXT,
     comment TEXT,
     course REAL,
     created TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    created_unix INT,
     format TEXT,
     frame TEXT,
     "from" TEXT,
     gpsfixstatus TEXT,
+    header_raw TEXT,
     latitude REAL,
     longitude REAL,
     mbits INT,
     messagecapable INT,
     message_text TEXT,
+    msgNo INT,
     mtype TEXT,
     object_format TEXT,
     object_name TEXT,
+    party INT,
     path TEXT,
     phg REAL,
     phg_dir TEXT,
@@ -30,6 +35,7 @@ CREATE TABLE frames (
     posambiguity INT,
     raw TEXT,
     raw_timestamp TEXT,
+    rng REAL,
     speed REAL,
     station_call TEXT,
     station_lat REAL,
@@ -44,7 +50,17 @@ CREATE TABLE frames (
     tEQNS TEXT,
     tPARM TEXT,
     tUNIT TEXT,
+    type TEXT,
     via TEXT,
     weather TEXT,
     wx_raw_timestamp TIMESTAMP
 );
+
+CREATE TABLE "stations" (
+    "id" INTEGER NOT NULL UNIQUE,
+    "from" TEXT UNIQUE,
+    "frames_id" INTEGER,
+    "last_heard_unix" INTEGER,
+    "count" INTEGER,
+    PRIMARY KEY("id" AUTOINCREMENT)
+);
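The README's `python init_db.py` step presumably loads this schema into a fresh `database.db`; `init_db.py` itself is not part of this diff, so the following is only a sketch of what such a script could look like:

```
# Hypothetical init_db.py-style script: create database.db from schema.sql.
# init_db.py is referenced in the README but not shown in this diff, so the
# details here are an assumption.
import sqlite3

with open("schema.sql") as f:
    schema = f.read()

conn = sqlite3.connect("database.db")
conn.executescript(schema)  # runs the CREATE TABLE statements above on a fresh file
conn.commit()
conn.close()
```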
(start-aprs_api.sh)
@@ -1,4 +1,4 @@
 #!/bin/bash
 # Run `chmod +x start-aprs_api.sh` so this can be run
 
-screen -dmS aprs_api python3 /home/pi/aprs_tools/api_waitress.py
+screen -dmS aprs_api python3 /home/pi/aprs_tool/api_waitress.py
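The screen wrapper launches `api_waitress.py`, which is also not shown in this diff; a minimal sketch of a waitress entry point for this app, under the assumption that it simply serves `api_app` on the same host and port as the development server:

```
# Hypothetical api_waitress.py: serve the Flask app with waitress instead of
# the built-in development server. The real script is referenced by
# start-aprs_api.sh but not included in this diff.
from waitress import serve
from api_app import api_app

serve(api_app, host="0.0.0.0", port=5001)
```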
(deleted file, 109 lines)
@@ -1,109 +0,0 @@
-#!/usr/bin/env python3
-import os
-import sqlite3
-import aprs
-import json
-import aprslib
-import configparser
-
-MYCALL = os.environ.get("MYCALL", "W1CDN")
-KISS_HOST = os.environ.get("KISS_HOST", "192.168.0.30")
-KISS_PORT = os.environ.get("KISS_PORT", "8001")
-
-db_fields = ("id",
-             "addresse",
-             "alive",
-             "altitude",
-             "comment",
-             "course",
-             "created",
-             "format",
-             "frame",
-             "from",
-             "gpsfixstatus",
-             "latitude",
-             "longitude",
-             "mbits",
-             "messagecapable",
-             "message_text",
-             "mtype",
-             "object_format",
-             "object_name",
-             "path",
-             "posambiguity",
-             "raw",
-             "raw_timestamp",
-             "speed",
-             "station_call",
-             "station_lat",
-             "station_lon",
-             "status",
-             "symbol",
-             "symbol_table",
-             "telemetry",
-             "timestamp",
-             "to",
-             "tEQNS",
-             "tPARM",
-             "tUNIT",
-             "via",
-             "weather",
-             "wx_raw_timestamp")
-
-def read_config():
-    config = configparser.ConfigParser()
-    config.read('config.ini')
-    return config
-
-def get_db_connection():
-    conn = sqlite3.connect('database.db')
-    conn.row_factory = sqlite3.Row
-    return conn
-
-def main():
-
-    # Add the call and location of this station to the packet info
-    config = read_config()
-
-    ki = aprs.TCPKISS(host=KISS_HOST, port=int(KISS_PORT))
-    ki.start()
-
-    # Make a simple frame and send it
-    frame = aprs.APRSFrame.ui(
-        destination="APZ001",
-        source=MYCALL,
-        path=["WIDE1-1"],
-        info=b">Hello World!",
-    )
-    #ki.write(frame)
-
-    # Watch for new packets to come in
-    while True:
-        conn = get_db_connection()
-        for frame in ki.read(min_frames=1):
-            a = aprslib.parse(str(frame))
-            a['station_call'] = config['Settings']['station_call']
-            a['station_lat'] = config['Settings']['station_lat']
-            a['station_lon'] = config['Settings']['station_lon']
-            print(a)
-            # Make this a string and deal with it later (probably a mistake)
-            a['path'] = str(a['path'])
-            # Build an INSERT statement based on the fields we have from the frame
-            attrib_names = ', '.join(f'"{w}"' for w in a.keys())
-            attrib_values = ", ".join("?" * len(a.keys()))
-            sql = f"INSERT INTO frames ({attrib_names}) VALUES ({attrib_values})"
-            # Insert data
-            conn.execute(sql, list(a.values()))
-            conn.commit()
-
-            # TODO remove packets that are older ('created') than a limit set in config.ini
-            # "5 minutes" also works
-            conn.execute("DELETE FROM frames WHERE created < DATETIME('now', '"+config['Settings']['keep_time']+"')")
-            conn.commit()
-
-        conn.close()
-
-
-if __name__ == "__main__":
-    main()
64  templates/index.html (new file)
@@ -0,0 +1,64 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <title>{{station_call}} Status</title>
+
+    <style>
+    table, th, td {
+        border: 1px solid black;
+    }
+    </style>
+</head>
+<body>
+    <h1>{{station_call}} Status</h1>
+    Station location: {{station_lat}}, {{station_lon}}
+
+    <h2> About </h2>
+    This is a work in progress. See <a href="https://amiok.net/gitea/W1CDN/aprs_tool">https://amiok.net/gitea/W1CDN/aprs_tool</a> for usage.
+
+    <h2> Recent RF Packets </h2>
+    <table>
+        <tr>
+            <th> from </th>
+            <th> object_name </th>
+            <th> raw </th>
+            <th> created (utc) </th>
+            <th> relative </th>
+            <th> more </th>
+        </tr>
+        {% for i in frames %}
+        <tr>
+            <td> <a href="https://digi.w1cdn.net/aprs_api/packets?from={{ i['from'] }}">{{ i['from'] }}</a> </td>
+            <td> {{ i['object_name'] }} </td>
+            <td> {{ i['raw'] }} </td>
+            <td> {{ i['created'] }} </td>
+            <td> {{ i['time_ago'] }} </td>
+            <td> <a href="https://digi.w1cdn.net/aprs_api/packets?id={{ i['id'] }}">query</a>,
+                 <a href="https://aprs.fi/#!mt=roadmap&z=12&call=a%2F{{ i['from'] }}">aprs.fi</a></td>
+        </tr>
+        {% endfor %}
+    </table>
+
+    <h2> Recent Stations </h2>
+    <table>
+        <tr>
+            <th> from </th>
+            <th> last heard (utc) </th>
+            <th> relative </th>
+            <th> count </th>
+            <th> more </th>
+        </tr>
+        {% for i in stations %}
+        <tr>
+            <td> <a href="https://digi.w1cdn.net/aprs_api/packets?from={{ i['from'] }}">{{ i['from'] }}</a> </td>
+            <td> {{ i['last_heard'] }} </td>
+            <td> {{ i['time_ago'] }} </td>
+            <td> {{ i['count']}} </td>
+            <td> <a href="https://aprs.fi/#!mt=roadmap&z=12&call=a%2F{{ i['from'] }}">aprs.fi</a></td>
+        </tr>
+        {% endfor %}
+    </table>
+
+</body>
+</html>
11  test_async.py (new file)
@@ -0,0 +1,11 @@
+import asyncio
+import aprs
+
+async def main():
+    transport, protocol = await aprs.create_tcp_connection("192.168.0.30", 8001)
+
+    async for frame in protocol.read():
+        print(frame)
+
+if __name__ == "__main__":
+    asyncio.run(main())
36  test_db.py (new file)
@@ -0,0 +1,36 @@
+
+# Learn how to update database
+
+import sqlite3
+
+def get_db_connection():
+    conn = sqlite3.connect('database.db')
+    conn.row_factory = sqlite3.Row
+    return conn
+
+conn = get_db_connection()
+
+# Grab a random row from frames table and pretend it is new
+cur = conn.cursor()
+cur.execute("SELECT [from], id, created_unix FROM frames ORDER BY RANDOM() LIMIT 1;")
+rows = cur.fetchall()
+results = dict(rows[0])
+values = ', '.join('"%s"' % w for w in results.values())
+
+
+# Build query
+# "from" is wrappedin [] because it is a reserved word and using '' doesn't work.
+query3 = "INSERT INTO stations ([from], frames_id, last_heard_unix, count) \
+    VALUES("+values+", 1) \
+    ON CONFLICT([from]) \
+    DO UPDATE SET count = count + 1;"
+
+# example https://stackoverflow.com/a/50718957/2152245
+# query2 = "INSERT INTO stations ([from], frames_id, last_heard_unix, count) \
+#     VALUES('KC9TZN-8', 4068, 1687623864, 1) \
+#     ON CONFLICT([from]) \
+#     DO UPDATE SET count = count + 1;"
+
+conn.execute(query3)
+conn.commit()
+conn.close()