Compare commits
No commits in common. "main" and "add-kiss" have entirely different histories.
.gitignore (5 changes)
@@ -1,6 +1 @@
 /logs/*
-config.ini
-*.db
-*.log
-/temp/*
-*.pyc
README.md (80 changes)
@@ -7,72 +7,44 @@ direwolf logs into a REST API in JSON format.
 ## Setup
 1. Run direwolf with logging to CSV on by using `-l`. (`-L` not yet implemented).
 1. Install requirements using `pip install -r requirements.txt`.
-1. Set up database file with `python init_db.py`.
 2. Run `app.py` with either a Python call or a real WSGI server.
 You can use screen to detach the session.
-- Default URL is http://127.0.0.1:5001
+- Default URL is http://127.0.0.1:5000
 - Example `waitress` and `screen` scripts are included, see
 - `api_waitress.py` and
 - `start-aprs_api.sh`
 3. Access the API from whatever other system you want.

 ## Endpoints:
--`/packets` - gives the most recent packets, sorted descending by time received.
-- argument `n` will return a specific number of packets, default 10. E.g.,
-`https://digi.w1cdn.net/aprs_api/packets?n=1` returns one packet.
-- argument `from` will return packets from the named station-SSID (no wildcards).
-E.g., `https://digi.w1cdn.net/aprs_api/packets?n=1&from=W1CDN-1` returns
-one packet from W1CDN-1.
+-`/packets` - gives the most recent packets, with the fields from the Dire Wolf
+User Guide.

 Example of an object packet sent by W1CDN-1 and digipeated by K0UND-2:
 ```
 {
-"id": 1,
-"addresse": null,
-"alive": null,
-"altitude": null,
-"comment": "Leave a message to say hi!",
-"course": null,
-"created": "2023-04-16 15:04:03",
-"format": "uncompressed",
-"frame": null,
-"from": "W1CDN-2",
-"gpsfixstatus": null,
-"latitude": 47.94133333333333,
-"longitude": -97.02683333333333,
-"mbits": null,
-"messagecapable": 1,
-"message_text": null,
-"mtype": null,
-"object_format": null,
-"object_name": null,
-"path": "['K0UND-2', 'WIDE2-2']",
-"phg": null,
-"phg_dir": null,
-"phg_gain": null,
-"phg_height": null,
-"phg_power": null,
-"phg_range": null,
-"posambiguity": 0,
-"raw": "W1CDN-2>APQTH1,K0UND-2,WIDE2-2:@150321h4756.48N/09701.61W-Leave a message to say hi!",
-"raw_timestamp": "150321h",
-"speed": null,
-"station_call": "W1CDN-1",
-"station_lat": 47.9415,
-"station_lon": -97.027,
-"status": null,
-"symbol": "-",
-"symbol_table": "/",
-"telemetry": null,
-"timestamp": 1681657401,
-"to": "APQTH1",
-"tEQNS": null,
-"tPARM": null,
-"tUNIT": null,
-"via": "",
-"weather": null,
-"wx_raw_timestamp": null
-}
+"chan": 0,
+"utime": 1680566406,
+"isotime": "2023-04-04T00:00:06Z",
+"source": "W1CDN-1",
+"heard": "K0UND-2",
+"level": "113(71/42)",
+"error": 0,
+"dti": ";",
+"name": "147.390GF",
+"symbol": "/r",
+"latitude": 47.924167,
+"longitude": -97.009667,
+"speed": 0.0,
+"course": 0.0,
+"altitude": 0.0,
+"frequency": 147.39,
+"offset": 600.0,
+"tone": 0.0,
+"system": "DireWolf, WB2OSZ",
+"status": 0,
+"telemetry": 0.0,
+"comment": " https://www.wa0jxt.org/"
+},
 ```

 # Contributing
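The README hunk above documents the `/packets` endpoint and its `n` (number of packets) and `from` (station-SSID, no wildcards) query arguments; the `from` filter only exists on the side of the diff that keeps `select_frames()`. A minimal client sketch, assuming the API is reachable on one of the default local ports listed above (the two branches disagree, 5000 vs 5001):

```
# Sketch of a client call against the /packets endpoint described in the README.
# The base URL and port are assumptions; adjust to your deployment.
import requests

BASE_URL = "http://127.0.0.1:5001"

# Ask for the single most recent packet heard from W1CDN-1.
resp = requests.get(f"{BASE_URL}/packets", params={"n": 1, "from": "W1CDN-1"})
resp.raise_for_status()

# The API wraps results as {"data": [...]}.
for packet in resp.json()["data"]:
    print(packet["from"], packet["raw"])
```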
api_app.py (246 changes)
@@ -1,26 +1,48 @@
-from flask import Flask, request, render_template
-from flask_restful import Resource, Api, reqparse, url_for
+from flask import Flask
+from flask_restful import Resource, Api, reqparse
 from datetime import date, timedelta
 import configparser
 import csv
-import datetime
-import timeago
 import ast
 import glob
-import json, operator
-import requests
+import json
 import sqlite3
 api_app = Flask(__name__)
 api = Api(api_app)

-# TODO this is duplicated from kiss_and_db.py, can I avoid that?
-import constants

 def read_config():
 config = configparser.ConfigParser()
 config.read('config.ini')
 return config

+def read_logs(log_folder):
+# Read some log files
+# UTC time, so let's look at tomorrow, today, and yesterday.
+today = date.today()
+yesterday = today - timedelta(days = 1)
+tomorrow = today + timedelta(days = 1)
+file_list = glob.glob(log_folder+str(yesterday)+"*") + \
+glob.glob(log_folder+str(today)+"*") + \
+glob.glob(log_folder+str(tomorrow)+"*")
+
+# https://stackoverflow.com/a/66071962
+json_array = []
+for file in file_list:
+with open(file, encoding='utf-8') as csvf:
+csvReader = csv.DictReader(csvf)
+for row in csvReader:
+#add this python dict to json array
+json_array.append(row)
+
+# Add the call and location of this station to the packet info
+config = read_config()
+for item in json_array:
+item['station_name'] = config['Settings']['station_call']
+item['station_lat'] = config['Settings']['station_lat']
+item['station_lon'] = config['Settings']['station_lon']
+
+return(json_array)
+
 def dict_factory(cursor, row):
 d = {}
 for idx, col in enumerate(cursor.description):
@@ -29,6 +51,7 @@ def dict_factory(cursor, row):

 def get_db_connection():
 conn = sqlite3.connect('database.db')
+#conn.row_factory = sqlite3.Row
 conn.row_factory = dict_factory
 return conn

@@ -43,213 +66,24 @@ def select_all_frames(conn):
 rows = cur.fetchall()
 return rows

-def select_all_stations(conn):
-"""
-Query all rows in the stations table
-:param conn: the Connection object
-:return:
-"""
-cur = conn.cursor()
-cur.execute("SELECT * FROM stations ORDER BY last_heard_unix DESC")
-rows = cur.fetchall()
-return rows
-
-def unique_stations(conn):
-"""
-Query all rows in the frames table
-:param conn: the Connection object
-:return:
-"""
-cur = conn.cursor()
-cur.execute('SELECT *, MAX(id), COUNT(id) FROM frames GROUP BY "from" ORDER BY MAX(id) DESC')
-rows = cur.fetchall()
-return rows
-
-def select_frames(conn, n, url_params):
-
-# Should pass this a dict of fields and values (request.args)
-# TODO clean data before sending to DB
-# Filter out any keys that don't match db fields
-# From https://stackoverflow.com/a/20256491
-dictfilt = lambda x, y: dict([ (i,x[i]) for i in x if i in set(y) ])
-field_where = dictfilt(url_params, constants.db_frames_fields)
-# Then loop through fields to create query parts
-# From https://stackoverflow.com/a/73512269/2152245
-field_where_str = ' AND '.join([f'"{k}" LIKE \'{v}\'' for k,v in field_where.items()])
-
-cur = conn.cursor()
-# Workaround to deal with missing value in WHERE
-field_where_query = "" if field_where_str == "" else "WHERE "+field_where_str
-sql = 'SELECT * FROM frames {field_where_query} ORDER BY id DESC LIMIT {n}'.format(field_where_query=field_where_query, n=n)
-print(sql)
-cur.execute(sql)
-rows = cur.fetchall()
-return rows
-
-def select_stations(conn, n):
-"""
-Query rows in the stations table
-:param conn: the Connection object
-:return:
-"""
-cur = conn.cursor()
-sql = 'SELECT * FROM stations ORDER BY last_heard_unix DESC LIMIT {n}'.format(n=n)
-print(sql)
-cur.execute(sql)
-rows = cur.fetchall()
-return rows
-
-@api_app.route('/')
-def index():
-
-path = config['Settings']['base_url']
-
-# Get list of recent packets using API
-# TODO use relative path
-#frames = json.loads(requests.get(url_for("packets", _external=True)).text)['data']
-#frames = json.loads(requests.get("https://digi.w1cdn.net/aprs_api/packets").text)['data']
-frames = json.loads(requests.get(config['Settings']['base_url']+"/packets").text)['data']
-for frame in frames:
-if frame['created'] != None:
-frame['time_ago'] = timeago.format(frame['created_unix'], datetime.datetime.now())
-
-
-# Play with function to create station list
-#stations = select_all_stations(get_db_connection())
-#print(url_for("static", filename="test.txt", _external=True))
-# this should work: stations = json.loads(requests.get(url_for("stations", _external=True)).text)['data']
-#stations = json.loads(requests.get(url_for("stations", _external=True)).text)['data']
-#stations = json.loads(requests.get("https://digi.w1cdn.net/aprs_api/stations").text)['data']
-stations = json.loads(requests.get(config['Settings']['base_url']+"/stations").text)['data']
-# Convert unix time to datetime on the fly because I'm lazy right now
-for station in stations:
-if station['last_heard_unix'] != None:
-station['last_heard'] = datetime.datetime.utcfromtimestamp(station['last_heard_unix'])
-station['time_ago'] = timeago.format(station['last_heard_unix'], datetime.datetime.now())
-
-
-# Map stuff
-frames_locs = list(filter(lambda x: x['latitude'] != None, frames))
-# Make a GeoJSON
-geojs = json.dumps({
-"type": "FeatureCollection",
-"features":[
-{
-"type":"Feature",
-"geometry": {
-"type":"Point",
-"coordinates":[frame['longitude'], frame['latitude']],
-},
-"properties":frame,
-
-} for frame in frames_locs
-]
-})
-
-return render_template('index.html',
-station_call = config['Settings']['station_call'],
-station_lat = config['Settings']['station_lat'],
-station_lon = config['Settings']['station_lon'],
-frames = frames,
-stations = stations,
-geojs = geojs,
-path = path)
-
-@api_app.route('/map')
-def map():
-
-# Get the default list of frames from the API
-frames = json.loads(requests.get(config['Settings']['base_url']+"/packets").text)['data']
-
-frames_locs = list(filter(lambda x: x['latitude'] != None, frames))
-
-# Make a GeoJSON
-geojs = json.dumps({
-"type": "FeatureCollection",
-"features":[
-{
-"type":"Feature",
-"geometry": {
-"type":"Point",
-"coordinates":[frame['longitude'], frame['latitude']],
-},
-"properties":frame,
-
-} for frame in frames_locs
-]
-})
-
-# Make markers for all the frames
-# id_counter = 0
-# markers = ''
-# marker_ids = []
-# for frame in frames:
-# if frame['latitude'] != None:
-# # Create unique ID for each marker
-# idd = 'frame' + str(id_counter)
-# id_counter += 1
-
-# # Create each marker
-# markers += "var {idd} = L.marker([{latitude}, {longitude}]);\
-# {idd}.addTo(map).bindTooltip('{from_ssid}', permanent=true).openTooltip();".format(idd=idd, latitude=frame['latitude'],\
-# longitude=frame['longitude'],
-# from_ssid=frame['from'],
-# created=frame['created'])
-# # Try to make a list of markers for Leaflet, but not working
-# marker_ids.append(idd)
-
-
-return render_template('map.html',
-station_lat = config['Settings']['station_lat'],
-station_lon = config['Settings']['station_lon'],
-station_call = config['Settings']['station_call'],
-#markers = markers,
-geojs = geojs)
-
 class Packets(Resource):
 def get(self):
-# Handle arguments that may or may not exist
-try:
-n = int(request.args.get('n'))
-except:
-n = 20
+#data = read_logs(log_folder)

 conn = get_db_connection()
-# Limit to number of records requested
-data = select_frames(conn, n = n, url_params = request.args.to_dict())
-# Sort by created date, descending (https://stackoverflow.com/a/45266808)
-#data.sort(key=operator.itemgetter('created'), reverse=True)
-return {'data':data}, 200 # return data and 200 OK code
-
-class Stations(Resource):
-def get(self):
-# Handle arguments that may or may not exist
-try:
-n = int(request.args.get('n'))
-except:
-n = 20
-
-conn = get_db_connection()
-# Limit to number of records requested
-data = select_stations(conn, n = n)
-# Sort by created date, descending (https://stackoverflow.com/a/45266808)
-#data.sort(key=operator.itemgetter('created'), reverse=True)
-return {'data':data}, 200 # return data and 200 OK code
-
+data = select_all_frames(conn)
+return {'data': data}, 200 # return data and 200 OK code
+
 # Read config
 config = read_config()
+log_folder = config['Settings']['log_folder']
+# Load logs first (just to check for errors before page loads)
+#data = read_logs(log_folder)

 # Start subprocess to watch KISS connection
 import subprocess
-#proc = subprocess.Popen(["python3","kiss_and_db.py"])
-# Combine under one process https://stackoverflow.com/a/13143013/2152245
-proc = subprocess.Popen("exec " + "python3 kiss_and_db.py", stdout=subprocess.PIPE, shell=True)
-print("kiss_and_db.py as subprocess pid "+str(proc.pid))
+subprocess.Popen(["python3","kiss_and_db.py"])

-# The packets endpoint
-api.add_resource(Packets, '/packets')
-# The stations endpoint
-api.add_resource(Stations, '/stations')
+api.add_resource(Packets, '/packets') # and '/locations' is our entry point for Locations

 if __name__ == '__main__':
 api_app.run(debug=True, host='0.0.0.0', port=5001) # run our Flask app
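In the hunk above, `select_frames()` filters `request.args` against the field allow-list in `constants.py` and then interpolates the values straight into a `LIKE` clause (the code itself carries a "TODO clean data before sending to DB"). As a rough, self-contained sketch (not the repository's code, and with an illustrative subset of field names), the same allow-list filtering can be written with bound parameters:

```
# Sketch: allow-list filtering of URL params into a parameterized SQLite query.
import sqlite3

DB_FRAMES_FIELDS = ("from", "object_name", "symbol")  # illustrative subset

def select_frames_sketch(conn, n, url_params):
    # Keep only keys that are real frame columns (mirrors dictfilt + constants).
    filters = {k: v for k, v in url_params.items() if k in DB_FRAMES_FIELDS}
    where = " AND ".join(f'"{k}" LIKE ?' for k in filters)
    sql = "SELECT * FROM frames"
    if where:
        sql += " WHERE " + where
    sql += " ORDER BY id DESC LIMIT ?"
    cur = conn.execute(sql, (*filters.values(), n))
    return cur.fetchall()

if __name__ == "__main__":
    conn = sqlite3.connect(":memory:")
    conn.execute('CREATE TABLE frames (id INTEGER PRIMARY KEY, "from" TEXT, object_name TEXT, symbol TEXT)')
    conn.execute('INSERT INTO frames ("from", object_name, symbol) VALUES (?, ?, ?)',
                 ("W1CDN-1", "147.390GF", "/r"))
    print(select_frames_sketch(conn, 10, {"from": "W1CDN-1", "bogus": "x"}))
```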
@@ -1,7 +0,0 @@
-{
-"folders": [
-{
-"path": "."
-}
-]
-}
@@ -1,19 +1,19 @@
 [Settings]
+# Path to direwolf log folder, include trailing slash
+log_folder = logs/
+#log_folder = /home/pi/logs/direwolf/
+
 # Name and location of this station, for inclusion in the API
 station_call = W1CDN-1
 station_lat = 47.941500
 station_lon = -97.027000

-# Base URL for application (no trailing slash)
-base_url = https://digi.w1cdn.net/aprs_api
-
 # How long to keep packets (frames) e.g., "2 days", "5 minutes"
 keep_time = "2 days"

 # KISS settings
-kiss_host = 192.168.0.100
+kiss_host = 192.168.0.30
 kiss_port = 8001

 # Development settings (not operational yet)
 mycall = W1CDN-15
-log_path = aprs_api.log
constants.py (54 changes)
@@ -1,54 +0,0 @@
-# Tuple of frames table fields
-db_frames_fields = ("id",
-"addresse",
-"alive",
-"altitude",
-"body",
-"comment",
-"course",
-"created",
-"created_unix",
-"format",
-"frame",
-"from",
-"gpsfixstatus",
-"header_raw",
-"latitude",
-"longitude",
-"mbits",
-"messagecapable",
-"message_text",
-"msgNo",
-"mtype",
-"object_format",
-"object_name",
-"party",
-"path",
-"phg",
-"phg_dir",
-"phg_gain",
-"phg_height",
-"phg_power",
-"phg_range",
-"posambiguity",
-"raw",
-"raw_timestamp",
-"rng",
-"speed",
-"station_call",
-"station_lat",
-"station_lon",
-"status",
-"subpacket",
-"symbol",
-"symbol_table",
-"telemetry",
-"timestamp",
-"to",
-"tEQNS",
-"tPARM",
-"tUNIT",
-"type",
-"via",
-"weather",
-"wx_raw_timestamp")
kiss_and_db.py (201 changes)
@@ -5,10 +5,53 @@ import aprs
 import json
 import aprslib
 import configparser
-import time
-import logging
-from apscheduler.schedulers.asyncio import AsyncIOScheduler
-import time
+db_fields = ("id",
+"addresse",
+"alive",
+"altitude",
+"comment",
+"course",
+"created",
+"format",
+"frame",
+"from",
+"gpsfixstatus",
+"latitude",
+"longitude",
+"mbits",
+"messagecapable",
+"message_text",
+"mtype",
+"object_format",
+"object_name",
+"path",
+"phg",
+"phg_dir",
+"phg_gain",
+"phg_height",
+"phg_power",
+"phg_range",
+"posambiguity",
+"raw",
+"raw_timestamp",
+"speed",
+"station_call",
+"station_lat",
+"station_lon",
+"status",
+"subpacket",
+"symbol",
+"symbol_table",
+"telemetry",
+"timestamp",
+"to",
+"tEQNS",
+"tPARM",
+"tUNIT",
+"via",
+"weather",
+"wx_raw_timestamp")

 def read_config():
 config = configparser.ConfigParser()
@@ -20,15 +63,6 @@ def get_db_connection():
 conn.row_factory = sqlite3.Row
 return conn

-def refresh_kiss_connection(kiss_conn):
-logging.debug("Restarting KISS connection on schedule")
-logging.debug("Stopping current connection")
-kiss_conn.stop()
-#logging.debug("Waiting 5 seconds")
-#time.sleep(5)
-logging.debug("Starting new connection")
-kiss_conn.start()
-
 def main():

 # Add the call and location of this station to the packet info
@@ -37,126 +71,51 @@ def main():
 # KISS_HOST = os.environ.get("KISS_HOST", "192.168.0.30")
 # KISS_PORT = os.environ.get("KISS_PORT", "8001")

-logging.basicConfig(filename=config['Settings']['log_path'], level=logging.DEBUG, \
-format='%(asctime)s - %(message)s')
-logging.debug('============= kiss_and_db.py running =============')
-
 ki = aprs.TCPKISS(host=config['Settings']['kiss_host'], port=int(config['Settings']['kiss_port']))
 ki.start()

-#scheduler = AsyncIOScheduler()
-#scheduler.add_job(refresh_kiss_connection, 'interval', hours = 1, args = [ki])
-#scheduler.start()
-
 # Make a simple frame and send it
-# frame = aprs.APRSFrame.ui(
-# destination="APZ001",
-# source=config['Settings']['mycall'],
-# path=["WIDE1-1"],
-# info=b">Hello World!",
-# )
-#ki.write(frame)
+frame = aprs.APRSFrame.ui(
+destination="APZ001",
+source=config['Settings']['mycall'],
+path=["WIDE1-1"],
+info=b">Hello World!",
+)
+ki.write(frame)

 # Watch for new packets to come in
 while True:
 conn = get_db_connection()
 for frame in ki.read(min_frames=1):
-logging.debug("New packet, trying to parse")
-logging.debug(str(frame))
-try:
-try:
-a = aprslib.parse(str(frame))
-except Exception as error:
-logging.error("Error with aprslib:", exc_info = error)
+a = aprslib.parse(str(frame))
+a['station_call'] = config['Settings']['station_call']
+a['station_lat'] = config['Settings']['station_lat']
+a['station_lon'] = config['Settings']['station_lon']
+print(a)
+# Make this a string and deal with it later (probably a mistake)
+a['path'] = str(a['path'])
+# Store true/false as 1/0
+if 'alive' in a:
+if a['alive'] == True:
+a['alive'] = 1
 else:
-a['station_call'] = config['Settings']['station_call']
-a['station_lat'] = config['Settings']['station_lat']
-a['station_lon'] = config['Settings']['station_lon']
-a['created_unix'] = int(time.time())
-a['party'] = 1
-# Make this a string and deal with it later (probably a mistake)
-a['path'] = str(a['path'])
-# Process 3rd-party data
-if 'subpacket' in a:
-b = a['subpacket'] # make a copy
-a['subpacket'] = str(a['subpacket']) # turn the original to a string
-b['party'] = 3
-b['path'] = str(b['path'])
-b['header_raw'] = a['raw'].split("}", 1)[0] # just the first part
-b['station_call'] = config['Settings']['station_call']
-b['station_lat'] = config['Settings']['station_lat']
-b['station_lon'] = config['Settings']['station_lon']
-b['created_unix'] = int(time.time())
-#logging.debug(a['path'])
-# Store true/false as 1/0
-if 'alive' in a:
-if a['alive'] == True:
-a['alive'] = 1
-else:
-a['alive'] = 0
-# Build an INSERT statement based on the fields we have from the frame
-attrib_names = ', '.join('"%s"' % w for w in a.keys())
-attrib_values = ", ".join("?" * len(a.keys()))
-logging.debug(attrib_names)
-logging.debug(a.values())
-if 'subpacket' in a:
-# 3rd-party
-b_attrib_names = ', '.join('"%s"' % w for w in b.keys())
-b_attrib_values = ", ".join("?" * len(b.keys()))
-logging.debug(b_attrib_names)
-logging.debug(b.values())
-
-try:
-logging.debug("Inserting into database")
-# Insert data
-sql = "INSERT INTO frames ("+attrib_names+") VALUES ("+attrib_values+")"
-conn.execute(sql, list(a.values()))
-if 'subpacket' in a:
-b_sql = "INSERT INTO frames ("+b_attrib_names+") VALUES ("+b_attrib_values+")"
-conn.execute(b_sql, list(b.values()))
-logging.debug("Frames table updated")
-# TODO update stations table here
-# Original intent was to include the id from the frames table,
-# but that would mean making another query.
-# It's not immediately needed, so I'm skipping it.
-# Build query
-# "from" is wrappedin [] because it is a reserved word and using '' doesn't work.
-# https://www.sqlite.org/lang_keywords.html
-#try:
-station_update = "'"+a['from'] +"', '"+ str(a['created_unix']) +"', '1'"
-query3 = "INSERT INTO stations ([from], last_heard_unix, count) \
-VALUES("+station_update+") \
-ON CONFLICT([from]) \
-DO UPDATE SET count = count + 1,\
-last_heard_unix = excluded.last_heard_unix;"
-# Insert/update data
-conn.execute(query3)
-if 'subpacket' in a:
-b_station_update = "'"+b['from'] +"', '"+ str(b['created_unix']) +"', '1'"
-b_query3 = "INSERT INTO stations ([from], last_heard_unix, count) \
-VALUES("+b_station_update+") \
-ON CONFLICT([from]) \
-DO UPDATE SET count = count + 1,\
-last_heard_unix = excluded.last_heard_unix;"
-# Insert/update data
-conn.execute(b_query3)
-logging.debug("Station table updated")
-conn.commit()
-#except:
-# print("Stations table couldn't be updated.")
-
-# TODO remove packets that are older ('created') than a limit set in config.ini
-# "5 minutes" also works
-#conn.execute("DELETE FROM frames WHERE created < DATETIME('now', '"+config['Settings']['keep_time']+"')")
-#conn.commit()
-except Exception as error:
-#print("Error with SQLite!")
-logging.error("Error with SQLite!", exc_info = error)
-except Exception as error:
-#print("Frame could not be parsed.")
-logging.error("Frame could not be parsed:", exc_info = error)
-
+a['alive'] = 0
+# Build an INSERT statement based on the fields we have from the frame
+attrib_names = ', '.join('"%s"' % w for w in a.keys())
+attrib_values = ", ".join("?" * len(a.keys()))
+sql = "INSERT INTO frames ("+attrib_names+") VALUES ("+attrib_values+")"
+try:
+# Insert data
+conn.execute(sql, list(a.values()))
+conn.commit()
+
+# TODO remove packets that are older ('created') than a limit set in config.ini
+# "5 minutes" also works
+conn.execute("DELETE FROM frames WHERE created < DATETIME('now', '"+config['Settings']['keep_time']+"')")
+conn.commit()
+except:
+print("Error with SQLite!")

 conn.close()
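On the side of this diff that still writes to the `stations` table, `kiss_and_db.py` keeps the per-station counters current with an `INSERT ... ON CONFLICT` upsert built by string concatenation. A small self-contained sketch of that same statement with placeholders (the table layout follows the `stations` table in `schema.sql` further down; the connection and packet dict are stand-ins):

```
# Sketch of the stations upsert with bound parameters (not the repository's code).
import sqlite3
import time

def upsert_station(conn, packet):
    # "from" is wrapped in [] because it is a reserved word in SQLite.
    conn.execute(
        "INSERT INTO stations ([from], last_heard_unix, count) VALUES (?, ?, 1) "
        "ON CONFLICT([from]) DO UPDATE SET "
        "count = count + 1, last_heard_unix = excluded.last_heard_unix",
        (packet["from"], packet["created_unix"]),
    )
    conn.commit()

if __name__ == "__main__":
    conn = sqlite3.connect(":memory:")
    conn.execute('CREATE TABLE stations ("from" TEXT UNIQUE, last_heard_unix INTEGER, count INTEGER)')
    upsert_station(conn, {"from": "W1CDN-1", "created_unix": int(time.time())})
    upsert_station(conn, {"from": "W1CDN-1", "created_unix": int(time.time())})
    print(conn.execute("SELECT * FROM stations").fetchall())  # count ends up at 2
```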
@@ -1,9 +1,4 @@
 flask
 flask_restful
-aprs3
-kiss3
-kiss
+aprs
 aprslib
-sqlite3
-json
-timeago
schema.sql (16 changes)
@@ -5,26 +5,21 @@ CREATE TABLE frames (
 addresse TEXT,
 alive INT,
 altitude REAL,
-body TEXT,
 comment TEXT,
 course REAL,
 created TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
-created_unix INT,
 format TEXT,
 frame TEXT,
 "from" TEXT,
 gpsfixstatus TEXT,
-header_raw TEXT,
 latitude REAL,
 longitude REAL,
 mbits INT,
 messagecapable INT,
 message_text TEXT,
-msgNo INT,
 mtype TEXT,
 object_format TEXT,
 object_name TEXT,
-party INT,
 path TEXT,
 phg REAL,
 phg_dir TEXT,
@@ -35,7 +30,6 @@ CREATE TABLE frames (
 posambiguity INT,
 raw TEXT,
 raw_timestamp TEXT,
-rng REAL,
 speed REAL,
 station_call TEXT,
 station_lat REAL,
@@ -50,17 +44,7 @@ CREATE TABLE frames (
 tEQNS TEXT,
 tPARM TEXT,
 tUNIT TEXT,
-type TEXT,
 via TEXT,
 weather TEXT,
 wx_raw_timestamp TIMESTAMP
 );
-
-CREATE TABLE "stations" (
-"id" INTEGER NOT NULL UNIQUE,
-"from" TEXT UNIQUE,
-"frames_id" INTEGER,
-"last_heard_unix" INTEGER,
-"count" INTEGER,
-PRIMARY KEY("id" AUTOINCREMENT)
-);
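One side of the README above tells users to run `python init_db.py` to set up the database file, but that script is not included in this compare. Presumably it replays a schema like the one above into `database.db`; a hedged sketch of such a setup step (file names assumed) could be:

```
# Assumed shape of a database setup script; init_db.py itself is not shown here.
import sqlite3

with open("schema.sql", encoding="utf-8") as f:
    schema = f.read()

conn = sqlite3.connect("database.db")
conn.executescript(schema)  # creates the frames (and, on one side, stations) tables
conn.commit()
conn.close()
```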
@@ -1,4 +1,4 @@
 #!/bin/bash
 # Run `chmod +x start-aprs_api.sh` so this can be run

-screen -dmS aprs_api python3 /home/pi/aprs_tool/api_waitress.py
+screen -dmS aprs_api python3 /home/pi/aprs_tools/api_waitress.py
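The hunk above only changes the path that the `screen` wrapper points at; `api_waitress.py` itself is not part of this compare. A script like that is typically just a few lines of `waitress` around the Flask app, roughly along these lines (an assumption, not the actual file, and the port is a guess):

```
# Assumed shape of api_waitress.py: serve the Flask app with a production WSGI server.
from waitress import serve

from api_app import api_app

if __name__ == "__main__":
    serve(api_app, host="0.0.0.0", port=5001)
```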
tcp_kiss_send_recv.py (new file, 109 lines)
@@ -0,0 +1,109 @@
+#!/usr/bin/env python3
+import os
+import sqlite3
+import aprs
+import json
+import aprslib
+import configparser
+
+MYCALL = os.environ.get("MYCALL", "W1CDN")
+KISS_HOST = os.environ.get("KISS_HOST", "192.168.0.30")
+KISS_PORT = os.environ.get("KISS_PORT", "8001")
+
+db_fields = ("id",
+"addresse",
+"alive",
+"altitude",
+"comment",
+"course",
+"created",
+"format",
+"frame",
+"from",
+"gpsfixstatus",
+"latitude",
+"longitude",
+"mbits",
+"messagecapable",
+"message_text",
+"mtype",
+"object_format",
+"object_name",
+"path",
+"posambiguity",
+"raw",
+"raw_timestamp",
+"speed",
+"station_call",
+"station_lat",
+"station_lon",
+"status",
+"symbol",
+"symbol_table",
+"telemetry",
+"timestamp",
+"to",
+"tEQNS",
+"tPARM",
+"tUNIT",
+"via",
+"weather",
+"wx_raw_timestamp")
+
+def read_config():
+config = configparser.ConfigParser()
+config.read('config.ini')
+return config
+
+def get_db_connection():
+conn = sqlite3.connect('database.db')
+conn.row_factory = sqlite3.Row
+return conn
+
+def main():
+
+# Add the call and location of this station to the packet info
+config = read_config()
+
+ki = aprs.TCPKISS(host=KISS_HOST, port=int(KISS_PORT))
+ki.start()
+
+# Make a simple frame and send it
+frame = aprs.APRSFrame.ui(
+destination="APZ001",
+source=MYCALL,
+path=["WIDE1-1"],
+info=b">Hello World!",
+)
+#ki.write(frame)
+
+# Watch for new packets to come in
+while True:
+conn = get_db_connection()
+for frame in ki.read(min_frames=1):
+a = aprslib.parse(str(frame))
+a['station_call'] = config['Settings']['station_call']
+a['station_lat'] = config['Settings']['station_lat']
+a['station_lon'] = config['Settings']['station_lon']
+print(a)
+# Make this a string and deal with it later (probably a mistake)
+a['path'] = str(a['path'])
+# Build an INSERT statement based on the fields we have from the frame
+attrib_names = ', '.join(f'"{w}"' for w in a.keys())
+attrib_values = ", ".join("?" * len(a.keys()))
+sql = f"INSERT INTO frames ({attrib_names}) VALUES ({attrib_values})"
+# Insert data
+conn.execute(sql, list(a.values()))
+conn.commit()
+
+# TODO remove packets that are older ('created') than a limit set in config.ini
+# "5 minutes" also works
+conn.execute("DELETE FROM frames WHERE created < DATETIME('now', '"+config['Settings']['keep_time']+"')")
+conn.commit()
+
+conn.close()
+
+if __name__ == "__main__":
+main()
@@ -1,129 +0,0 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
-<meta charset="UTF-8">
-<title>{{station_call}} Status</title>
-
-<!-- Leaflet's CSS -->
-<link rel="stylesheet" href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css"
-integrity="sha256-p4NxAoJBhIIN+hmNHrzRCf9tD/miZyoHS5obTRR9BMY="
-crossorigin=""/>
-<!-- Make sure you put this AFTER Leaflet's CSS -->
-<script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js"
-integrity="sha256-20nQCchB9co0qIjJZRGuk2/Z9VM+kNiyxNV1lvTlZBo="
-crossorigin=""></script>
-
-<style>
-table, th, td {
-border: 1px solid black;
-}
-#map { height: 250px; }
-.leaflet-tooltip.my-labels {
-background-color: transparent;
-border: transparent;
-box-shadow: none;
-}
-</style>
-</head>
-<body>
-<div style="width: 100%; overflow: hidden;">
-<div style="width: 50%; float: left;">
-
-<h1>{{station_call}} Status</h1>
-Station location: {{station_lat}}, {{station_lon}}
-
-<h2> About </h2>
-This is a work in progress. See <a href="https://amiok.net/gitea/W1CDN/aprs_tool">https://amiok.net/gitea/W1CDN/aprs_tool</a> for usage.
-</div>
-<div style="margin-left: 50%;">
-<div id="map"></div>
-<script>
-var map = L.map('map').setView([{{station_lat}}, {{station_lon}}], 10);
-L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {attribution: '© <a href=\"https://www.openstreetmap.org/copyright\">OpenStreetMap</a> contributors'}).addTo(map);
-
-//{{markers|safe}}
-
-// Show station location
-var station = L.marker([{{station_lat}}, {{station_lon}}]).addTo(map).bindTooltip('{{station_call}}', {permanent: true}).openTooltip();
-
-// Show GeoJSON of markers
-var group = L.geoJSON({{geojs|safe}},
-{
-style: function (feature) {
-return {color: feature.properties.color};
-}
-});
-
-// group.bindTooltip(function (layer) {
-// return 'Object '+layer.feature.properties.object_name+' from '+layer.feature.properties.from;
-// }, {permanent: false}).openTooltip().addTo(map);
-// Hacked together from https://gis.stackexchange.com/a/246919
-var pointLayer = L.geoJSON(null, {
-pointToLayer: function(feature,latlng){
-//(true condition) ? "true" : "false"
-label = (feature.properties.object_name === null) ? String(feature.properties.from) : String(feature.properties.object_name)
-//label = String('Object '+feature.properties.object_name+' from '+feature.properties.from) // Must convert to string, .bindTooltip can't use straight 'feature.properties.attribute'
-return new L.CircleMarker(latlng, {
-radius: 1,
-}).bindTooltip(label, {permanent: true, opacity: 0.7, className: "my-labels"}).openTooltip();
-}
-});
-pointLayer.addData({{geojs|safe}});
-map.addLayer(pointLayer);
-
-// Zoom to show all
-map.fitBounds(group.getBounds().pad(0.3));
-</script>
-</div>
-</div>
-
-<h2> Recent RF Packets </h2>
-Query <a href="{{path}}/packets?n=10">10</a> | <a href="{{path}}/packets?n=50">50</a> | <a href="{{path}}/packets?n=100">100</a>
-<table>
-<tr>
-<th> from </th>
-<th> object_name </th>
-<th> raw </th>
-<th> created (utc) </th>
-<th> relative </th>
-<th> more </th>
-</tr>
-{% for i in frames %}
-<tr>
-<td> <a href="https://digi.w1cdn.net/aprs_api/packets?from={{ i['from'] }}">{{ i['from'] }}</a> </td>
-<td> {{ i['object_name'] }} </td>
-<td> {{ i['raw'] }} </td>
-<td> {{ i['created'] }} </td>
-<td> {{ i['time_ago'] }} </td>
-<td> <a href="https://digi.w1cdn.net/aprs_api/packets?id={{ i['id'] }}">query</a>,
-<a href="https://aprs.fi/#!mt=roadmap&z=12&call=a%2F{{ i['from'] }}">aprs.fi</a></td>
-</tr>
-{% endfor %}
-</table>
-
-<h2> Recent Stations </h2>
-Query <a href="{{path}}/stations?n=10">10</a> | <a href="{{path}}/stations?n=50">50</a> | <a href="{{path}}/stations?n=100">100</a>
-<table>
-<tr>
-<th> from </th>
-<th> last heard (utc) </th>
-<th> relative </th>
-<th> count </th>
-<th> more </th>
-</tr>
-{% for i in stations %}
-<tr>
-<td> <a href="https://digi.w1cdn.net/aprs_api/packets?from={{ i['from'] }}">{{ i['from'] }}</a> </td>
-<td> {{ i['last_heard'] }} </td>
-<td> {{ i['time_ago'] }} </td>
-<td> {{ i['count']}} </td>
-<td> <a href="https://aprs.fi/#!mt=roadmap&z=12&call=a%2F{{ i['from'] }}">aprs.fi</a></td>
-</tr>
-{% endfor %}
-</table>
-
-</body>
-</html>
@@ -1,42 +0,0 @@
-<html>
-<head>
-<!-- Leaflet's CSS -->
-<link rel="stylesheet" href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css"
-integrity="sha256-p4NxAoJBhIIN+hmNHrzRCf9tD/miZyoHS5obTRR9BMY="
-crossorigin=""/>
-<!-- Make sure you put this AFTER Leaflet's CSS -->
-<script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js"
-integrity="sha256-20nQCchB9co0qIjJZRGuk2/Z9VM+kNiyxNV1lvTlZBo="
-crossorigin=""></script>
-<style>
-#map { height: 100%; }
-</style>
-</head>
-<body>
-<div id="map"></div>
-
-<script>
-var map = L.map('map').setView([{{station_lat}}, {{station_lon}}], 10);
-L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {attribution: '© <a href=\"https://www.openstreetmap.org/copyright\">OpenStreetMap</a> contributors'}).addTo(map);
-
-//{{markers|safe}}
-
-// Show station location
-var station = L.marker([{{station_lat}}, {{station_lon}}]).addTo(map).bindTooltip('{{station_call}}', permanent=true).openTooltip();
-
-// Show GeoJSON of markers
-var group = L.geoJSON({{geojs|safe}},
-{
-style: function (feature) {
-return {color: feature.properties.color};
-}
-}).bindTooltip(function (layer) {
-return 'Object '+layer.feature.properties.object_name+' from '+layer.feature.properties.from;
-}, permanent=true).addTo(map);
-
-// Zoom to show all
-map.fitBounds(group.getBounds().pad(0.2));
-
-</script>
-</body>
-</html>
@@ -1,11 +0,0 @@
-import asyncio
-import aprs
-
-async def main():
-transport, protocol = await aprs.create_tcp_connection("192.168.0.30", 8001)
-
-async for frame in protocol.read():
-print(frame)
-
-if __name__ == "__main__":
-asyncio.run(main())