Lots of changes.

parent 6451e13ac4
commit 9ad72c4d7a

app.py (46 lines changed)

@@ -2,9 +2,6 @@ from flask import Flask
 from flask_restful import Resource, Api, reqparse
 from datetime import date, timedelta
 import configparser
-# TODO Can we do it without pandas and numpy? They are so big.
-#import pandas as pd
-#import numpy as np
 import csv
 import ast
 import glob

@@ -19,7 +16,7 @@ def read_config():
 
 def read_logs(log_folder):
     # Read some log files
-    # Zulu time, so let's look at tomorrow, today, and yesterday.
+    # UTC time, so let's look at tomorrow, today, and yesterday.
     # TODO Load new files into a database on a schedule?
     today = date.today()
     yesterday = today - timedelta(days = 1)
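
tomorrow is globbed in the next hunk but defined outside the diff context; a minimal sketch of the date window, assuming the elided line mirrors the yesterday computation:

from datetime import date, timedelta

today = date.today()
yesterday = today - timedelta(days = 1)
tomorrow = today + timedelta(days = 1)  # assumed; this line is not shown in the diff

# str(date) gives an ISO YYYY-MM-DD string, and the log file names are UTC-dated,
# so globbing yesterday/today/tomorrow covers the local-vs-UTC offset.
print(str(yesterday), str(today), str(tomorrow))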

@@ -27,10 +24,6 @@ def read_logs(log_folder):
     file_list = glob.glob(log_folder+str(yesterday)+"*") + \
                 glob.glob(log_folder+str(today)+"*") + \
                 glob.glob(log_folder+str(tomorrow)+"*")
-    # list_stacked = pd.DataFrame()
-    # for file in file_list:
-    #     file1 = pd.read_csv(file)
-    #     list_stacked = pd.concat([list_stacked, file1])
 
     # https://stackoverflow.com/a/66071962
     json_array = []
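
The loop that fills json_array mostly falls outside this hunk; a minimal sketch of the csv.DictReader approach from the linked Stack Overflow answer (file-handling details are assumptions, not the exact app.py code):

import csv

json_array = []
for file in file_list:
    with open(file, encoding='utf-8') as csvf:
        csv_reader = csv.DictReader(csvf)
        for row in csv_reader:
            # add this python dict to json array
            json_array.append(dict(row))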

@@ -41,27 +34,27 @@ def read_logs(log_folder):
             #add this python dict to json array
             json_array.append(row)
 
-    # TODO Can we do this without numpy?
-    #list_stacked.replace(np.nan, 0, inplace=True)
-    #print(list_stacked.head())
+    # Add the call and location of this station to the packet info
+    config = read_config()
+    for item in json_array:
+        item['station_name'] = config['Settings']['station_call']
+        item['station_lat'] = config['Settings']['station_lat']
+        item['station_lon'] = config['Settings']['station_lon']
 
     return(json_array)
 
-# class Users(Resource):
-#     def get(self):
-#         data = pd.read_csv('users.csv') # read CSV
-#         data = data.to_dict(orient = 'records') # convert dataframe to dictionary
-#         return {'data': data}, 200 # return data and 200 OK code
+# TODO need a function that adds the most recent location for any 'name'
+# into a simple database. Then if it needs to be mapped, it can be, even
+# if it hasn't been recently heard directly.
 
-# class Locations(Resource):
-#     def get(self):
-#         data = pd.read_csv('locations.csv') # read CSV
-#         data = data.to_dict(orient = 'records') # convert dataframe to dictionary
-#         return {'data': data}, 200 # return data and 200 OK code
 
 class Packets(Resource):
     def get(self):
-        #data = read_logs(log_folder) # re-reads the log files every time
-        #data = data.to_dict(orient = 'records') # convert dataframe to dictionary
+        # TODO neither path nor actual path taken are included here--how to
+        # include them? I am not sure whether multiple stations would be listed
+        # in 'source' or not;
+        #     "source": "KF4ME-7",
+        #     "heard": "K0UND-2",
+        # TODO need to be able to pass arguments in URL to filter
 
        return {'data': data}, 200 # return data and 200 OK code
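
One way the "pass arguments in URL to filter" TODO could be handled with the already-imported reqparse; a sketch only, and the argument name and the field it filters on are assumptions. It filters the module-level data loaded at startup:

class Packets(Resource):
    def get(self):
        # hypothetical filter, e.g. /packets?source=KF4ME-7
        parser = reqparse.RequestParser()
        parser.add_argument('source', type=str, location='args')
        args = parser.parse_args()

        packets = data
        if args['source']:
            packets = [p for p in data if p.get('source') == args['source']]
        return {'data': packets}, 200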

@@ -71,10 +64,7 @@ log_folder = config['Settings']['log_folder']
 # Load logs first (just to check for errors before page loads)
 data = read_logs(log_folder)
 
-
-#api.add_resource(Users, '/users') # '/users' is our entry point for Users
-#api.add_resource(Locations, '/locations') # and '/locations' is our entry point for Locations
 api.add_resource(Packets, '/packets') # and '/locations' is our entry point for Locations
 
 if __name__ == '__main__':
-    app.run(debug=True) # run our Flask app
+    app.run(debug=True, host='0.0.0.0') # run our Flask app
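
host='0.0.0.0' makes the dev server reachable from other machines on the network, not just localhost. A quick check of the endpoint might look like this; port 5000 is the Flask default, and requests is an assumed extra dependency:

import requests

resp = requests.get('http://localhost:5000/packets')
print(resp.status_code)            # expect 200
packets = resp.json()['data']
if packets:
    p = packets[0]
    # station fields added by the read_logs() change above
    print(p.get('station_name'), p.get('station_lat'), p.get('station_lon'))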

@@ -1,4 +1,9 @@
 [Settings]
-# Include trainling slash
+# Path to direwolf log folder, include trailing slash
 log_folder = logs/
 #log_folder = /home/pi/logs/direwolf/
+
+# Name and location of this station, for inclusion in the API
+station_call = W1CDN-1
+station_lat = 47.941500
+station_lon = -97.027000
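
read_config(), referenced in the hunk headers above, presumably loads this file with configparser; a minimal sketch, with the config file name as an assumption since it is not shown in this diff:

import configparser

def read_config():
    config = configparser.ConfigParser()
    config.read('config.ini')  # assumed file name; not shown in the diff
    return config

config = read_config()
print(config['Settings']['station_call'])   # -> W1CDN-1
print(config['Settings']['station_lat'], config['Settings']['station_lon'])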