Set up a config file, and re-read the log files on every API call.

mattbk 2023-04-05 20:36:19 -05:00
parent dcff8433ed
commit 392c2afb6c
2 changed files with 38 additions and 12 deletions

app.py (46 changed lines)

@@ -1,5 +1,7 @@
 from flask import Flask
 from flask_restful import Resource, Api, reqparse
+from datetime import date, timedelta
+import configparser
 import pandas as pd
 import numpy as np
 import ast
@@ -8,6 +10,30 @@ import glob
 app = Flask(__name__)
 api = Api(app)
 
+def read_config():
+    config = configparser.ConfigParser()
+    config.read('config.ini')
+    return config
+
+def read_logs(log_folder):
+    # Read some log files
+    # Zulu time, so let's look at tomorrow, today, and yesterday.
+    # TODO Load new files into a database on a schedule?
+    today = date.today()
+    yesterday = today - timedelta(days = 1)
+    tomorrow = today + timedelta(days = 1)
+    file_list = glob.glob(log_folder+str(yesterday)+"*") + \
+        glob.glob(log_folder+str(today)+"*") + \
+        glob.glob(log_folder+str(tomorrow)+"*")
+    list_stacked = pd.DataFrame()
+    for file in file_list:
+        file1 = pd.read_csv(file)
+        list_stacked = pd.concat([list_stacked, file1])
+    # TODO Can we do this without numpy?
+    list_stacked.replace(np.nan, 0, inplace=True)
+    #print(list_stacked.head())
+    return(list_stacked)
+
 class Users(Resource):
     def get(self):
         data = pd.read_csv('users.csv') # read CSV
@@ -20,24 +46,20 @@ class Locations(Resource):
         data = data.to_dict(orient = 'records') # convert dataframe to dictionary
         return {'data': data}, 200 # return data and 200 OK code
 
-# Read some log files
-list_stacked = pd.DataFrame()
-file_list = glob.glob("logs/*.log")
-#print(file_list)
-for file in file_list:
-    file1 = pd.read_csv(file)
-    list_stacked = pd.concat([list_stacked, file1])
-# TODO Can we do this without numpy?
-list_stacked.replace(np.nan, 0, inplace=True)
-#print(list_stacked.head())
-
 class Packets(Resource):
     def get(self):
-        data = list_stacked
+        data = read_logs(log_folder) # re-reads the log files every time
         data = data.to_dict(orient = 'records') # convert dataframe to dictionary
         #data = data.to_json(orient='records')
         return {'data': data}, 200 # return data and 200 OK code
 
+# Read config
+config = read_config()
+log_folder = config['Settings']['log_folder']
+
+# Load logs first (just to check for errors before page loads)
+data = read_logs(log_folder)
+
 api.add_resource(Users, '/users') # '/users' is our entry point for Users
 api.add_resource(Locations, '/locations') # and '/locations' is our entry point for Locations
 api.add_resource(Packets, '/packets') # and '/packets' is our entry point for Packets
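
With this change, read_logs() runs on every GET to /packets, so freshly written log lines appear without restarting the app, while config.ini is still read only once at startup. A minimal sketch of exercising the endpoint; the host, port, and use of the requests library are assumptions, not part of this commit:

import requests

# Hypothetical smoke test: hit /packets twice; because read_logs() runs on
# every request, a log line appended between the two calls shows up in the
# second response without restarting the Flask app.
resp = requests.get("http://127.0.0.1:5000/packets")  # Flask dev-server default port
resp.raise_for_status()
packets = resp.json()["data"]  # the resource returns {'data': [...]} with 200
print(len(packets), "packets parsed from the current log files")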

config.ini (new file, 4 added lines)

@@ -0,0 +1,4 @@
+[Settings]
+# Include trailing slash
+log_folder = logs/
+#log_folder = /home/pi/logs/direwolf/
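
The "Include trailing slash" comment exists because read_logs() builds its glob patterns by plain string concatenation (log_folder + str(today) + "*"). A hedged sketch of one alternative, using os.path.join so a missing slash cannot silently break the match; this is a suggestion, not what the commit does:

import configparser
import os

config = configparser.ConfigParser()
config.read('config.ini')
log_folder = config['Settings']['log_folder']

# os.path.join inserts the separator only when it is missing, so
# 'logs' and 'logs/' both yield 'logs/2023-04-05*'.
pattern = os.path.join(log_folder, '2023-04-05*')  # date chosen for illustration
print(pattern)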