Drop numpy and pandas.
This commit is contained in:
parent 133690efc4
commit 6451e13ac4

app.py | 55
@@ -2,9 +2,10 @@ from flask import Flask
 from flask_restful import Resource, Api, reqparse
 from datetime import date, timedelta
 import configparser
-# TODO Can we do it without pandas and numpy?
-import pandas as pd
-import numpy as np
+# TODO Can we do it without pandas and numpy? They are so big.
+#import pandas as pd
+#import numpy as np
+import csv
 import ast
 import glob
 import json

@@ -26,32 +27,42 @@ def read_logs(log_folder):
     file_list = glob.glob(log_folder+str(yesterday)+"*") + \
                 glob.glob(log_folder+str(today)+"*") + \
                 glob.glob(log_folder+str(tomorrow)+"*")
-    list_stacked = pd.DataFrame()
+    # list_stacked = pd.DataFrame()
+    # for file in file_list:
+    #     file1 = pd.read_csv(file)
+    #     list_stacked = pd.concat([list_stacked, file1])
+
+    # https://stackoverflow.com/a/66071962
+    json_array = []
     for file in file_list:
-        file1 = pd.read_csv(file)
-        list_stacked = pd.concat([list_stacked, file1])
+        with open(file, encoding='utf-8') as csvf:
+            csvReader = csv.DictReader(csvf)
+            for row in csvReader:
+                #add this python dict to json array
+                json_array.append(row)
 
-    # TODO Can we do this without numpy?
-    list_stacked.replace(np.nan, 0, inplace=True)
+    #list_stacked.replace(np.nan, 0, inplace=True)
     #print(list_stacked.head())
-    return(list_stacked)
+    return(json_array)
 
-class Users(Resource):
-    def get(self):
-        data = pd.read_csv('users.csv') # read CSV
-        data = data.to_dict(orient = 'records') # convert dataframe to dictionary
-        return {'data': data}, 200 # return data and 200 OK code
+# class Users(Resource):
+#     def get(self):
+#         data = pd.read_csv('users.csv') # read CSV
+#         data = data.to_dict(orient = 'records') # convert dataframe to dictionary
+#         return {'data': data}, 200 # return data and 200 OK code
 
-class Locations(Resource):
-    def get(self):
-        data = pd.read_csv('locations.csv') # read CSV
-        data = data.to_dict(orient = 'records') # convert dataframe to dictionary
-        return {'data': data}, 200 # return data and 200 OK code
+# class Locations(Resource):
+#     def get(self):
+#         data = pd.read_csv('locations.csv') # read CSV
+#         data = data.to_dict(orient = 'records') # convert dataframe to dictionary
+#         return {'data': data}, 200 # return data and 200 OK code
 
 class Packets(Resource):
     def get(self):
-        data = read_logs(log_folder) # re-reads the log files every time
-        data = data.to_dict(orient = 'records') # convert dataframe to dictionary
+        #data = read_logs(log_folder) # re-reads the log files every time
+        #data = data.to_dict(orient = 'records') # convert dataframe to dictionary
 
         return {'data': data}, 200 # return data and 200 OK code
 
 # Read config
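
Review note: for comparison with the old pd.read_csv(...).to_dict(orient = 'records') path, here is a minimal standalone sketch of the csv.DictReader pattern the hunk above switches to. The function name and the example glob pattern are illustrative only, not part of the commit; note that DictReader hands back every value as a string and leaves missing cells as '' rather than NaN.

import csv
import glob

def read_csv_records(pattern):
    # Collect every row of every matching CSV as a plain dict --
    # roughly what pd.read_csv(...).to_dict(orient='records') produced,
    # except values are strings and missing cells are ''.
    records = []
    for path in glob.glob(pattern):
        with open(path, encoding='utf-8') as csvf:
            for row in csv.DictReader(csvf):
                records.append(dict(row))
    return records

# Example (hypothetical path): read_csv_records('logs/2023-01-01*')
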
@@ -61,8 +72,8 @@ log_folder = config['Settings']['log_folder']
 data = read_logs(log_folder)
 
 
-api.add_resource(Users, '/users') # '/users' is our entry point for Users
-api.add_resource(Locations, '/locations') # and '/locations' is our entry point for Locations
+#api.add_resource(Users, '/users') # '/users' is our entry point for Users
+#api.add_resource(Locations, '/locations') # and '/locations' is our entry point for Locations
 api.add_resource(Packets, '/packets') # and '/locations' is our entry point for Locations
 
 if __name__ == '__main__':
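
Review note: the old list_stacked.replace(np.nan, 0, inplace=True) step is only commented out, not replaced. If the zero-fill still matters, here is a sketch of the same cleanup without numpy, assuming that with csv.DictReader a missing cell arrives as '' (empty field) or None (short row). The helper name is illustrative, not part of the commit.

def fill_missing(rows, default=0):
    # Replace empty or None values in a list of row dicts with a default,
    # mirroring the old DataFrame.replace(np.nan, 0) step.
    return [
        {key: (default if value in ('', None) else value)
         for key, value in row.items()}
        for row in rows
    ]

# Example: json_array = fill_missing(json_array)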