Mirror of https://github.com/FAUSheppy/ths-datenlogger (synced 2025-12-06 04:11:34 +01:00)
refactor error output part 1
@@ -7,6 +7,7 @@ import localization.de as de
 from dbfread import DBF
 import timeutils
+import codecs
 
 line_colors = ['b', 'r', 'g', 'c', 'm', 'y']
 tname = CFG("temperatur_plot_name")
@@ -15,7 +16,7 @@ dname = CFG("dewcels_plot_name")
 color_id = 0
 
 class Data:
-    def __init__(self,name,plot=False):
+    def __init__(self, name, plot=False, qtTextBrowser=None):
         global color_id,line_colors
         self.name = name
         self.color=line_colors[color_id%len(line_colors)]
@@ -23,6 +24,7 @@ class Data:
         self.data = []
         self.times = []
         self.plot = plot
+        self.qtTextBrowser = qtTextBrowser
 
     def getFirstTime(self):
         '''Get time of first timestamp'''
@@ -36,11 +38,15 @@ class Data:
         out_x = []
         out_y = []
         i = 0
 
         if(len(self.times) != len(self.data)):
-            raise RuntimeError("len(timestamps) != len(data), cannot continue, this should never happen")
+            self.qtTextBrowser.append(de.special_err_1)
+            raise ValueError(de.special_err_1)
 
         if(len(self.times) <= 2):
-            print("WARNING: No Data for %s!"%self.name)
+            self.qtTextBrowser.append(de.warn_no_data.format(self.name))
             return (None,None)
 
         ############ AVERAGE OUT DATA #############
         if(CFG("combine_data_points") >= (self.times[1] - self.times[0]).total_seconds()):
             x_dp = 5
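
Note on this hunk: Data.__init__ now defaults qtTextBrowser to None, but the error paths above call self.qtTextBrowser.append() unconditionally, so a Data object built without a browser would fail here. A minimal sketch of a guard, assuming a hypothetical helper name (report) that is not part of this commit:

    # Hypothetical helper, not in the commit: append to the Qt browser if one
    # was attached, otherwise fall back to stdout.
    def report(qt_text_browser, message):
        if qt_text_browser is not None:
            qt_text_browser.append(message)
        else:
            print(message)
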
@@ -67,7 +73,6 @@ class Data:
             i += 1
         return (out_x,out_y)
 
     ## no idea on what kind of drugs I was when i wrote this function (it is somewhat ingenious though) ##
     def _get_timeframe(self, callback,date1=None,date2=None):
         r=dict()
         for t,c in zip(self.times,self.data):
@@ -89,16 +94,16 @@ class Data:
 
 def parse_line(datapoints, line, timekey, keys, time_parser, timeformat=None):
     # This function expects:
-    # - datapoints { String:DataObject }
-    # - line { String:Any }
-    # - timekey String (key for timevalue in 'line')
-    # - keys [ (String,String) ] (source_key in 'line' to target_key in 'datapoints')
+    # - datapoints { String:DataObject }
+    # - line { String:Any }
+    # - timekey String (key for timevalue in 'line')
+    # - keys [ (String,String) ] (source_key in 'line' to target_key in 'datapoints')
     time = time_parser(line[ timekey ],timeformat)
     for key in keys:
         datapoints[ key[1] ].data += [ line[ key[0] ] ]
         datapoints[ key[1] ].times += [ time ]
 
-def processExternalData(datapoints, plotNameKey, fromTime, toTime, dtype):
+def processExternalData(datapoints, plotNameKey, fromTime, toTime, dtype, qtTextBrowser):
     '''Download and parses external data of type dtype'''
 
     # prepare strings #
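
For readers of the comment block above, a short usage sketch of parse_line; the keys, record values and the stand-in time parser below are made up for illustration:

    # Illustrative only: made-up keys and record, the lambda stands in for timeutils.time_from_dbf.
    import datetime
    datapoints = {"temp_plot": Data("Temperatur", plot=True),
                  "hum_plot":  Data("Feuchte", plot=True)}
    record = {"DATETIME": datetime.datetime(2020, 1, 1, 12, 0), "TEMPCELS": 21.5, "HUMIDITY": 43.0}
    parse_line(datapoints, record, "DATETIME",
               [("TEMPCELS", "temp_plot"), ("HUMIDITY", "hum_plot")],
               lambda value, timeformat: value)
    # afterwards: datapoints["temp_plot"].data == [21.5] and .times holds the datetime
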
@@ -115,7 +120,7 @@ def processExternalData(datapoints, plotNameKey, fromTime, toTime, dtype):
     # download date if it doesn't exist #
     url = CFG("outside_data_url").format(dtype=dtype, fromDate=fromTimeStr, toDate=toTimeStr)
     r = requests.get(url)
-    print(url)
+    qtTextBrowser.append(de.pg_request.format(url))
     content = r.content.decode('utf-8', "ignore") # ignore bad bytes
 
     # cache data #
@@ -127,12 +132,11 @@ def processExternalData(datapoints, plotNameKey, fromTime, toTime, dtype):
     else:
 
         # get data from cache otherwise
-        print("INFO: Cache hit: {}".format(cacheFile))
+        qtTextBrowser.append(de.cache_hit.format(cacheFile))
         with open(fullpath) as f:
             content = f.read()
 
     skipBecauseFirstLine = True
-    error = None
     for l in content.split("\n"):
         if not ";" in l:
             continue
@@ -154,14 +158,13 @@ def processExternalData(datapoints, plotNameKey, fromTime, toTime, dtype):
             datapoints[plotNameKey].data += [float(cleanFloat)]
             datapoints[plotNameKey].times += [timestamp]
         except ValueError as e:
-            error = de.warning_ext_data
-
-    # return a warning indication for qt #
-    return error
+            qtTextBrowser.append(de.warning_ext_data)
 
 
-def read_in_file(path, backend=None, outsideData=False, plotOutsideTemp=True, plotOutsideHum=True):
-    '''Read in a file, add outside data if requested'''
+def read_in_file(path, backend=None, outsideData=False, plotOutsideTemp=True,
+                 plotOutsideHum=True, qtTextBrowser=None):
+    '''Read in a file, add outside data if requested, optionally give a QtTextBrowser
+       to output information, warnings and errors to.'''
 
     datapoints = dict()
     identifiers = [ CFG("plot_temperatur_key"),
@@ -197,7 +200,7 @@ def read_in_file(path, backend=None, outsideData=False, plotOutsideTemp=True, pl
     for i in range(0, len(names)):
         while len(names[i]) < max_name_len:
             names[i] += " "
-        datapoints.update({ identifiers[i] : Data(names[i], plotSettings[i]) })
+        datapoints.update({ identifiers[i] : Data(names[i], plotSettings[i], qtTextBrowser) })
 
     # legacy variables...
     pt, ph, pd, pto, pho = identifiers
@@ -210,78 +213,84 @@ def read_in_file(path, backend=None, outsideData=False, plotOutsideTemp=True, pl
     elif path.endswith(".DBF") or path.endswith(".dbf"):
         dbfread(path,datapoints,pt,ph,pd)
     elif path.endswith(".xls") or path.endswith(".XLS"):
-        csvread(path,datapoints,pt,ph,pd)
+        csvread(path,datapoints,pt,ph,pd,qtTextBrowser)
     elif path.endswith(".txt"):
-        csvread_txt(path,datapoints,pt,ph,pd)
+        csvread_txt(path,datapoints,pt,ph,pd,qtTextBrowser)
     else:
         raise NotImplementedError("Cannot determine filetype, cannot continue. Exit.")
 
     # if nessesary download and process external data #
-    error = None
     if outsideData:
 
         fromTime = datapoints[CFG("plot_temperatur_key")].getFirstTime()
         toTime = datapoints[CFG("plot_temperatur_key")].getLastTime()
 
-        error1 = processExternalData(datapoints, pto, fromTime, toTime, CFG("dtype_temperatur"))
-        error2 = processExternalData(datapoints, pho, fromTime, toTime, CFG("dtype_humidity"))
+        dtypeTemp = CFG("dtype_temperatur")
+        dtypeHum = CFG("dtype_humidity")
 
-        # pass on warnings #
-        if error1:
-            error = error1;
-        else:
-            error = error2
+        processExternalData(datapoints, pto, fromTime, toTime, dtypeTemp, qtTextBrowser)
+        processExternalData(datapoints, pho, fromTime, toTime, dtypeHum, qtTextBrowser)
 
-    return (datapoints, error)
+    return datapoints
 
 def dbfread(path,datapoints,pt,ph,pd):
-    for record in DBF(path):
-        parse_line(datapoints,record,'DATETIME',[ ('TEMPCELS',pt) , ('HUMIDITY',ph) , ('DEWCELS',pd) ] ,timeutils.time_from_dbf)
+    for record in DBF(path):
+        parse_line(datapoints, record, 'DATETIME',
+                   [ ('TEMPCELS',pt), ('HUMIDITY',ph), ('DEWCELS',pd) ],
+                   timeutils.time_from_dbf)
 
-def csvread(path,datapoints,pt,ph,pd):
-    count = 0;
-    with open(path) as f:
-        for l in f:
-            if l.startswith(">>") or l.startswith("--") or l.startswith("NO."):
-                count += 1
-                continue
-            else:
-                row_arg = list(map(lambda s:s.replace(" ","").replace(",","."),l.split("\t")))
-                row = {"temp":None,"hum":None,"taupunkt":None,"datetime":None}
-                row["datetime"] = row_arg[1]+row_arg[2]
-                row["temp"] = float(row_arg[3])
-                row["hum"] = float(row_arg[4])
-                row["taupunkt"] = float(row_arg[5])
-                parse_line(datapoints,row,'datetime',[ ('temp',pt) , ('hum',ph) , ('taupunkt',pd) ],\
-                           timeutils.time_from_csv,timeformat="%d-%m-%Y%H:%M:%S")
-    print("Info: Ignored %d lines at beginning of file"%count)
+def csvread(path, datapoints, pt, ph, pd, qtTextBrowser):
+    count = 0;
+    with open(path) as f:
+        for l in f:
+            if l.startswith(">>") or l.startswith("--") or l.startswith("NO."):
+                count += 1
+                continue
+            else:
+                row_arg = list(map(lambda s:s.replace(" ","").replace(",","."),l.split("\t")))
+                row = {"temp":None,"hum":None,"taupunkt":None,"datetime":None}
+                row["datetime"] = row_arg[1]+row_arg[2]
+                row["temp"] = float(row_arg[3])
+                row["hum"] = float(row_arg[4])
+                row["taupunkt"] = float(row_arg[5])
+                parse_line(datapoints, row, 'datetime',
+                           [('temp',pt), ('hum',ph), ('taupunkt',pd)],
+                           timeutils.time_from_csv,
+                           timeformat="%d-%m-%Y%H:%M:%S")
 
-import codecs
-def csvread_txt(path,datapoints,pt,ph,pd):
-    count = 0;
-    f = open(path)
+    qtTextBrowser.append(de.info_ig_lines.format(count))
+
+def csvread_txt(path,datapoints,pt,ph,pd,qtTextBrowser):
+    count = 0;
+    with open(path) as f:
+        try:
             for l in f:
-                if any(s in l for s in ["Logger","Datenquelle","Sensortyp","Einheit","Daten"]):
-                    count += 1
-                    continue
-                else:
-                    row_arg = list(map(lambda s:s.replace(" ","").replace(",","."),l.split("\t")))
-                    row = {"temp":None,"hum":None,"taupunkt":None,"datetime":None}
-                    row["datetime"] = "%s-%s-%s_%s:%s"%(row_arg[0],row_arg[1],row_arg[2],row_arg[3],row_arg[4])
-                    row["temp"] = float(row_arg[6])
-                    row["hum"] = float(row_arg[7])
-                    row["taupunkt"] = 0.0
-                    parse_line(datapoints,row,'datetime',[ ('temp',pt) , ('hum',ph) , ('taupunkt',pd) ],\
-                               timeutils.time_from_csv,timeformat="%d-%m-%Y_%H:%M")
+                if any(s in l for s in ["Logger","Datenquelle","Sensortyp","Einheit","Daten"]):
+                    count += 1
+                    continue
+                else:
+                    row_arg = list(map(lambda s:s.replace(" ","").replace(",","."),l.split("\t")))
+                    row = {"temp":None,"hum":None,"taupunkt":None,"datetime":None}
+                    row["datetime"] = "{}-{}-{}_{}:{}".format(row_arg[0],
+                                                              row_arg[1],
+                                                              row_arg[2],
+                                                              row_arg[3],
+                                                              row_arg[4])
+                    row["temp"] = float(row_arg[6])
+                    row["hum"] = float(row_arg[7])
+                    row["taupunkt"] = 0.0
+                    parse_line(datapoints, row, 'datetime',
+                               [('temp',pt), ('hum',ph), ('taupunkt',pd)],
+                               timeutils.time_from_csv,
+                               timeformat="%d-%m-%Y_%H:%M")
         except (UnicodeError, IndexError):
            count = csvread_txt_fallback(path,datapoints,pt,ph,pd)
 
-    print("Info: Ignored %d lines at beginning of the file"%count)
-    f.close()
+    qtTextBrowser.append(de.info_ig_lines.format(count))
 
 def csvread_txt_fallback(path,datapoints,pt,ph,pd):
     '''fallback for different format and encoding of txt'''
 
     count = 0
     with codecs.open(path, "r",encoding="ISO8859_2", errors='replace') as f:
         for l in f:
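
With this hunk read_in_file returns only the datapoints dict; warnings no longer travel back as a second tuple element but are appended to the passed browser. A sketch of the new calling convention, mirroring the GUI call sites further down in this commit (path and widget names here are placeholders):

    # Placeholder names; the real call sites are in the GUI hunks below.
    datapoints = input_backend.read_in_file(srcFilePath,
                                            outsideData=True,
                                            plotOutsideTemp=True,
                                            plotOutsideHum=True,
                                            qtTextBrowser=infoTextBox)
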
@@ -290,11 +299,13 @@ def csvread_txt_fallback(path,datapoints,pt,ph,pd):
                 continue
             else:
                 date,time,temp,hum = l.replace(" ","").replace(".","-").replace(",",".").split("\t")
-                row = {"temp":None,"hum":None,"taupunkt":None,"datetime":None}
+                row = { "temp":None, "hum":None, "taupunkt":None, "datetime":None }
                 row["datetime"] = "{}_{}".format(date,time[:5])
                 row["temp"] = float(temp)
                 row["hum"] = float(hum)
                 row["taupunkt"] = 0.0
-                parse_line(datapoints,row,'datetime',[ ('temp',pt) , ('hum',ph) , ('taupunkt',pd) ],\
-                           timeutils.time_from_csv,timeformat="%d-%m-%Y_%H:%M")
+                parse_line(datapoints,row,'datetime',
+                           [('temp',pt), ('hum',ph), ('taupunkt',pd)],
+                           timeutils.time_from_csv,
+                           timeformat="%d-%m-%Y_%H:%M")
     return count
@@ -22,4 +22,10 @@ open_pic = "Bild öffnen.."
 bad_time = "Fehlerhafte Zeitangabe!"
 warning = "Warnung"
 button_idc = "Egal, weiter!"
-warning_ext_data = "Fehlerhafte oder fehlende Werte in externen Daten für angegebenen Zeitraum."
+warning_ext_data = "WARN: Fehlerhafte oder fehlende Werte in externen Daten für angegebenen Zeitraum."
+infoOutput = "Informationen"
+cache_hit = "INFO: Verwende bereits vorhandene Datei: {}"
+special_err_1 = "Zeitstempelanzahl stimmt nicht mit Datensatzzahl überein."
+warn_no_data = "Warnung, keine Daten für {}"
+pg_request = "Downloading: {}"
+info_ig_lines = "INFO: Ignoriere {} Zeilen ohne Daten am Anfang der Datei"
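
The new localization keys are consumed by the backend via qtTextBrowser.append(...); a small formatting sketch with made-up argument values:

    # Illustrative only: made-up arguments for the new format strings.
    import localization.de as de
    de.cache_hit.format("outside_temp.csv")   # "INFO: Verwende bereits vorhandene Datei: outside_temp.csv"
    de.warn_no_data.format("Temperatur")      # "Warnung, keine Daten für Temperatur"
    de.info_ig_lines.format(3)                # "INFO: Ignoriere 3 Zeilen ohne Daten am Anfang der Datei"
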
@@ -8,7 +8,7 @@ from PyQt5.QtWidgets import (QApplication, QCheckBox, QComboBox, QDateTimeEdit,
         QDial, QDialog, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit,
         QProgressBar, QPushButton, QRadioButton, QScrollBar, QSizePolicy,
         QSlider, QSpinBox, QStyleFactory, QTableWidget, QTabWidget, QTextEdit,
-        QVBoxLayout, QWidget, QFileDialog, QDateEdit, QMessageBox)
+        QVBoxLayout, QWidget, QFileDialog, QDateEdit, QMessageBox, QTextBrowser)
 
 import localization.de as de
 import sys
@@ -35,12 +35,14 @@ class WidgetGallery(QDialog):
         self.createFileSelection()
         self.createDateSelection()
         self.createCheckboxArea()
+        self.createInfoOutputSection()
 
         mainLayout = QGridLayout()
         mainLayout.addWidget(self.fileSelectionGroup, 1, 0)
         mainLayout.addWidget(self.dateSelectionGroupBox, 2, 0)
         mainLayout.addWidget(self.checkboxGroup, 3, 0)
         mainLayout.addWidget(self.startSection, 4, 0)
+        mainLayout.addWidget(self.infoOutputSection, 5, 0)
 
         self.setLayout(mainLayout)
@@ -59,6 +61,17 @@ class WidgetGallery(QDialog):
         self.startSection.setLayout(layout)
 
+    def createInfoOutputSection(self):
+        '''Generate Aread containing progress, error and warning outputs'''
+
+        self.infoOutputSection = QGroupBox(self.localization.infoOutput)
+        self.infoTextBox = QTextBrowser()
+
+        layout = QVBoxLayout()
+        layout.addWidget(self.infoTextBox)
+
+        self.infoOutputSection.setLayout(layout)
+
     def createFileSelection(self):
         '''Generate the area containing the file selectors and go button'''
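
createInfoOutputSection wraps a QTextBrowser in a QGroupBox; outside the full dialog the widget can be exercised on its own. A standalone sketch, not part of the commit (window title and message are made up):

    # Standalone sketch: a QTextBrowser receiving appended status lines.
    import sys
    from PyQt5.QtWidgets import QApplication, QGroupBox, QTextBrowser, QVBoxLayout

    app = QApplication(sys.argv)
    box = QGroupBox("Informationen")          # mirrors de.infoOutput
    browser = QTextBrowser()
    layout = QVBoxLayout()
    layout.addWidget(browser)
    box.setLayout(layout)
    browser.append("Downloading: https://example.org/outside.csv")  # made-up message
    box.show()
    sys.exit(app.exec_())
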
@@ -140,6 +153,7 @@ class WidgetGallery(QDialog):
         '''Run generation with selected file and options'''
 
         # set save target if nessesary #
+        self.infoTextBox.clear()
         self.buttonGo.setText(self.localization.button_go_wait)
         self.buttonGo.setDisabled(True)
         self.repaint()
@@ -153,17 +167,11 @@ class WidgetGallery(QDialog):
         # workaround for checkboxes changed #
         outsideDataNeeded = self.boxOTemp.isChecked() or self.boxOHumidity.isChecked()
-        self.datapoints, error = input_backend.read_in_file(self.srcFileString,
+        self.datapoints = input_backend.read_in_file(self.srcFileString,
                                          outsideData=outsideDataNeeded,
                                          plotOutsideTemp=self.boxOTemp.isChecked(),
-                                         plotOutsideHum=self.boxOHumidity.isChecked())
-
-        if error:
-            errorBox = QMessageBox(self)
-            errorBox.setAttribute(PyQt5.QtCore.Qt.WA_DeleteOnClose)
-            errorBox.setText(self.localization.warning)
-            errorBox.setDetailedText(error)
-            errorBox.show()
+                                         plotOutsideHum=self.boxOHumidity.isChecked(),
+                                         qtTextBrowser=self.infoTextBox)
 
         # build dates #
         try:
@@ -218,11 +226,13 @@ class WidgetGallery(QDialog):
         waitDialog.setAttribute(PyQt5.QtCore.Qt.WA_DeleteOnClose)
         waitDialog.setText(self.localization.wait_dialog_text)
         waitDialog.show()
 
         try:
-            self.datapoints, error = input_backend.read_in_file(self.srcFileString,
+            self.datapoints = input_backend.read_in_file(self.srcFileString,
                                              outsideData=False,
                                              plotOutsideTemp=False,
-                                             plotOutsideHum=False)
+                                             plotOutsideHum=False,
+                                             qtTextBrowser=self.infoTextBox)
         except Exception as e:
             waitDialog.close()
             errorBox = QMessageBox(self)