📦 barneyman / tank-sensor

📄 server.py · 306 lines
from flask import Flask, request, send_from_directory
from concurrent.futures import ThreadPoolExecutor
import json
import signal
import os
import sys

# on a virgin pi
#
# apt-get install python3 python3-pip
#   clone my pyLibraries 'somewhere' on the pi
#   go to that clone and run ...
# python3 setup.py install
#   create a config directory under this script's dir
#   copy your client_secret.json file from the Google Developer Console
#     you may have to create a project and add the
#     Fusion R/W and Sheets R/W APIs to it
#   run this script - the first time it should notice there are no Google credentials
#     and prompt you to create them by giving you a URL
#   it will ask for access rights and, if you approve, give you a string to paste in
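#
# the config directory referenced throughout (a sketch, inferred from the paths this
# script reads and writes - nothing here is an official layout) ends up holding:
#   config/client_secret.json          - OAuth client secret from the Developer Console
#   config/credentialStore.credentials - token cache written after the first successful auth
#   config/lastSeenData.json           - most recent reading, served back by GET /data
#   config/staleData.json              - rows cached while Fusion/Sheets were unreachable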



from bjfGoogle import bjfGoogle, bjfFusionService, bjfSheetsService
#import bjfGoogle

from datetime import datetime
from datetime import timedelta

app=Flask(__name__)

@app.route("/")
def main():
    return "Welcome!"

#pi
#defaultDirectory="/home/pi/tank/"
defaultDirectory=os.path.dirname(os.path.abspath(__file__))+"/../"
print ("default dir is ",defaultDirectory)
# windows
#defaultDirectory="c:\\scribble\\"

#upgradesDict={ 'tank_1.0':'TankSensor_1_1.bin','tank_1.1':'TankSensor_1_2.bin','tank_1.2':'TankSensor_1_3.bin' }
FlaskPort=5000


@app.route('/upgrades/<path:filename>')
def send_js(filename):
	print("sending update "+filename)
	return send_from_directory(defaultDirectory+'upgrades', filename)
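
# the upgrades directory layout, as a sketch inferred from this route and the /data
# handler below (not an official manifest format):
#   upgrades/available.json  - e.g. { "latest":"tank_1.11", "binary":"TankSensor_1.11.bin" }
#   upgrades/<binary .bin>   - the firmware image named by "binary", fetched via this route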


# only 1 worker, so we get it off the http thread, but don't then create mutex problems in our own code
singleThreadWorker=ThreadPoolExecutor(max_workers=1)
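
# rough /data contract, inferred from the handler and ProcessJSON below (field names come
# from the code, the sample values and exact nesting are assumptions):
#   GET  - returns the last reading cached in config/lastSeenData.json
#   POST - the sensor posts something like
#          { "host":"tank1", "version":"tank_1.0", "current":42, "intervalS":10,
#            "data":[ { "iter":41, "tempC":21.5, "pressMB":1013, "humid%":55,
#                       "distCM":120, "lux":300, "lipo":3.9 } ] }
#          and gets back the server's local time, plus an 'upgrade' url if
#          upgrades/available.json lists a newer binary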


@app.route("/data", methods=['GET', 'POST'])
def data():
	if request.method == 'GET':

		print("data GET")
		lastSeenValues=[]
		try:
			with open(defaultDirectory+"config/lastSeenData.json") as lastSeenFile:
				lastSeenValues=json.load(lastSeenFile)
		except Exception:
			print("no last data")
		print(lastSeenValues)

		return json.dumps(lastSeenValues, separators=(',', ':'))
	else:
		if not request.json:
			print("error - no JSON body")
			print(request.data)
			return "error"

		singleThreadWorker.submit(ProcessJSON, request.json)

		try:
			# create a json blob to send back
			timenow=datetime.now()
			returnBlob = {'localTime': timenow.strftime("%H:%M:%S"), 'minutes':timenow.strftime("%M"),'seconds':timenow.strftime("%S"), 'reset': False}
			# get the version running
			clientVer=request.json['version']

			upgradesDict={}

			try:
				upgradesDict=json.load(open(defaultDirectory+"upgrades/available.json"))
			except Exception as e:
				print ("parsing problem ",e)

			# expects { "latest":"tank_1.11", "binary":"TankSensor_1.11.bin"}
			if not "latest" in upgradesDict or not "binary" in upgradesDict:
				latestVer=clientVer
			else:
				latestVer=upgradesDict['latest']


			if clientVer != latestVer:
				#upgradeDict={'host':{{ request.host.split(':')[0] }}, 'port':FlaskPort, 'url':upgradesDict[clientVer]}
				#upgradeDict={'host':'192.168.43.22', 'port':FlaskPort, 'url':'upgrades/'+upgradesDict[clientVer]}
				print("upgrade potential! ", clientVer,latestVer)

				upgradeDict={'url':'upgrades/'+upgradesDict["binary"]}
				returnBlob['upgrade']=upgradeDict

			return json.dumps(returnBlob, separators=(',', ':'))

		except Exception as e:
			print ("Exception occurred ",e)
			return "bad";

failedRowValues=[]

def sigterm_handler(_signo, _stack_frame):
	# write any cached rows to disk, then raise SystemExit(0)
	writeStaleData()
	sys.exit(0)


def addFailedRow(tableName, rowData):
	addition={ "table":tableName,"data":rowData}
	failedRowValues.append(addition)
	# debug
	# print ("Added ", addition)
	# json.dump(failedRowValues, open(defaultDirectory+"config/staleDataTemp.json",'w'))



def writeStaleData():
	if len(failedRowValues)>0:
		print("scribbling the stales to file")
		with open(defaultDirectory+"config/staleData.json", 'w') as staleFile:
			json.dump(failedRowValues, staleFile)

def PublishCachedJSON(maxRows):
	archiveRowCount=0
	print("stale item count ",len(failedRowValues), " attempting ...")
	# the [:] takes a copy of the list to iterate over, so rows can be removed from the original - i like python
	for row in failedRowValues[:]:
		# don't try too many times and swamp poor google
		archiveRowCount=archiveRowCount+1
		bailNow=False
		if archiveRowCount>maxRows:
			break
		try:
			#debug
			#print("retrying row ",row)
			#raise ValueError('faking a fusion error - collect a few of these.')


			theTable=getOrCreateTable(row['table'])		
			fusion.InsertRowData(theTable,row['data'])
			# worked, remove it from the ORIGINAL list
			failedRowValues.remove(row)
		except Exception as e:
			print("fusion retry failed ",e)
			# we need to bail on fail or we risk breaking sequence
			bailNow=True
		try:
			if not bailNow:
				sheetsService.AppendSheetRange('1hVkEaao2yQ6g680cfmSKf6PiZUFidZwlI_8EWsFN7s0', row['data'], 'HISTORY_DATA')
		except Exception as e:
			print("sheets retry failed - ",e)
			
		if bailNow:
			break
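
# ProcessJSON runs on the single worker thread: it turns each reading into a row
# (UTC, local, iter, tempC, pressMB, humid%, distCM, lux, lipo), inserts the batch into
# the per-host Fusion table and appends it to the HISTORY_DATA sheet, caches rows in
# failedRowValues for a later retry if the Fusion insert fails, then writes
# lastSeenData.json and refreshes the LATEST_DATA range with the newest reading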
	
	
def ProcessJSON(jsonData):
	#debug
	print (jsonData)
	print("=======")
	print (jsonData["data"])
	print("=======")

	# work out how many times we have to do this
	try:

		# push any stale values
		PublishCachedJSON(2)

		rowValues=[]

		utcNow=datetime.utcnow()
		localNow=datetime.now()
		# 'current' iteration is 'now' so subtract
		currentIter=jsonData["current"]
		intervalSeconds=jsonData["intervalS"]
		#utcStart=utcNow-timedelta(seconds=jsonData["intervalS"]*(len(jsonData["data"]))-1)
		#localStart=datetime.now()-timedelta(seconds=jsonData["intervalS"]*(len(jsonData["data"]))-1)
		for reading in jsonData["data"]:
			#print (reading)
			utcForIteration=utcNow-(timedelta(seconds=intervalSeconds*(currentIter-reading["iter"])))
			localForIteration=localNow-(timedelta(seconds=intervalSeconds*(currentIter-reading["iter"])))

			lastRowValues=[utcForIteration.strftime("%Y-%m-%d %H:%M:%S"),localForIteration.strftime("%Y-%m-%d %H:%M:%S"), reading["iter"], reading["tempC"],reading["pressMB"],reading["humid%"], reading["distCM"], reading["lux"], reading["lipo"] ]

			lastRowValuesJSON={	"utc":lastRowValues[0],
								"local":lastRowValues[1], 
								"iteration":lastRowValues[2],
								"outsideTemp": lastRowValues[3],
								"outsidePressure":lastRowValues[4],
								"humidity":lastRowValues[5],
								"distance":lastRowValues[6],
								"outsideLux":lastRowValues[7],
								"lipoVoltage":lastRowValues[8]
								}

			# DEBUG - sieve to minutes only
			#if reading["iter"] % 6 == 0:
			rowValues.append(lastRowValues)


		# add to history
		# DEBUG
		if len(rowValues)>0 :
			# fusion is the important one
			tableName=jsonData["host"]+"_tempPressure";

			# to keep sequential order, if we have anything stale, add to the back of it
			if len(failedRowValues) > 0:
				addFailedRow(tableName,rowValues)
			else:
				try:
					theTable=getOrCreateTable(tableName)		
					#debug
					#raise ValueError('faking a fusion error - collect a few of these.')


					fusion.InsertRowData(theTable,rowValues)
					# then try to add to the spreadsheet
					try:
						sheetsService.AppendSheetRange('1hVkEaao2yQ6g680cfmSKf6PiZUFidZwlI_8EWsFN7s0', rowValues, 'HISTORY_DATA')
					except Exception as e:
						print("sheets exception ",e)
				except Exception as e:
					print("fusion exception ",e)
					if len(failedRowValues) < 1000:
						addFailedRow(tableName,rowValues)


		# persist the most recent reading so GET /data can serve it
		with open(defaultDirectory+"config/lastSeenData.json", 'w') as lastSeenFile:
			json.dump(lastRowValuesJSON, lastSeenFile)
		# copy the last row so appending the 'updated at' timestamp below doesn't modify the original
		latestRowData = list(lastRowValues)
		rowValues=[latestRowData]
		latestRowData.append(utcNow.strftime("%Y-%m-%d %H:%M:%S"))
		sheetsService.UpdateSheetRange('1hVkEaao2yQ6g680cfmSKf6PiZUFidZwlI_8EWsFN7s0', rowValues,'LATEST_DATA')
		print( "synch process finished")
	except Exception as e:
		print ("Async Exception occurred ",e)
		

		
def getOrCreateTable(tableName):
	theTable=fusion.GetTableByName(tableName)
	if theTable is None:
		print("Creating fusion Table ...", tableName)
		tableDef={"name":tableName,"isExportable":False, "columns":[{"name":"whenUTC","type":"DATETIME"},{"name":"local","type":"DATETIME"},{"name":"iter","type":"NUMBER"},{"name":"tempC","type":"NUMBER"},{"name":"pressureMB","type":"NUMBER"},{"name":"humidity%","type":"NUMBER"},{"name":"distanceCM","type":"NUMBER"},{"name":"lux","type":"NUMBER"},{"name":"lipoV","type":"NUMBER"}]  }
		theTable=fusion.CreateTable(tableDef)
	return theTable
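
# typical launch (an assumption, not documented in the repo): run this script under
# systemd or similar so that SIGTERM on stop - or Ctrl-C when run interactively - hits
# the handler installed below and writeStaleData() gets a chance to persist any rows
# that never made it to Google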


if __name__ == "__main__":
	thisG=bjfGoogle()
	if thisG.Authenticate(defaultDirectory+"config/client_secret.json",defaultDirectory+"config/credentialStore.credentials", "https://www.googleapis.com/auth/spreadsheets https://www.googleapis.com/auth/fusiontables"):

		# create a fusion table
		fusion=bjfFusionService(thisG)
		sheetsService=bjfSheetsService(thisG)
		# see if our table is there

		#tableName="tempPressure"
		#tpTable=fusion.GetTableByName(tableName)
	
		signal.signal(signal.SIGTERM, sigterm_handler)
		signal.signal(signal.SIGINT, sigterm_handler)
	
		print ("reading stale data "),
		try:
			with open(defaultDirectory+"config/staleData.json") as staleFile:
				failedRowValues = json.load(staleFile)
			print(len(failedRowValues))
			PublishCachedJSON(20)
			# and delete the file
			os.remove(defaultDirectory+"config/staleData.json")
		except Exception:
			failedRowValues=[]
			print("none")
	
		if True: #tpTable!=None:
			print("running")
			app.run(host="0.0.0.0", port=FlaskPort)
			writeStaleData()

	else:
		print("failed to authenticate")