put back most folders
This commit is contained in:
0
recordGenerators/__init__.py
Normal file
0
recordGenerators/__init__.py
Normal file
85
recordGenerators/achesAndPains.py
Normal file
85
recordGenerators/achesAndPains.py
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
import shutil
|
||||||
|
import requests
|
||||||
|
import logging,coloredlogs
|
||||||
|
import py2Lib.bit
|
||||||
|
import util.machineProductCfg as MPC
|
||||||
|
import records.lfRecord as LFR
|
||||||
|
import gzip
|
||||||
|
from os import remove
|
||||||
|
import xml.dom.minidom
|
||||||
|
import aiohttp, aiofiles, asyncio
|
||||||
|
|
||||||
|
l = logging.getLogger(__name__)
|
||||||
|
coloredlogs.install()
|
||||||
|
|
||||||
|
geocodes = []
|
||||||
|
coopIds = []
|
||||||
|
|
||||||
|
for i in MPC.getPrimaryLocations():
|
||||||
|
coopIds.append(LFR.getCoopId(i))
|
||||||
|
geocodes.append(LFR.getLatLong(i).replace('/', ','))
|
||||||
|
|
||||||
|
# Open the config file and make it accessible via "cfg"
|
||||||
|
import json
|
||||||
|
with open("config.json", "r") as file:
|
||||||
|
cfg = json.load(file)
|
||||||
|
|
||||||
|
apiKey = cfg["twcApiKey"]
|
||||||
|
|
||||||
|
async def getData(coopId, geocode):
    """Fetch the 7-day achePain index for one location and append it to the
    working AchesAndPains i2m document.

    :param coopId: COOP location id used as the record's locationKey/clientKey.
    :param geocode: "lat,long" string passed to the TWC indices API.
    """
    fetchUrl = f"https://api.weather.com/v2/indices/achePain/daypart/7day?geocode={geocode}&language=en-US&format=xml&apiKey={apiKey}"

    async with aiohttp.ClientSession() as s:
        async with s.get(fetchUrl) as r:
            if r.status != 200:
                l.error(f"Failed to write AchesAndPains record -- status code {r.status}")
                return
            data = await r.text()

    # Strip the API's XML prolog/outer wrapper; only the daypart payload is
    # embedded in the i2 record. Offsets assume the v2 indices response shape.
    newData = data[63:-26]

    i2Doc = f'\n <AchesAndPains id="000000000" locationKey="{coopId}" isWxScan="0">\n {newData}\n <clientKey>{coopId}</clientKey>\n </AchesAndPains>'

    # Append mode: makeRecord() has already written the <Data> header.
    # (The redundant "await f.close()" was removed -- the async context
    # manager already closes the file, so closing twice is a latent bug.)
    async with aiofiles.open('./.temp/AchesAndPains.i2m', 'a') as f:
        await f.write(i2Doc)
|
||||||
|
|
||||||
|
async def makeRecord():
    """Build the AchesAndPains i2 record for every configured location,
    pretty-print it, gzip it, and send it to the IntelliStar 2.

    Side effects: creates and (on success) removes ./.temp/AchesAndPains.i2m
    and ./.temp/AchesAndPains.gz.
    """
    loop = asyncio.get_running_loop()
    l.info("Writing AchesAndPains record.")

    header = '<Data type="AchesAndPains">'
    footer = '</Data>'

    async with aiofiles.open('./.temp/AchesAndPains.i2m', 'a') as doc:
        await doc.write(header)

    # One fetch per (coopId, geocode) pair; each call appends its element.
    for (x, y) in zip(coopIds, geocodes):
        await getData(x, y)

    async with aiofiles.open('./.temp/AchesAndPains.i2m', 'a') as end:
        await end.write(footer)

    # Re-parse and pretty-print; [23:] drops the XML declaration minidom adds.
    dom = xml.dom.minidom.parse('./.temp/AchesAndPains.i2m')
    xmlPretty = dom.toprettyxml(indent=" ")

    async with aiofiles.open('./.temp/AchesAndPains.i2m', 'w') as g:
        await g.write(xmlPretty[23:])

    # Compress the i2m to gzip for transmission.
    with open('./.temp/AchesAndPains.i2m', 'rb') as f_in:
        with gzip.open('./.temp/AchesAndPains.gz', 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)

    file = "./.temp/AchesAndPains.gz"
    command = '<MSG><Exec workRequest="storeData(File={0},QGROUP=__AchesAndPains__,Feed=AchesAndPains)" /><GzipCompressedMsg fname="AchesAndPains" /></MSG>'

    # BUG FIX: this module does "import py2Lib.bit" with no alias, so the
    # bare name "bit" was a NameError at runtime -- call via the package.
    py2Lib.bit.sendFile([file], [command], 1, 0)

    remove('./.temp/AchesAndPains.i2m')
    remove('./.temp/AchesAndPains.gz')
|
||||||
110
recordGenerators/airQuality.py
Normal file
110
recordGenerators/airQuality.py
Normal file
@@ -0,0 +1,110 @@
|
|||||||
|
import requests
|
||||||
|
import gzip
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import xml.dom.minidom
|
||||||
|
import logging,coloredlogs
|
||||||
|
import aiohttp, aiofiles, asyncio
|
||||||
|
|
||||||
|
l = logging.getLogger(__name__)
|
||||||
|
coloredlogs.install()
|
||||||
|
|
||||||
|
import py2Lib.bit
|
||||||
|
import util.machineProductCfg as MPC
|
||||||
|
import records.lfRecord as LFR
|
||||||
|
|
||||||
|
locationIds = []
|
||||||
|
zipCodes = []
|
||||||
|
epaIds = []
|
||||||
|
|
||||||
|
for i in MPC.getPrimaryLocations():
|
||||||
|
locationIds.append(LFR.getCoopId(i))
|
||||||
|
zipCodes.append(LFR.getZip(i))
|
||||||
|
epaIds.append(LFR.getEpaId(i))
|
||||||
|
|
||||||
|
# Open the config file and make it accessible via "cfg"
|
||||||
|
import json
|
||||||
|
with open("config.json", "r") as file:
|
||||||
|
cfg = json.load(file)
|
||||||
|
|
||||||
|
apiKey = cfg["twcApiKey"]
|
||||||
|
|
||||||
|
async def getData(epaId, zipcode):
    """Fetch the air-quality XML for one zip code and append an <AirQuality>
    element, keyed by EPA id, to the working i2m document.

    :param epaId: EPA station id used as locationKey/clientKey.
    :param zipcode: US zip code used in the v1 airquality endpoint.
    """
    url = f"https://api.weather.com/v1/location/{zipcode}:4:US/airquality.xml?language=en-US&apiKey={apiKey}"

    async with aiohttp.ClientSession() as s:
        async with s.get(url) as r:
            data = await r.text()

    # Trim the API's XML prolog/wrapper down to the payload element.
    newData = data[57:-11]

    # Write to i2doc file
    i2Doc = f'<AirQuality id="000000000" locationKey="{epaId}" isWxScan="0">' + '' + newData + f'<clientKey>{epaId}</clientKey></AirQuality>'

    # Append mode: writeData() has already written the <Data> header.
    # (Removed the redundant "await f.close()" -- the context manager
    # already closes the file; closing twice is a latent bug.)
    async with aiofiles.open("./.temp/AirQuality.i2m", 'a') as f:
        await f.write(i2Doc)
|
||||||
|
|
||||||
|
async def writeData():
    """Build and send the AirQuality i2 record for every location that has a
    working EPA id; skip the record entirely when none do.

    Side effects: creates and (on success) removes ./.temp/AirQuality.i2m
    and ./.temp/AirQuality.gz.
    """
    loop = asyncio.get_running_loop()
    useData = False
    workingEpaIds = []

    # Some areas have no air-quality reporting, so their EPA id is None.
    for i in epaIds:
        if i is None:  # was "== None" -- identity test is the correct idiom
            l.debug("No EPA ID found for location -- Skipping.")
        else:
            l.debug("EPA ID found for location! Writing data for Air Quality.")
            workingEpaIds.append(i)
            useData = True

    # Check to see if we even have EPA ids, as some areas don't have air quality reports
    if not useData:
        l.info("Not writing an AirQuality record due to a lack of working EPA ids.")
        return

    try:
        l.info("Writing an AirQuality record.")
        header = '<Data type="AirQuality">'
        footer = "</Data>"

        async with aiofiles.open("./.temp/AirQuality.i2m", 'w') as doc:
            await doc.write(header)

        for (x, y) in zip(workingEpaIds, zipCodes):
            await getData(x, y)

        async with aiofiles.open("./.temp/AirQuality.i2m", 'a') as end:
            await end.write(footer)

        # Pretty-print; [23:] drops the XML declaration minidom prepends.
        dom = xml.dom.minidom.parse("./.temp/AirQuality.i2m")
        xmlPretty = dom.toprettyxml(indent=" ")

        async with aiofiles.open("./.temp/AirQuality.i2m", 'w') as g:
            await g.write(xmlPretty[23:])

        files = []
        commands = []
        with open("./.temp/AirQuality.i2m", 'rb') as f_in:
            with gzip.open("./.temp/AirQuality.gz", 'wb') as f_out:
                shutil.copyfileobj(f_in, f_out)

        gZipFile = "./.temp/AirQuality.gz"
        files.append(gZipFile)
        # list.append returns None -- the old "comand = commands.append(...)"
        # binding was a dead store and has been dropped.
        commands.append('<MSG><Exec workRequest="storeData(File={0},QGROUP=__AirQuality__,Feed=AirQuality)" /><GzipCompressedMsg fname="AirQuality" /></MSG>')

        # BUG FIX: the module does "import py2Lib.bit" with no alias, so the
        # bare name "bit" raised NameError -- call via the package.
        py2Lib.bit.sendFile(files, commands, len(files), 0)

        os.remove("./.temp/AirQuality.i2m")
        os.remove("./.temp/AirQuality.gz")
    except Exception:
        l.error("DO NOT REPORT THE ERROR BELOW")
        # logger.exception records the traceback, so failures are diagnosable
        # instead of being silently summarized.
        l.exception("Failed to write an AirQuality record.")
        os.remove('./.temp/AirQuality.i2m')
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
103
recordGenerators/airportDelays.py
Normal file
103
recordGenerators/airportDelays.py
Normal file
@@ -0,0 +1,103 @@
|
|||||||
|
import requests
|
||||||
|
import gzip
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import xml.dom.minidom
|
||||||
|
import logging,coloredlogs
|
||||||
|
import aiohttp, aiofiles, asyncio
|
||||||
|
|
||||||
|
import py2Lib.bit
|
||||||
|
import util.machineProductCfg as MPC
|
||||||
|
import records.lfRecord as LFR
|
||||||
|
|
||||||
|
l = logging.getLogger(__name__)
|
||||||
|
coloredlogs.install()
|
||||||
|
|
||||||
|
locationIds = []
|
||||||
|
zipCodes = []
|
||||||
|
airports = []
|
||||||
|
|
||||||
|
for i in MPC.getPrimaryLocations():
|
||||||
|
locationIds.append(LFR.getCoopId(i))
|
||||||
|
zipCodes.append(LFR.getZip(i))
|
||||||
|
|
||||||
|
airports = MPC.getAirportCodes()
|
||||||
|
l.debug(airports)
|
||||||
|
|
||||||
|
# Open the config file and make it accessible via "cfg"
|
||||||
|
import json
|
||||||
|
with open("config.json", "r") as file:
|
||||||
|
cfg = json.load(file)
|
||||||
|
|
||||||
|
apiKey = cfg["twcApiKey"]
|
||||||
|
|
||||||
|
async def getData(airport):
    """Fetch the delay XML for one airport and append an <AirportDelays>
    element to the working i2m document.

    :param airport: IATA airport code used as locationKey/clientKey.
    """
    url = f"https://api.weather.com/v1/airportcode/{airport}/airport/delays.xml?language=en-US&apiKey={apiKey}"

    async with aiohttp.ClientSession() as s:
        async with s.get(url) as r:
            data = await r.text()

    # Trim the API wrapper and fix a mis-encoded character the feed emits.
    newData = data[48:-11].replace('¿', '-')

    # Write to i2doc file
    i2Doc = f'<AirportDelays id="000000000" locationKey="{airport}" isWxScan="0">' + '' + newData + f'<clientKey>{airport}</clientKey></AirportDelays>'

    # Append mode: writeData() has already written the <Data> header.
    # (Removed the redundant "await f.close()" -- the context manager
    # already closes the file; closing twice is a latent bug.)
    async with aiofiles.open("./.temp/AirportDelays.i2m", 'a') as f:
        await f.write(i2Doc)
|
||||||
|
|
||||||
|
async def writeData():
    """Probe each configured airport for an active delay and, if any exist,
    build and send the AirportDelays i2 record.

    Side effects: creates and (on success) removes ./.temp/AirportDelays.i2m
    and ./.temp/AirportDelays.gz.
    """
    loop = asyncio.get_running_loop()
    useData = False
    airportsWithDelays = []

    # The delays endpoint returns a non-200 status when an airport has no
    # active delay, so probe each airport before building the record.
    for x in airports:
        async with aiohttp.ClientSession() as s:
            async with s.get(f"https://api.weather.com/v1/airportcode/{x}/airport/delays.xml?language=en-US&apiKey={apiKey}") as r:
                if r.status != 200:
                    l.debug(f"No delay for {x} found, skipping..")
                else:
                    airportsWithDelays.append(x)
                    useData = True

    if not useData:
        l.info("No airport delays found.")
        return

    l.info("Writing an AirportDelays record.")
    header = '<Data type="AirportDelays">'
    footer = "</Data>"

    async with aiofiles.open("./.temp/AirportDelays.i2m", 'w') as doc:
        await doc.write(header)

    for x in airportsWithDelays:
        await getData(x)

    async with aiofiles.open("./.temp/AirportDelays.i2m", 'a') as end:
        await end.write(footer)

    dom = xml.dom.minidom.parse("./.temp/AirportDelays.i2m")
    prettyXml = dom.toprettyxml(indent=" ")

    # NOTE(review): unlike the sibling records this one writes the full
    # pretty-printed output (no [23:] slice) -- preserved as-is, but confirm
    # the IntelliStar accepts the XML declaration here.
    async with aiofiles.open("./.temp/AirportDelays.i2m", 'w') as g:
        await g.write(prettyXml)

    files = []
    commands = []
    with open("./.temp/AirportDelays.i2m", 'rb') as f_in:
        with gzip.open("./.temp/AirportDelays.gz", 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)

    gZipFile = "./.temp/AirportDelays.gz"
    files.append(gZipFile)
    # list.append returns None -- the old "comand = commands.append(...)"
    # binding was a dead store and has been dropped.
    commands.append('<MSG><Exec workRequest="storeData(File={0},QGROUP=__AirportDelays__,Feed=AirportDelays)" /><GzipCompressedMsg fname="AirportDelays" /></MSG>')

    # BUG FIX: the module does "import py2Lib.bit" with no alias, so the
    # bare name "bit" raised NameError -- call via the package.
    py2Lib.bit.sendFile(files, commands, len(files), 0)

    os.remove("./.temp/AirportDelays.i2m")
    os.remove("./.temp/AirportDelays.gz")
|
||||||
365
recordGenerators/alerts.py
Normal file
365
recordGenerators/alerts.py
Normal file
@@ -0,0 +1,365 @@
|
|||||||
|
import requests
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
from datetime import datetime,timedelta
|
||||||
|
from util.machineProductCfg import getAlertZones
|
||||||
|
import time
|
||||||
|
import pytz
|
||||||
|
import xml.dom.minidom
|
||||||
|
import shutil
|
||||||
|
import gzip
|
||||||
|
import logging,coloredlogs
|
||||||
|
import aiohttp, aiofiles, asyncio
|
||||||
|
import py2Lib.bit
|
||||||
|
|
||||||
|
l = logging.getLogger(__name__)
|
||||||
|
coloredlogs.install()
|
||||||
|
|
||||||
|
#Zones/Counties to fetch alerts for
|
||||||
|
alertLocations = getAlertZones()
|
||||||
|
# Open the config file and make it accessible via "cfg"
|
||||||
|
import json
|
||||||
|
with open("config.json", "r") as file:
|
||||||
|
cfg = json.load(file)
|
||||||
|
|
||||||
|
headlineApiKey = cfg["twcApiKey"]
|
||||||
|
detailsApiKey = cfg["twcApiKey"]
|
||||||
|
|
||||||
|
k = 0
|
||||||
|
async def getAlerts(location):
    """Fetch headline alerts for one NWS zone/county and append a BERecord
    XML fragment to ./.temp/BERecord.xml for each alert not yet transmitted.

    Increments the module-level counter ``k`` once per alert written so that
    makeRecord() knows whether anything needs to be sent.

    :param location: NWS area id, e.g. "TXZ123" (zone) or "TXC123" (county).
    """
    global k
    fetchUrl = 'https://api.weather.com/v3/alerts/headlines?areaId=' + location + ':US&format=json&language=en-US&apiKey=' + headlineApiKey

    # Non-200 statuses mapped to (log function, message). 204 means "no
    # alerts"; anything listed here aborts processing for this location.
    # Unlisted statuses fall through and attempt to parse the body, matching
    # the original elif chain's behavior.
    statusHandlers = {
        204: (l.info, 'No alerts for area ' + location + '.\n'),
        403: (l.critical, "Uh oh! Your API key may not be authorized for alerts. Tsk Tsk. Maybe you shouldn't pirate IBM data :)\n"),
        401: (l.critical, "Uh oh! This request requires authentication. Maybe you shouldn't try to access resources for IBM employee's only :)\n"),
        404: (l.error, "Uh oh! The requested resource cannot be found. This means either the URL is wrong or IBM is having technical difficulties :(\n Or.... They deleted the API :O\n"),
        405: (l.error, "Uh oh! Got a 405! This means that somehow.... someway..... this script made an invalid request. So sad..... So terrible..... :(\n"),
        406: (l.critical, "Uh oh! Got a 406! This means that IBM doesn't like us. :(\n"),
        408: (l.error, "Uh oh! We were too slow in providing IBM our alert request. Although I prefer to say we were Slowly Capable! :)\n"),
        500: (l.error, "Uh oh! Seems IBM's on call IT Tech spilled coffee on the server! Looks like no alerts for a while. Please check back later :)\n"),
    }
    gatewayHandler = (l.error, "Uh oh! This is why you don't have interns messing with the server configuration. Please stand by while IBM's on call IT Tech resolves the issue :)\n")
    statusHandlers[502] = statusHandlers[503] = statusHandlers[504] = gatewayHandler

    # phenomena_significance -> vocal headline code. Replaces a ~70-branch
    # elif chain with a single dict lookup.
    vocalCodes = {
        'HU_W': 'HE001', 'TY_W': 'HE002', 'HI_W': 'HE003', 'TO_A': 'HE004',
        'SV_A': 'HE005', 'HU_A': 'HE006', 'TY_A': 'HE007', 'TR_W': 'HE008',
        'TR_A': 'HE009', 'TI_W': 'HE010', 'HI_A': 'HE011', 'TI_A': 'HE012',
        'BZ_W': 'HE013', 'IS_W': 'HE014', 'WS_W': 'HE015', 'HW_W': 'HE016',
        'LE_W': 'HE017', 'ZR_Y': 'HE018', 'CF_W': 'HE019', 'LS_W': 'HE020',
        'WW_Y': 'HE021', 'LB_Y': 'HE022', 'LE_Y': 'HE023', 'BZ_A': 'HE024',
        'WS_A': 'HE025', 'FF_A': 'HE026', 'FA_A': 'HE027', 'FA_Y': 'HE028',
        'HW_A': 'HE029', 'LE_A': 'HE030', 'SU_W': 'HE031', 'LS_Y': 'HE032',
        'CF_A': 'HE033', 'ZF_Y': 'HE034', 'FG_Y': 'HE035', 'SM_Y': 'HE036',
        'EC_W': 'HE037', 'EH_W': 'HE038', 'HZ_W': 'HE039', 'FZ_W': 'HE040',
        'HT_Y': 'HE041', 'WC_Y': 'HE042', 'FR_Y': 'HE043', 'EC_A': 'HE044',
        'EH_A': 'HE045', 'HZ_A': 'HE046', 'DS_W': 'HE047', 'WI_Y': 'HE048',
        'SU_Y': 'HE049', 'AS_Y': 'HE050', 'WC_W': 'HE051', 'FZ_A': 'HE052',
        'WC_A': 'HE053', 'AF_W': 'HE054', 'AF_Y': 'HE055', 'DU_Y': 'HE056',
        'LW_Y': 'HE057', 'LS_A': 'HE058', 'HF_W': 'HE059', 'SR_W': 'HE060',
        'GL_W': 'HE061', 'HF_A': 'HE062', 'UP_W': 'HE063', 'SE_W': 'HE064',
        'SR_A': 'HE065', 'GL_A': 'HE066', 'MF_Y': 'HE067', 'MS_Y': 'HE068',
        'SC_Y': 'HE069', 'UP_Y': 'HE073', 'LO_Y': 'HE074', 'AF_V': 'HE075',
        'UP_A': 'HE076', 'TAV_W': 'HE077', 'TAV_A': 'HE078', 'TO_W': 'HE110',
    }

    async with aiohttp.ClientSession() as s:
        async with s.get(fetchUrl) as r:
            theCode = r.status

            handler = statusHandlers.get(theCode)
            if handler is not None:
                logFn, msg = handler
                logFn(msg)
                return

            # Map headline variables
            l.debug('Found Alert for ' + location + '\n')
            dataH = await r.json()
            alertsRoot = dataH['alerts']

            for x in alertsRoot:
                detailKey = x['detailKey']
                # Fetch the detail record for this alert.
                # NOTE(review): requests.get is a blocking call inside a
                # coroutine; kept for behavior parity -- consider aiohttp.
                detailsUrl = 'https://api.weather.com/v3/alerts/detail?alertId=' + detailKey + '&format=json&language=en-US&apiKey=' + detailsApiKey
                detailsResponse = requests.get(detailsUrl)
                dataD = detailsResponse.json()
                detailsText = dataD['alertDetail']['texts'][0]
                descriptionRaw = detailsText['description']
                language = detailsText['languageCode']
                Identifier = location + '_' + x['phenomena'] + '_' + x['significance'] + '_' + str(x['processTimeUTC'])

                # Is this for a NWS Zone or County? Area ids embed a 'Z' or
                # 'C' after the two-letter state prefix.
                last4 = location[2:]
                locationType = None
                if 'C' in last4:
                    locationType = 'C'
                elif 'Z' in last4:
                    locationType = 'Z'

                # Skip alerts already recorded in the manifest so the same
                # alert isn't re-sent every polling cycle.
                # NOTE(review): "return" (not "continue") abandons any later
                # alerts for this location too -- preserved as-is.
                try:
                    async with aiofiles.open('./.temp/alertmanifest.txt', 'r') as checkFile:
                        c = await checkFile.read()
                    if c.find(Identifier) != -1:
                        l.debug(f"{Identifier} was sent already, skipping..")
                        return
                except FileNotFoundError:
                    l.warning("alert manifest does not exist (yet)")

                k += 1  # We have an alert to send!

                # Lets Map Our Vocal Codes!
                vocalCheck = x['phenomena'] + '_' + x['significance']
                headlineCode = vocalCodes.get(vocalCheck)
                if headlineCode is not None:
                    vocalCode = '<bVocHdlnCd>' + headlineCode + '</bVocHdlnCd>'
                else:
                    vocalCode = '<bVocHdlnCd />'

                # Do some date/time conversions. Both "end" and "expire" are
                # derived from expireTimeUTC, as in the original.
                EndTimeUTCEpoch = x['expireTimeUTC']
                EndTimeUTC = datetime.utcfromtimestamp(EndTimeUTCEpoch).strftime('%Y%m%d%H%M')

                expireTimeEpoch = x['expireTimeUTC']
                expireTimeUTC = datetime.utcfromtimestamp(expireTimeEpoch).strftime('%Y%m%d%H%M')

                # V3 Alert API doesn't give us issueTime in UTC, so convert
                # from the local-time string ourselves.
                iTLDTO = datetime.strptime(x['issueTimeLocal'], '%Y-%m-%dT%H:%M:%S%z')
                issueTimeUtc = iTLDTO.astimezone(pytz.UTC).strftime('%Y%m%d%H%M')

                processTime = datetime.fromtimestamp(x['processTimeUTC']).strftime('%Y%m%d%H%M%S')

                # What is the action of this alert?
                Action = None
                if x['messageType'] == 'Update':
                    Action = 'CON'
                elif x['messageType'] == 'New':
                    Action = 'NEW'

                # Flatten the narrative onto one line and escape the XML
                # special characters before embedding it in the record.
                description = ' '.join(descriptionRaw.splitlines())
                description = description.replace('&', '&amp;')
                description = description.replace('<', '&lt;')
                description = description.replace('>', '&gt;')
                description = description.replace('-', '')
                description = description.replace(':', '')

                # Tornado / severe t-storm / flash-flood warnings are urgent.
                # (Removed the dead placeholder assignment that preceded this.)
                if vocalCheck == 'TO_W' or vocalCheck == 'SV_W' or vocalCheck == 'FF_W':
                    urgency = 'BEUrgent'
                else:
                    urgency = 'BERecord'

                alertMsg = '<BERecord id="0000" locationKey="' + location + '_' + x['phenomena'] + '_' + x['significance'] + '_' + x['eventTrackingNumber'] + '_' + x['officeCode'] + '" isWxscan="0"><action>NOT_USED</action><BEHdr><bPIL>' + x['productIdentifier'] + '</bPIL><bWMOHdr>NOT_USED</bWMOHdr><bEvent><eActionCd eActionPriority="' + str(x['messageTypeCode']) + '">' + Action + '</eActionCd><eOfficeId eOfficeNm="' + x['officeName'] + '">' + x['officeCode'] + '</eOfficeId><ePhenom>' + x['phenomena'] + '</ePhenom><eSgnfcnc>' + x['significance'] + '</eSgnfcnc><eETN>' + x['eventTrackingNumber'] + '</eETN><eDesc>' + x['eventDescription'] + '</eDesc><eStTmUTC>NOT_USED</eStTmUTC><eEndTmUTC>' + EndTimeUTC + '</eEndTmUTC><eSvrty>' + str(x['severityCode']) + '</eSvrty><eTWCIId>NOT_USED</eTWCIId><eExpTmUTC>' + expireTimeUTC + '</eExpTmUTC></bEvent><bLocations><bLocCd bLoc="' + x['areaName'] + '" bLocTyp="' + locationType + '">' + location + '</bLocCd><bStCd bSt="' + x['adminDistrict'] + '">' + x['adminDistrictCode'] + '</bStCd><bUTCDiff>NOT_USED</bUTCDiff><bTzAbbrv>NOT_USED</bTzAbbrv><bCntryCd>NOT_USED</bCntryCd></bLocations><bSgmtChksum>' + x['identifier'] + '</bSgmtChksum><procTm>' + processTime + '</procTm></BEHdr><BEData><bIssueTmUTC>' + issueTimeUtc + '</bIssueTmUTC><bHdln><bHdlnTxt>' + x['headlineText'] + '</bHdlnTxt>' + vocalCode + '</bHdln><bParameter>NOT_USED</bParameter><bNarrTxt bNarrTxtLang="en-US"><bLn>' + description + '</bLn></bNarrTxt><bSrchRslt>NOT_USED</bSrchRslt></BEData><clientKey>' + location + '_' + x['phenomena'] + '_' + x['significance'] + '_' + x['eventTrackingNumber'] + '_' + x['officeCode'] + '</clientKey></BERecord>'

                # Append BERecord
                async with aiofiles.open('./.temp/BERecord.xml', "a") as b:
                    await b.write(alertMsg)

                # Add our alert to the manifest so we don't keep sending in
                # the same alert every 60 seconds unless an update is issued.
                async with aiofiles.open('./.temp/alertmanifest.txt', "a") as c:
                    await c.write('\n' + Identifier)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
async def makeRecord():
    """Collect BERecords for every configured alert location, wrap them in a
    <Data> document, pretty-print, and -- only when at least one alert was
    generated -- gzip and send the record to the IntelliStar 2.

    Resets the module-level alert counter ``k`` after a successful send.
    """
    loop = asyncio.get_running_loop()
    global k

    async with aiofiles.open("./.temp/BERecord.xml", 'w') as BERecord:
        await BERecord.write('<Data type="BERecord">')

    for z in alertLocations:
        await getAlerts(z)

    async with aiofiles.open('./.temp/BERecord.xml', 'a') as BERecord:
        await BERecord.write("</Data>")

    # Pretty-print; [23:] drops the XML declaration minidom prepends.
    dom = xml.dom.minidom.parse("./.temp/BERecord.xml")
    pretty_xml_as_string = dom.toprettyxml(indent=" ")

    async with aiofiles.open("./.temp/BERecord.i2m", 'w') as h:
        await h.write(pretty_xml_as_string[23:])

    # The BERecord XML doesn't need to be sent if there are no alerts.
    if k > 0:
        l.info("Sending alert(s) to the IntelliStar 2!")
        with open("./.temp/BERecord.i2m", 'rb') as f_in:
            with gzip.open("./.temp/BERecord.gz", 'wb') as f_out:
                shutil.copyfileobj(f_in, f_out)

        gZipFile = "./.temp/BERecord.gz"
        files = [gZipFile]
        commands = ['<MSG><Exec workRequest="storeData(File={0},QGROUP=__BERecord__,Feed=BERecord)" /><GzipCompressedMsg fname="BERecord" /></MSG>']
        # BUG FIX: the module does "import py2Lib.bit" with no alias, so the
        # bare name "bit" raised NameError -- call via the package.
        py2Lib.bit.sendFile(files, commands, 1, 0)
        os.remove(gZipFile)
        k = 0

    os.remove("./.temp/BERecord.xml")
    os.remove("./.temp/BERecord.i2m")
|
||||||
|
|
||||||
93
recordGenerators/breathing.py
Normal file
93
recordGenerators/breathing.py
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
import requests
|
||||||
|
import gzip
|
||||||
|
import uuid
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import xml.dom.minidom
|
||||||
|
import logging,coloredlogs
|
||||||
|
import aiohttp, aiofiles, asyncio
|
||||||
|
|
||||||
|
import py2Lib.bit
|
||||||
|
import util.machineProductCfg as MPC
|
||||||
|
import records.lfRecord as LFR
|
||||||
|
|
||||||
|
l = logging.getLogger(__name__)
coloredlogs.install()

coopIds = []
geocodes = []

# Auto-grab the COOP ids and geocodes for every configured location.
for i in MPC.getPrimaryLocations():
    coopIds.append(LFR.getCoopId(i))
    geocodes.append(LFR.getLatLong(i).replace('/', ','))

# BUG FIX: l.debug(coopIds, geocodes) passed a list as the log format string
# plus a stray argument, which makes the logging module raise a formatting
# error at emit time -- use lazy %-style arguments instead.
l.debug("%s %s", coopIds, geocodes)

# Open the config file and make it accessible via "cfg"
import json

with open("config.json", "r") as file:
    cfg = json.load(file)

apiKey = cfg["twcApiKey"]
|
||||||
|
|
||||||
|
async def getData(coopId, geocode):
    """Fetch the 7-day breathing index for one location and append a
    <Breathing> element to the working i2m document.

    :param coopId: COOP location id used as locationKey/clientKey.
    :param geocode: "lat,long" string passed to the TWC indices API.
    """
    fetchUrl = f"https://api.weather.com/v2/indices/breathing/daypart/7day?geocode={geocode}&language=en-US&format=xml&apiKey={apiKey}"

    # Fetch data
    async with aiohttp.ClientSession() as s:
        async with s.get(fetchUrl) as r:
            data = await r.text()

    # Strip the API's XML prolog/wrapper down to the daypart payload.
    newData = data[63:-26]

    l.debug('Gathering data for location id ' + coopId)

    # Write to .i2m file
    i2Doc = '<Breathing id="000000000" locationKey="' + str(coopId) + '" isWxscan="0">' + '' + newData + '<clientKey>' + str(coopId) + '</clientKey></Breathing>'

    # Append mode: makeDataFile() has already written the <Data> header.
    # (Removed the redundant "await f.close()" -- the context manager
    # already closes the file; closing twice is a latent bug.)
    async with aiofiles.open("./.temp/Breathing.i2m", "a") as f:
        await f.write(i2Doc)
|
||||||
|
|
||||||
|
|
||||||
|
async def makeDataFile():
    """Build the Breathing i2 record for every configured location,
    pretty-print it, gzip it, and send it to the IntelliStar 2.

    Side effects: creates and (on success) removes ./.temp/Breathing.i2m
    and ./.temp/Breathing.gz.
    """
    loop = asyncio.get_running_loop()
    l.info("Writing a Breathing forecast record.")

    header = '<Data type="Breathing">'
    footer = '</Data>'

    async with aiofiles.open("./.temp/Breathing.i2m", 'w') as doc:
        await doc.write(header)

    # One fetch per (coopId, geocode) pair; each call appends its element.
    for x, y in zip(coopIds, geocodes):
        await getData(x, y)

    async with aiofiles.open("./.temp/Breathing.i2m", 'a') as end:
        await end.write(footer)

    # Pretty-print; [23:] drops the XML declaration minidom prepends.
    dom = xml.dom.minidom.parse("./.temp/Breathing.i2m")
    pretty_xml_as_string = dom.toprettyxml(indent=" ")

    async with aiofiles.open("./.temp/Breathing.i2m", "w") as g:
        await g.write(pretty_xml_as_string[23:])

    files = []
    commands = []
    with open("./.temp/Breathing.i2m", 'rb') as f_in:
        with gzip.open("./.temp/Breathing.gz", 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)

    gZipFile = "./.temp/Breathing.gz"
    files.append(gZipFile)
    # list.append returns None -- the old "command = commands.append(...)"
    # binding was a dead store and has been dropped.
    commands.append('<MSG><Exec workRequest="storeData(File={0},QGROUP=__Breathing__,Feed=Breathing)" /><GzipCompressedMsg fname="Breathing" /></MSG>')

    # BUG FIX: the module does "import py2Lib.bit" with no alias, so the
    # bare name "bit" raised NameError -- call via the package.
    py2Lib.bit.sendFile(files, commands, len(files), 0)

    os.remove("./.temp/Breathing.i2m")
    os.remove("./.temp/Breathing.gz")
|
||||||
99
recordGenerators/currentObservations.py
Normal file
99
recordGenerators/currentObservations.py
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
import requests
|
||||||
|
import py2Lib.bit as bit
|
||||||
|
import gzip
|
||||||
|
import uuid
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import xml.dom.minidom
|
||||||
|
import logging,coloredlogs
|
||||||
|
import aiohttp, aiofiles, asyncio
|
||||||
|
|
||||||
|
import py2Lib.bit
|
||||||
|
import util.machineProductCfg as MPC
|
||||||
|
import records.lfRecord as LFR
|
||||||
|
|
||||||
|
l = logging.getLogger(__name__)
|
||||||
|
coloredlogs.install()
|
||||||
|
|
||||||
|
tecciId = []
|
||||||
|
zipCodes = []
|
||||||
|
|
||||||
|
# Auto-grab the tecci and zip codes
|
||||||
|
for i in MPC.getPrimaryLocations():
|
||||||
|
tecciId.append("T" + LFR.getCoopId(i))
|
||||||
|
zipCodes.append(LFR.getZip(i))
|
||||||
|
|
||||||
|
# Obtain metro map city TECCI and zips:
|
||||||
|
for i in MPC.getMetroCities():
|
||||||
|
tecciId.append("T" + LFR.getCoopId(i))
|
||||||
|
zipCodes.append(LFR.getZip(i))
|
||||||
|
|
||||||
|
|
||||||
|
# Open the config file and make it accessible via "cfg"
|
||||||
|
import json
|
||||||
|
with open("config.json", "r") as file:
|
||||||
|
cfg = json.load(file)
|
||||||
|
|
||||||
|
apiKey = cfg["twcApiKey"]
|
||||||
|
|
||||||
|
async def getData(tecci, zipCode):
    """Fetch current conditions for one location and append the
    <CurrentObservations> element to ./.temp/CurrentObservations.i2m.

    Skips the location on a non-200 response so an API error page is never
    spliced into the record.
    """
    l.debug('Gathering data for location id ' + tecci)
    fetchUrl = 'https://api.weather.com/v1/location/' + zipCode + ':4:US/observations/current.xml?language=en-US&units=e&apiKey=' + apiKey
    data = ""

    async with aiohttp.ClientSession() as s:
        async with s.get(fetchUrl) as r:
            if r.status != 200:
                l.error(f"Failed to write CurrentObservations record -- status code {r.status}")
                return
            data = await r.text()

    # Strip the fixed-size XML prolog/epilog so only the payload remains.
    newData = data[67:-30]

    # Write to .i2m file; the explicit close() was removed -- "async with"
    # already closes the file.
    i2Doc = '<CurrentObservations id="000000000" locationKey="' + str(tecci) + '" isWxscan="0">' + '' + newData + '<clientKey>' + str(tecci) + '</clientKey></CurrentObservations>'

    async with aiofiles.open("./.temp/CurrentObservations.i2m", 'a') as f:
        await f.write(i2Doc)
|
||||||
|
|
||||||
|
|
||||||
|
async def makeDataFile():
    """Assemble the full CurrentObservations record, gzip it, transmit it,
    and delete the temporary files."""
    l.info("Writing a CurrentObservations record.")
    header = '<Data type="CurrentObservations">'
    footer = '</Data>'

    # 'w' starts a fresh document, truncating anything left from a crash.
    async with aiofiles.open("./.temp/CurrentObservations.i2m", 'w') as doc:
        await doc.write(header)

    for x, y in zip(tecciId, zipCodes):
        await getData(x, y)

    async with aiofiles.open("./.temp/CurrentObservations.i2m", 'a') as end:
        await end.write(footer)

    dom = xml.dom.minidom.parse("./.temp/CurrentObservations.i2m")
    pretty_xml_as_string = dom.toprettyxml(indent=" ")

    # Skip the XML declaration that toprettyxml() prepends (23 characters).
    async with aiofiles.open("./.temp/CurrentObservations.i2m", "w") as g:
        await g.write(pretty_xml_as_string[23:])

    files = []
    commands = []
    # TODO: the blocking gzip below could run in a separate thread via
    # loop.run_in_executor(); implement when convenient.
    with open("./.temp/CurrentObservations.i2m", 'rb') as f_in:
        with gzip.open("./.temp/CurrentObservations.gz", 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)

    gZipFile = "./.temp/CurrentObservations.gz"
    files.append(gZipFile)
    # list.append returns None; the original's unused `command =` binding
    # was dropped.
    commands.append('<MSG><Exec workRequest="storeData(File={0},QGROUP=__CurrentObservations__,Feed=CurrentObservations)" /><GzipCompressedMsg fname="CurrentObservations" /></MSG>')
    numFiles = len(files)

    bit.sendFile(files, commands, numFiles, 0)

    os.remove("./.temp/CurrentObservations.i2m")
    os.remove("./.temp/CurrentObservations.gz")
|
||||||
94
recordGenerators/dailyForecast.py
Normal file
94
recordGenerators/dailyForecast.py
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
import requests
|
||||||
|
import gzip
|
||||||
|
import uuid
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import xml.dom.minidom
|
||||||
|
import logging,coloredlogs
|
||||||
|
import aiohttp, aiofiles, asyncio
|
||||||
|
|
||||||
|
import py2Lib.bit
|
||||||
|
import util.machineProductCfg as MPC
|
||||||
|
import records.lfRecord as LFR
|
||||||
|
|
||||||
|
l = logging.getLogger(__name__)
|
||||||
|
coloredlogs.install()
|
||||||
|
|
||||||
|
tecciId = []
|
||||||
|
zipCodes = []
|
||||||
|
|
||||||
|
# Auto-grab the tecci and zip codes
|
||||||
|
for i in MPC.getPrimaryLocations():
|
||||||
|
tecciId.append(LFR.getCoopId(i))
|
||||||
|
zipCodes.append(LFR.getZip(i))
|
||||||
|
|
||||||
|
# Grab metro map city tecci and zip codes
|
||||||
|
for i in MPC.getMetroCities():
|
||||||
|
tecciId.append(LFR.getCoopId(i))
|
||||||
|
zipCodes.append(LFR.getZip(i))
|
||||||
|
|
||||||
|
# Open the config file and make it accessible via "cfg"
|
||||||
|
import json
|
||||||
|
with open("config.json", "r") as file:
|
||||||
|
cfg = json.load(file)
|
||||||
|
|
||||||
|
apiKey = cfg["twcApiKey"]
|
||||||
|
|
||||||
|
async def getData(tecci, zipCode):
    """Fetch the 7-day daily forecast for one location and append the
    <DailyForecast> element to ./.temp/DailyForecast.i2m.

    Skips the location on a non-200 response so an API error page is never
    spliced into the record.
    """
    fetchUrl = 'https://api.weather.com/v1/location/' + zipCode + ':4:US/forecast/daily/7day.xml?language=en-US&units=e&apiKey=' + apiKey
    data = ""

    async with aiohttp.ClientSession() as s:
        async with s.get(fetchUrl) as r:
            if r.status != 200:
                l.error(f"Failed to write DailyForecast record -- status code {r.status}")
                return
            data = await r.text()

    # Strip the fixed-size XML prolog/epilog so only the payload remains.
    newData = data[61:-24]

    l.debug('Gathering data for location id ' + tecci)

    # Write to .i2m file; the explicit close() was removed -- "async with"
    # already closes the file.
    i2Doc = '<DailyForecast id="000000000" locationKey="' + str(tecci) + '" isWxscan="0">' + '' + newData + '<clientKey>' + str(tecci) + '</clientKey></DailyForecast>'

    async with aiofiles.open('./.temp/DailyForecast.i2m', 'a') as f:
        await f.write(i2Doc)
|
||||||
|
|
||||||
|
|
||||||
|
async def makeDataFile():
    """Assemble the full DailyForecast record, gzip it, transmit it, and
    delete the temporary files."""
    l.info("Writing a DailyForecast record.")
    header = '<Data type="DailyForecast">'
    footer = '</Data>'

    # 'w' starts a fresh document, truncating anything left from a crash.
    async with aiofiles.open("./.temp/DailyForecast.i2m", 'w') as doc:
        await doc.write(header)

    for x, y in zip(tecciId, zipCodes):
        await getData(x, y)

    async with aiofiles.open("./.temp/DailyForecast.i2m", 'a') as end:
        await end.write(footer)

    dom = xml.dom.minidom.parse("./.temp/DailyForecast.i2m")
    pretty_xml_as_string = dom.toprettyxml(indent=" ")

    # Skip the XML declaration that toprettyxml() prepends (23 characters).
    async with aiofiles.open("./.temp/DailyForecast.i2m", "w") as g:
        await g.write(pretty_xml_as_string[23:])

    files = []
    commands = []
    with open("./.temp/DailyForecast.i2m", 'rb') as f_in:
        with gzip.open("./.temp/DailyForecast.gz", 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)

    gZipFile = "./.temp/DailyForecast.gz"
    files.append(gZipFile)
    # list.append returns None; the original's unused `command =` binding
    # was dropped.
    commands.append('<MSG><Exec workRequest="storeData(File={0},QGROUP=__DailyForecast__,Feed=DailyForecast)" /><GzipCompressedMsg fname="DailyForecast" /></MSG>')
    numFiles = len(files)

    # This module does `import py2Lib.bit` without an alias, so the fully
    # qualified name is required -- the original bare `bit.sendFile`
    # raised NameError at runtime.
    py2Lib.bit.sendFile(files, commands, numFiles, 0)

    os.remove("./.temp/DailyForecast.i2m")
    os.remove("./.temp/DailyForecast.gz")
|
||||||
85
recordGenerators/heatingAndCooling.py
Normal file
85
recordGenerators/heatingAndCooling.py
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
import shutil
|
||||||
|
import requests
|
||||||
|
import logging,coloredlogs
|
||||||
|
import py2Lib.bit
|
||||||
|
import util.machineProductCfg as MPC
|
||||||
|
import records.lfRecord as LFR
|
||||||
|
import gzip
|
||||||
|
from os import remove
|
||||||
|
import xml.dom.minidom
|
||||||
|
import aiohttp, aiofiles, asyncio
|
||||||
|
|
||||||
|
l = logging.getLogger(__name__)
|
||||||
|
coloredlogs.install()
|
||||||
|
|
||||||
|
geocodes = []
|
||||||
|
coopIds = []
|
||||||
|
|
||||||
|
for i in MPC.getPrimaryLocations():
|
||||||
|
coopIds.append(LFR.getCoopId(i))
|
||||||
|
geocodes.append(LFR.getLatLong(i).replace('/', ','))
|
||||||
|
|
||||||
|
# Open the config file and make it accessible via "cfg"
|
||||||
|
import json
|
||||||
|
with open("config.json", "r") as file:
|
||||||
|
cfg = json.load(file)
|
||||||
|
|
||||||
|
apiKey = cfg["twcApiKey"]
|
||||||
|
|
||||||
|
async def getData(coopId, geocode):
    """Fetch the 7-day heatCool index for one location and append the
    <HeatingAndCooling> element to ./.temp/HeatingAndCooling.i2m.

    Skips the location on a non-200 response so an API error page is never
    spliced into the record.
    """
    fetchUrl = f"https://api.weather.com/v2/indices/heatCool/daypart/7day?geocode={geocode}&language=en-US&format=xml&apiKey={apiKey}"
    data = ""

    async with aiohttp.ClientSession() as s:
        async with s.get(fetchUrl) as r:
            if r.status != 200:
                l.error(f"Failed to write HeatingAndCooling record -- Status code {r.status}")
                return
            data = await r.text()

    # Strip the fixed-size XML prolog/epilog so only the payload remains.
    newData = data[63:-26]

    i2Doc = f'\n <HeatingAndCooling id="000000000" locationKey="{coopId}" isWxScan="0">\n {newData}\n <clientKey>{coopId}</clientKey>\n </HeatingAndCooling>'

    # 'a': makeRecord writes the header first, then one element per
    # location.  The explicit close() was removed -- "async with" already
    # closes the file.
    async with aiofiles.open('./.temp/HeatingAndCooling.i2m', 'a') as f:
        await f.write(i2Doc)
|
||||||
|
|
||||||
|
async def makeRecord():
    """Assemble the full HeatingAndCooling record, gzip it, transmit it,
    and delete the temporary files."""
    l.info("Writing HeatingAndCooling record.")

    header = '<Data type="HeatingAndCooling">'
    footer = '</Data>'

    # 'w' (the original used 'a') so a stale .i2m left over from a crashed
    # run cannot be appended to -- matches the other record generators.
    async with aiofiles.open('./.temp/HeatingAndCooling.i2m', 'w') as doc:
        await doc.write(header)

    for coopId, geocode in zip(coopIds, geocodes):
        await getData(coopId, geocode)

    async with aiofiles.open('./.temp/HeatingAndCooling.i2m', 'a') as end:
        await end.write(footer)

    dom = xml.dom.minidom.parse('./.temp/HeatingAndCooling.i2m')
    xmlPretty = dom.toprettyxml(indent=" ")

    # Skip the XML declaration that toprettyxml() prepends (23 characters).
    async with aiofiles.open('./.temp/HeatingAndCooling.i2m', 'w') as g:
        await g.write(xmlPretty[23:])

    # Compress i2m to gzip
    with open('./.temp/HeatingAndCooling.i2m', 'rb') as f_in:
        with gzip.open('./.temp/HeatingAndCooling.gz', 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)

    file = "./.temp/HeatingAndCooling.gz"
    command = '<MSG><Exec workRequest="storeData(File={0},QGROUP=__HeatingAndCooling__,Feed=HeatingAndCooling)" /><GzipCompressedMsg fname="HeatingAndCooling" /></MSG>'

    # This module does `import py2Lib.bit` without an alias, so the fully
    # qualified name is required -- the original bare `bit.sendFile`
    # raised NameError at runtime.
    py2Lib.bit.sendFile([file], [command], 1, 0)

    remove('./.temp/HeatingAndCooling.i2m')
    remove('./.temp/HeatingAndCooling.gz')
|
||||||
96
recordGenerators/hourlyForecast.py
Normal file
96
recordGenerators/hourlyForecast.py
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
import requests
|
||||||
|
import gzip
|
||||||
|
import uuid
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import xml.dom.minidom
|
||||||
|
import logging,coloredlogs
|
||||||
|
import aiohttp, aiofiles, asyncio, asyncio
|
||||||
|
|
||||||
|
import py2Lib.bit
|
||||||
|
import util.machineProductCfg as MPC
|
||||||
|
import records.lfRecord as LFR
|
||||||
|
|
||||||
|
l = logging.getLogger(__name__)
|
||||||
|
coloredlogs.install()
|
||||||
|
|
||||||
|
tecciId = []
|
||||||
|
zipCodes = []
|
||||||
|
|
||||||
|
# Auto-grab the tecci and zip codes
|
||||||
|
for i in MPC.getPrimaryLocations():
|
||||||
|
tecciId.append(LFR.getCoopId(i))
|
||||||
|
zipCodes.append(LFR.getZip(i))
|
||||||
|
|
||||||
|
for i in MPC.getMetroCities():
|
||||||
|
tecciId.append(LFR.getCoopId(i))
|
||||||
|
zipCodes.append(LFR.getZip(i))
|
||||||
|
|
||||||
|
|
||||||
|
# Open the config file and make it accessible via "cfg"
|
||||||
|
import json
|
||||||
|
with open("config.json", "r") as file:
|
||||||
|
cfg = json.load(file)
|
||||||
|
|
||||||
|
apiKey = cfg["twcApiKey"]
|
||||||
|
|
||||||
|
async def getData(tecci, zipCode):
    """Fetch the 360-hour hourly forecast for one location and append the
    <HourlyForecast> element to ./.temp/HourlyForecast.i2m.

    Skips the location on a non-200 response so an API error page is never
    spliced into the record.
    """
    l.debug('Gathering data for location id ' + tecci)
    fetchUrl = 'https://api.weather.com/v1/location/' + zipCode + ':4:US/forecast/hourly/360hour.xml?language=en-US&units=e&apiKey=' + apiKey
    data = ""

    # Fetch data
    async with aiohttp.ClientSession() as s:
        async with s.get(fetchUrl) as r:
            if r.status != 200:
                l.error(f"Failed to write HourlyForecast record -- status code {r.status}")
                return
            data = await r.text()

    # Strip the fixed-size XML prolog/epilog so only the payload remains.
    newData = data[48:-11]

    # Write to .i2m file; the explicit close() was removed -- "async with"
    # already closes the file.
    i2Doc = '<HourlyForecast id="000000000" locationKey="' + str(tecci) + '" isWxscan="0">' + '' + newData + '<clientKey>' + str(tecci) + '</clientKey></HourlyForecast>'

    async with aiofiles.open('./.temp/HourlyForecast.i2m', 'a') as f:
        await f.write(i2Doc)
|
||||||
|
|
||||||
|
|
||||||
|
async def makeDataFile():
    """Assemble the full HourlyForecast record, gzip it, transmit it, and
    delete the temporary files."""
    l.info("Writing an HourlyForecast record.")
    header = '<Data type="HourlyForecast">'
    footer = '</Data>'

    # 'w' starts a fresh document, truncating anything left from a crash.
    async with aiofiles.open("./.temp/HourlyForecast.i2m", 'w') as doc:
        await doc.write(header)

    for x, y in zip(tecciId, zipCodes):
        await getData(x, y)

    async with aiofiles.open("./.temp/HourlyForecast.i2m", 'a') as end:
        await end.write(footer)

    dom = xml.dom.minidom.parse("./.temp/HourlyForecast.i2m")
    pretty_xml_as_string = dom.toprettyxml(indent=" ")

    # Skip the XML declaration that toprettyxml() prepends (23 characters).
    async with aiofiles.open("./.temp/HourlyForecast.i2m", "w") as g:
        await g.write(pretty_xml_as_string[23:])

    files = []
    commands = []
    with open("./.temp/HourlyForecast.i2m", 'rb') as f_in:
        with gzip.open("./.temp/HourlyForecast.gz", 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)

    gZipFile = "./.temp/HourlyForecast.gz"
    files.append(gZipFile)
    # list.append returns None; the original's unused `command =` binding
    # was dropped.
    commands.append('<MSG><Exec workRequest="storeData(File={0},QGROUP=__HourlyForecast__,Feed=HourlyForecast)" /><GzipCompressedMsg fname="HourlyForecast" /></MSG>')
    numFiles = len(files)

    # This module does `import py2Lib.bit` without an alias, so the fully
    # qualified name is required -- the original bare `bit.sendFile`
    # raised NameError at runtime.
    py2Lib.bit.sendFile(files, commands, numFiles, 0)

    os.remove("./.temp/HourlyForecast.i2m")
    os.remove("./.temp/HourlyForecast.gz")
|
||||||
85
recordGenerators/mosquitoActivity.py
Normal file
85
recordGenerators/mosquitoActivity.py
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
import shutil
|
||||||
|
import requests
|
||||||
|
import logging,coloredlogs
|
||||||
|
import py2Lib.bit
|
||||||
|
import util.machineProductCfg as MPC
|
||||||
|
import records.lfRecord as LFR
|
||||||
|
import gzip
|
||||||
|
from os import remove
|
||||||
|
import xml.dom.minidom
|
||||||
|
import aiohttp, aiofiles, asyncio
|
||||||
|
|
||||||
|
l = logging.getLogger(__name__)
|
||||||
|
coloredlogs.install()
|
||||||
|
|
||||||
|
geocodes = []
|
||||||
|
coopIds = []
|
||||||
|
|
||||||
|
for i in MPC.getPrimaryLocations():
|
||||||
|
coopIds.append(LFR.getCoopId(i))
|
||||||
|
geocodes.append(LFR.getLatLong(i).replace('/', ','))
|
||||||
|
|
||||||
|
# Open the config file and make it accessible via "cfg"
|
||||||
|
import json
|
||||||
|
with open("config.json", "r") as file:
|
||||||
|
cfg = json.load(file)
|
||||||
|
|
||||||
|
apiKey = cfg["twcApiKey"]
|
||||||
|
|
||||||
|
async def getData(coopId, geocode):
    """Fetch the 7-day mosquito index for one location and append the
    <MosquitoActivity> element to ./.temp/MosquitoActivity.i2m.

    Skips the location on a non-200 response so an API error page is never
    spliced into the record.
    """
    fetchUrl = f"https://api.weather.com/v2/indices/mosquito/daily/7day?geocode={geocode}&language=en-US&format=xml&apiKey={apiKey}"
    data = ""

    async with aiohttp.ClientSession() as s:
        async with s.get(fetchUrl) as r:
            if r.status != 200:
                l.error(f"Failed to write MosquitoActivity record -- status code {r.status}")
                return
            data = await r.text()

    # Strip the fixed-size XML prolog/epilog so only the payload remains.
    newData = data[63:-26]

    i2Doc = f'\n <MosquitoActivity id="000000000" locationKey="{coopId}" isWxScan="0">\n {newData}\n <clientKey>{coopId}</clientKey>\n </MosquitoActivity>'

    # 'a': makeRecord writes the header first, then one element per
    # location.  The explicit close() was removed -- "async with" already
    # closes the file.
    async with aiofiles.open('./.temp/MosquitoActivity.i2m', 'a') as f:
        await f.write(i2Doc)
|
||||||
|
|
||||||
|
async def makeRecord():
    """Assemble the full MosquitoActivity record, gzip it, transmit it,
    and delete the temporary files."""
    l.info("Writing MosquitoActivity record.")

    header = '<Data type="MosquitoActivity">'
    footer = '</Data>'

    # 'w' (the original used 'a') so a stale .i2m left over from a crashed
    # run cannot be appended to -- matches the other record generators.
    async with aiofiles.open('./.temp/MosquitoActivity.i2m', 'w') as doc:
        await doc.write(header)

    for coopId, geocode in zip(coopIds, geocodes):
        await getData(coopId, geocode)

    async with aiofiles.open('./.temp/MosquitoActivity.i2m', 'a') as end:
        await end.write(footer)

    dom = xml.dom.minidom.parse('./.temp/MosquitoActivity.i2m')
    xmlPretty = dom.toprettyxml(indent=" ")

    # Skip the XML declaration that toprettyxml() prepends (23 characters).
    async with aiofiles.open('./.temp/MosquitoActivity.i2m', 'w') as g:
        await g.write(xmlPretty[23:])

    # Compress i2m to gzip
    with open('./.temp/MosquitoActivity.i2m', 'rb') as f_in:
        with gzip.open('./.temp/MosquitoActivity.gz', 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)

    file = "./.temp/MosquitoActivity.gz"
    command = '<MSG><Exec workRequest="storeData(File={0},QGROUP=__MosquitoActivity__,Feed=MosquitoActivity)" /><GzipCompressedMsg fname="MosquitoActivity" /></MSG>'

    # This module does `import py2Lib.bit` without an alias, so the fully
    # qualified name is required -- the original bare `bit.sendFile`
    # raised NameError at runtime.
    py2Lib.bit.sendFile([file], [command], 1, 0)

    remove('./.temp/MosquitoActivity.i2m')
    remove('./.temp/MosquitoActivity.gz')
|
||||||
93
recordGenerators/pollenForecast.py
Normal file
93
recordGenerators/pollenForecast.py
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
import requests
|
||||||
|
import gzip
|
||||||
|
import uuid
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import xml.dom.minidom
|
||||||
|
import logging, coloredlogs
|
||||||
|
import aiohttp, aiofiles, asyncio
|
||||||
|
|
||||||
|
import py2Lib.bit
|
||||||
|
import util.machineProductCfg as MPC
|
||||||
|
import records.lfRecord as LFR
|
||||||
|
|
||||||
|
|
||||||
|
l = logging.getLogger(__name__)
|
||||||
|
coloredlogs.install()
|
||||||
|
|
||||||
|
pollenIds = []
|
||||||
|
geocodes = []
|
||||||
|
|
||||||
|
|
||||||
|
# Auto-grab the tecci and zip codes
|
||||||
|
# Build the parallel pollen-id / geocode lists for every primary location.
for i in MPC.getPrimaryLocations():
    pollenIds.append(LFR.getPollenInfo(i))
    geocodes.append(LFR.getLatLong(i).replace('/', ','))

# Pass the values as lazy %-args.  logging expects (msg, *args); the original
# l.debug(pollenIds, geocodes) used the first list as the format string, which
# makes the logging module raise a formatting error when DEBUG is enabled.
l.debug("pollenIds=%s geocodes=%s", pollenIds, geocodes)
|
||||||
|
|
||||||
|
# Open the config file and make it accessible via "cfg"
|
||||||
|
import json
|
||||||
|
with open("config.json", "r") as file:
|
||||||
|
cfg = json.load(file)
|
||||||
|
|
||||||
|
apiKey = cfg["twcApiKey"]
|
||||||
|
|
||||||
|
async def getData(pollenId, geocode):
    """Fetch the 7-day pollen forecast for one location and append the
    <PollenForecast> element to ./.temp/PollenForecast.i2m.

    Skips the location on a non-200 response so an API error page is never
    spliced into the record.
    """
    fetchUrl = f"https://api.weather.com/v2/indices/pollen/daypart/7day?geocode={geocode}&language=en-US&format=xml&apiKey={apiKey}"
    data = ""

    # Fetch data
    async with aiohttp.ClientSession() as s:
        async with s.get(fetchUrl) as r:
            if r.status != 200:
                l.error(f"Failed to write PollenForecast record -- status code {r.status}")
                return
            data = await r.text()

    # Strip the fixed-size XML prolog/epilog so only the payload remains.
    newData = data[63:-26]

    l.debug('Gathering data for location id ' + pollenId)

    # Write to .i2m file; the explicit close() was removed -- "async with"
    # already closes the file.
    i2Doc = '<PollenForecast id="000000000" locationKey="' + str(pollenId) + '" isWxscan="0">' + '' + newData + '<clientKey>' + str(pollenId) + '</clientKey></PollenForecast>'

    async with aiofiles.open("./.temp/PollenForecast.i2m", "a") as f:
        await f.write(i2Doc)
|
||||||
|
|
||||||
|
|
||||||
|
async def makeDataFile():
    """Assemble the full PollenForecast record, gzip it, transmit it, and
    delete the temporary files."""
    l.info("Writing a PollenForecast record.")
    header = '<Data type="PollenForecast">'
    footer = '</Data>'

    # 'w' starts a fresh document, truncating anything left from a crash.
    async with aiofiles.open("./.temp/PollenForecast.i2m", 'w') as doc:
        await doc.write(header)

    for x, y in zip(pollenIds, geocodes):
        await getData(x, y)

    async with aiofiles.open("./.temp/PollenForecast.i2m", 'a') as end:
        await end.write(footer)

    dom = xml.dom.minidom.parse("./.temp/PollenForecast.i2m")
    pretty_xml_as_string = dom.toprettyxml(indent=" ")

    # Skip the XML declaration that toprettyxml() prepends (23 characters).
    async with aiofiles.open("./.temp/PollenForecast.i2m", "w") as g:
        await g.write(pretty_xml_as_string[23:])

    files = []
    commands = []
    with open("./.temp/PollenForecast.i2m", 'rb') as f_in:
        with gzip.open("./.temp/PollenForecast.gz", 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)

    gZipFile = "./.temp/PollenForecast.gz"
    files.append(gZipFile)
    # list.append returns None; the original's unused `command =` binding
    # was dropped.
    commands.append('<MSG><Exec workRequest="storeData(File={0},QGROUP=__PollenForecast__,Feed=PollenForecast)" /><GzipCompressedMsg fname="PollenForecast" /></MSG>')
    numFiles = len(files)

    # This module does `import py2Lib.bit` without an alias, so the fully
    # qualified name is required -- the original bare `bit.sendFile`
    # raised NameError at runtime.
    py2Lib.bit.sendFile(files, commands, numFiles, 0)

    os.remove("./.temp/PollenForecast.i2m")
    os.remove("./.temp/PollenForecast.gz")
|
||||||
94
recordGenerators/tideForecast.py
Normal file
94
recordGenerators/tideForecast.py
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
import shutil
|
||||||
|
import logging,coloredlogs
|
||||||
|
import datetime
|
||||||
|
import py2Lib.bit
|
||||||
|
import util.machineProductCfg as MPC
|
||||||
|
import records.lfRecord as LFR
|
||||||
|
import gzip
|
||||||
|
from os import remove
|
||||||
|
import xml.dom.minidom
|
||||||
|
import aiohttp, aiofiles, asyncio
|
||||||
|
|
||||||
|
l = logging.getLogger(__name__)
|
||||||
|
coloredlogs.install()
|
||||||
|
|
||||||
|
geocodes = []
|
||||||
|
tideStations = []
|
||||||
|
|
||||||
|
for i in MPC.getTideStations():
|
||||||
|
tideStations.append(i)
|
||||||
|
geocodes.append(LFR.getLatLong(i))
|
||||||
|
|
||||||
|
# Open the config file and make it accessible via "cfg"
|
||||||
|
import json
|
||||||
|
with open("config.json", "r") as file:
|
||||||
|
cfg = json.load(file)
|
||||||
|
|
||||||
|
apiKey = cfg["twcApiKey"]
|
||||||
|
|
||||||
|
async def getData(tideStation, geocode):
    """Fetch a 5-day tide forecast for one station and append the
    <TidesForecast> element to ./.temp/TidesForecast.i2m.

    Skips the station on a non-200 response so an API error page is never
    spliced into the record.
    """
    # The API wants yyyymmdd date bounds: today through today + 5 days.
    # (The original formatted today to a string, re-parsed it, then added
    # the timedelta -- the round-trip was unnecessary.)
    today = datetime.date.today()
    startDate = today.strftime('%Y%m%d')
    endDate = (today + datetime.timedelta(days=5)).strftime('%Y%m%d')
    data = ""

    fetchUrl = f"https://api.weather.com/v1/geocode/{geocode}/forecast/tides.xml?language=en-US&units=e&startDate={startDate}&endDate={endDate}&apiKey={apiKey}"

    async with aiohttp.ClientSession() as s:
        async with s.get(fetchUrl) as r:
            if r.status != 200:
                l.error(f"Failed to write TideForecast -- status code {r.status}")
                return
            data = await r.text()

    # Strip the fixed-size XML prolog/epilog so only the payload remains.
    newData = data[53:-16]

    i2Doc = f'\n <TidesForecast id="000000000" locationKey="{tideStation}" isWxScan="0">\n {newData}\n <clientKey>{tideStation}</clientKey>\n </TidesForecast>'

    # 'a': makeRecord writes the header first, then one element per
    # station.  The explicit close() was removed -- "async with" already
    # closes the file.
    async with aiofiles.open('./.temp/TidesForecast.i2m', 'a') as f:
        await f.write(i2Doc)
|
||||||
|
|
||||||
|
async def makeRecord():
    """Assemble the full TidesForecast record, gzip it, transmit it, and
    delete the temporary files.

    Does nothing when no tide stations are configured.
    """
    if len(tideStations) < 1:
        l.debug("Skipping TidesForecast -- No locations.")
        return

    l.info("Writing TidesForecast record.")

    header = '<Data type="TidesForecast">'
    footer = '</Data>'

    # 'w' (the original used 'a') so a stale .i2m left over from a crashed
    # run cannot be appended to -- matches the other record generators.
    async with aiofiles.open('./.temp/TidesForecast.i2m', 'w') as doc:
        await doc.write(header)

    for station, geocode in zip(tideStations, geocodes):
        await getData(station, geocode)

    async with aiofiles.open('./.temp/TidesForecast.i2m', 'a') as end:
        await end.write(footer)

    dom = xml.dom.minidom.parse('./.temp/TidesForecast.i2m')
    xmlPretty = dom.toprettyxml(indent=" ")

    # Skip the XML declaration that toprettyxml() prepends (23 characters).
    async with aiofiles.open('./.temp/TidesForecast.i2m', 'w') as g:
        await g.write(xmlPretty[23:])

    # Compress i2m to gzip
    with open('./.temp/TidesForecast.i2m', 'rb') as f_in:
        with gzip.open('./.temp/TidesForecast.gz', 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)

    file = "./.temp/TidesForecast.gz"
    command = '<MSG><Exec workRequest="storeData(File={0},QGROUP=__TidesForecast__,Feed=TidesForecast)" /><GzipCompressedMsg fname="TidesForecast" /></MSG>'

    # This module does `import py2Lib.bit` without an alias, so the fully
    # qualified name is required -- the original bare `bit.sendFile`
    # raised NameError at runtime.
    py2Lib.bit.sendFile([file], [command], 1, 0)

    remove('./.temp/TidesForecast.i2m')
    remove('./.temp/TidesForecast.gz')
|
||||||
84
recordGenerators/wateringNeeds.py
Normal file
84
recordGenerators/wateringNeeds.py
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
import shutil
|
||||||
|
import requests
|
||||||
|
import logging,coloredlogs
|
||||||
|
import py2Lib.bit
|
||||||
|
import util.machineProductCfg as MPC
|
||||||
|
import records.lfRecord as LFR
|
||||||
|
import gzip
|
||||||
|
from os import remove
|
||||||
|
import xml.dom.minidom
|
||||||
|
import aiohttp, aiofiles, asyncio
|
||||||
|
|
||||||
|
l = logging.getLogger(__name__)
|
||||||
|
coloredlogs.install()
|
||||||
|
|
||||||
|
geocodes = []
|
||||||
|
coopIds = []
|
||||||
|
|
||||||
|
for i in MPC.getPrimaryLocations():
|
||||||
|
coopIds.append(LFR.getCoopId(i))
|
||||||
|
geocodes.append(LFR.getLatLong(i).replace('/', ','))
|
||||||
|
|
||||||
|
# Open the config file and make it accessible via "cfg"
|
||||||
|
import json
|
||||||
|
with open("config.json", "r") as file:
|
||||||
|
cfg = json.load(file)
|
||||||
|
|
||||||
|
apiKey = cfg["twcApiKey"]
|
||||||
|
|
||||||
|
async def getData(coopId, geocode):
    """Fetch the 7-day wateringNeeds index for one location and append the
    <WateringNeeds> element to ./.temp/WateringNeeds.i2m.

    Skips the location on a non-200 response so an API error page is never
    spliced into the record.
    """
    fetchUrl = f"https://api.weather.com/v2/indices/wateringNeeds/daypart/7day?geocode={geocode}&language=en-US&format=xml&apiKey={apiKey}"
    data = ""

    async with aiohttp.ClientSession() as s:
        async with s.get(fetchUrl) as r:
            if r.status != 200:
                # The original message read "Failed to WateringNeeds" --
                # the verb was missing.
                l.error(f"Failed to write WateringNeeds record -- status code {r.status}")
                return
            data = await r.text()

    # Strip the fixed-size XML prolog/epilog so only the payload remains.
    newData = data[63:-26]

    i2Doc = f'\n <WateringNeeds id="000000000" locationKey="{coopId}" isWxScan="0">\n {newData}\n <clientKey>{coopId}</clientKey>\n </WateringNeeds>'

    # 'a': makeRecord writes the header first, then one element per
    # location.  The explicit close() was removed -- "async with" already
    # closes the file.
    async with aiofiles.open('./.temp/WateringNeeds.i2m', 'a') as f:
        await f.write(i2Doc)
|
||||||
|
|
||||||
|
async def makeRecord():
    """Assemble the full WateringNeeds record, gzip it, transmit it, and
    delete the temporary files."""
    l.info("Writing WateringNeeds record.")

    header = '<Data type="WateringNeeds">'
    footer = '</Data>'

    # 'w' (the original used 'a') so a stale .i2m left over from a crashed
    # run cannot be appended to -- matches the other record generators.
    async with aiofiles.open('./.temp/WateringNeeds.i2m', 'w') as doc:
        await doc.write(header)

    for coopId, geocode in zip(coopIds, geocodes):
        await getData(coopId, geocode)

    async with aiofiles.open('./.temp/WateringNeeds.i2m', 'a') as end:
        await end.write(footer)

    dom = xml.dom.minidom.parse('./.temp/WateringNeeds.i2m')
    xmlPretty = dom.toprettyxml(indent=" ")

    # Skip the XML declaration that toprettyxml() prepends (23 characters).
    async with aiofiles.open('./.temp/WateringNeeds.i2m', 'w') as g:
        await g.write(xmlPretty[23:])

    # Compress i2m to gzip
    with open('./.temp/WateringNeeds.i2m', 'rb') as f_in:
        with gzip.open('./.temp/WateringNeeds.gz', 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)

    file = "./.temp/WateringNeeds.gz"
    command = '<MSG><Exec workRequest="storeData(File={0},QGROUP=__WateringNeeds__,Feed=WateringNeeds)" /><GzipCompressedMsg fname="WateringNeeds" /></MSG>'

    # This module does `import py2Lib.bit` without an alias, so the fully
    # qualified name is required -- the original bare `bit.sendFile`
    # raised NameError at runtime.
    py2Lib.bit.sendFile([file], [command], 1, 0)

    remove('./.temp/WateringNeeds.i2m')
    remove('./.temp/WateringNeeds.gz')
|
||||||
BIN
records/LFRecord.db
Normal file
BIN
records/LFRecord.db
Normal file
Binary file not shown.
40
records/lfRecord.py
Normal file
40
records/lfRecord.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
import sqlite3
|
||||||
|
|
||||||
|
# Make a connection to the LFRecord database
|
||||||
|
con = sqlite3.connect("records/LFRecord.db")
|
||||||
|
cur = con.cursor()
|
||||||
|
|
||||||
|
|
||||||
|
def getZip(locId: str):
    """Return the ZIP code on record for the given location id.

    Raises TypeError (NoneType not subscriptable) if locId is unknown.
    """
    # Parameterized query: the original interpolated locId into the SQL
    # string, which breaks on quotes and allows SQL injection.
    cur.execute("SELECT zip2locId FROM lfrecord WHERE locId=?", (locId,))
    return cur.fetchone()[0]
|
||||||
|
|
||||||
|
def getCoopId(locId: str):
    """Return the TWC co-op ID for the given location id.

    Raises TypeError (NoneType not subscriptable) if locId is unknown.
    """
    # Parameterized query: the original interpolated locId into the SQL
    # string, which breaks on quotes and allows SQL injection.
    cur.execute("SELECT coopId FROM lfrecord WHERE locId=?", (locId,))
    return cur.fetchone()[0]
|
||||||
|
|
||||||
|
def getEpaId(locId: str):
    """Return the air-quality (EPA) station id for the given location id.

    Raises TypeError (NoneType not subscriptable) if locId is unknown.
    """
    # Parameterized query: the original interpolated locId into the SQL
    # string, which breaks on quotes and allows SQL injection.
    cur.execute("SELECT epaId FROM lfrecord WHERE locId=?", (locId,))
    return cur.fetchone()[0]
|
||||||
|
|
||||||
|
def getPollenInfo(locId: str):
    """Return the pollen-forecast id for the given location id.

    Raises TypeError (NoneType not subscriptable) if locId is unknown.
    """
    # Parameterized query: the original interpolated locId into the SQL
    # string, which breaks on quotes and allows SQL injection.
    cur.execute("SELECT pllnId FROM lfrecord WHERE locId=?", (locId,))
    return cur.fetchone()[0]
|
||||||
|
|
||||||
|
def getLatLong(locId: str):
    """Return the coordinates for the given location id as "lat/long".

    (The original docstring was a copy-paste of getPollenInfo's.)
    Raises TypeError (NoneType not subscriptable) if locId is unknown.
    """
    # Parameterized query: the original interpolated locId into the SQL
    # string, which breaks on quotes and allows SQL injection.
    cur.execute("SELECT lat,long FROM lfrecord WHERE locId=?", (locId,))
    fetched = cur.fetchone()
    return fetched[0] + "/" + fetched[1]
|
||||||
|
|
||||||
|
def getLocationInfo(locId: str):
    """Placeholder — not yet implemented; always returns None."""
    # TODO: implement a combined-record lookup for *locId*.
    pass
|
||||||
0
util/__init__.py
Normal file
0
util/__init__.py
Normal file
160
util/machineProductCfg.py
Normal file
160
util/machineProductCfg.py
Normal file
@@ -0,0 +1,160 @@
|
|||||||
|
import json
|
||||||
|
import sys
|
||||||
|
import xmltodict
|
||||||
|
|
||||||
|
|
||||||
|
# Open the MachineProductCfg.xml file in the root directory
# and expose its contents module-wide as the plain dict `data`.
try:
    with open("MachineProductCfg.xml", mode = 'r', encoding= 'utf-8') as MPCxml:
        # xmltodict produces nested OrderedDicts; the json round-trip
        # normalizes them to plain dicts/lists.
        MPCdict = xmltodict.parse(MPCxml.read())
        MPCdump = json.dumps(MPCdict)
        data = json.loads(MPCdump)
except Exception as e:
    # Broad catch is deliberate: any failure reading/parsing the config
    # is fatal at import time, so report it and abort the process.
    print(e)
    sys.exit("There was an error opening your MachineProductCfg.xml. Is the file in the root folder?")
|
||||||
|
|
||||||
|
|
||||||
|
def getPrimaryLocations():
    """Return every primary and nearby location id in the MachineProductCfg.

    Config values look like "<name>_<state>_<locId>"; only the trailing
    locId segment is kept.
    """
    location_ids = []
    for item in data['Config']['ConfigDef']['ConfigItems']['ConfigItem']:
        key = item['@key']
        value = item['@value']
        if value == "":
            continue  # unset slots are empty strings
        if "PrimaryLocation" in key:
            location_ids.append(value.split("_")[2])
        if "NearbyLocation" in key:
            location_ids.append(value.split("_")[2])
    return location_ids
|
||||||
|
|
||||||
|
def getMetroCities():
    """Return all Metro Map location ids configured in the MPC."""
    metro_ids = []
    for item in data['Config']['ConfigDef']['ConfigItems']['ConfigItem']:
        value = item['@value']
        # Values look like "<name>_<state>_<locId>"; keep the trailing id.
        if 'MetroMapCity' in item['@key'] and value != "":
            metro_ids.append(value.split("_")[2])
    return metro_ids
|
||||||
|
|
||||||
|
def getTideStations():
    """Return every tide station id present in the MachineProductCfg."""
    station_ids = []
    for item in data['Config']['ConfigDef']['ConfigItems']['ConfigItem']:
        value = item['@value']
        # Values look like "<name>_<state>_<stationId>"; keep the id part.
        if "TideStation" in item['@key'] and value != "":
            station_ids.append(value.split("_")[2])
    return station_ids
|
||||||
|
|
||||||
|
def getAirportCodes():
    """Return all airport identifiers present in the MachineProductCfg.

    A fixed list of major-market airports is always included; any airports
    configured in the MPC are appended after it (deduplicated).
    """
    # Seed list of always-included airports, in their original order.
    airports = [
        'ATL', 'LAX', 'ORD', 'DFW', 'JFK', 'DEN', 'SFO', 'CLT', 'LAS', 'PHX',
        'IAH', 'MIA', 'SEA', 'EWR', 'MCO', 'MSP', 'DTW', 'BOS', 'PHL', 'LGA',
        'FLL', 'BWI', 'IAD', 'MDW', 'SLC', 'DCA', 'HNL', 'SAN', 'TPA', 'PDX',
        'STL', 'HOU', 'BNA', 'AUS', 'OAK', 'MSY', 'RDU', 'SJC', 'SNA', 'DAL',
        'SMF', 'SAT', 'RSW', 'PIT', 'CLE', 'IND', 'MKE', 'CMH', 'OGG', 'PBI',
        'BDL', 'CVG', 'JAX', 'ANC', 'BUF', 'ABQ', 'ONT', 'OMA', 'BUR', 'OKC',
        'MEM', 'PVD', 'RIC', 'SDF', 'RNO', 'TUS', 'CHS', 'ORF', 'PWM', 'GRR',
        'BHM', 'LIT', 'DSM', 'FAR', 'FSD', 'ICT', 'LBB', 'BIL', 'BOI', 'GEG'
    ]
    for i in data['Config']['ConfigDef']['ConfigItems']['ConfigItem']:
        if "Airport" in i['@key'] and i['@value'] != "":
            # Values look like "<name>_<state>_<code>". Extract the code
            # BEFORE the duplicate check: the original compared the raw
            # "name_state_code" value against the bare codes in `airports`,
            # so the check never matched and seeded airports could be
            # appended a second time.
            code = i['@value'].split("_")[2]
            if code not in airports:
                airports.append(code)
    return airports
|
||||||
|
|
||||||
|
def getAlertZones():
    """Return the list of alert zones/counties present in the MachineProductCfg.

    The primary zone and primary county are single values; the secondary
    entries are comma-separated lists and are flattened into the result.
    """
    zones = []
    for item in data['Config']['ConfigDef']['ConfigItems']['ConfigItem']:
        key = item['@key']
        value = item['@value']
        if value == "":
            continue
        # Keys are matched exactly, so at most one branch applies per item.
        if key == "primaryZone":
            zones.append(value)  # single value
        elif key == "secondaryZones":
            zones.extend(value.split(','))
        elif key == 'primaryCounty':
            zones.append(value)
        elif key == "secondaryCounties":
            zones.extend(value.split(','))
    return zones
|
||||||
7
util/util.py
Normal file
7
util/util.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
import re
|
||||||
|
|
||||||
|
def sort_alphanumeric(data):
    """Sort *data* alphanumerically ("natural sort") and return a new list.

    Embedded digit runs compare as integers and other text compares
    case-insensitively, so e.g. "item2" sorts before "item10".
    """
    def convert(text):
        # Digit runs become ints so they compare numerically.
        return int(text) if text.isdigit() else text.lower()

    def alphanum_key(key):
        # Fix: the original pattern was '([0.9]+)' — a character class of
        # '0', '.', '9' — instead of the digit range '([0-9]+)', which broke
        # the numeric comparison for most digits.
        return [convert(chunk) for chunk in re.split(r'([0-9]+)', key)]

    return sorted(data, key=alphanum_key)
|
||||||
Reference in New Issue
Block a user