commit 63e79c5a4cef2a5f221d54226c8511c61abf25c8 Author: iRaven4522 Date: Tue Jan 28 00:58:23 2025 -0600 Initial commit diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d26e866 --- /dev/null +++ b/.gitignore @@ -0,0 +1,14 @@ +# Python Virtual Environment +venv/ + +# MachineProductCfg +MachineProductCfg.xml + +# Ignore tiles +tiles/* + +# Python compiled files +__pycache__/ + +# Temp directory +.temp/ \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..3fa0095 --- /dev/null +++ b/LICENSE @@ -0,0 +1,675 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. 
If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. 
A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + i2ME Legacy Build + Copyright (C) 2025 April Pizzata + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. + diff --git a/README.md b/README.md new file mode 100644 index 0000000..0978442 --- /dev/null +++ b/README.md @@ -0,0 +1,35 @@ +

# IntelliStar 2 Message Encoder / Data Collector

+ +# Requirements +* Properly set up interface for UDP. +* TWC API Key + +## Completed Records +- [X] Aches and Pains +- [X] Air Quality +- [X] Airport Delays + National Airport delays +- [X] Alerts *(BERecord)* +- [X] Breathing +- [X] Current Conditions +- [X] Daily Forecasts +- [X] Hourly Forecasts +- [X] Heating and Cooling +- [X] Mosquito Activity +- [X] Pollen Forecasts +- [X] Tide Station Forecasts +- [X] Watering Needs +- [ ] Marine Forecasts +- [ ] Traffic Forecasts **(API access missing)** + +# Usage instructions +1) Ensure that [Python is installed](https://www.python.org), or from your package repository (apt/winget). +2) [Download the code or git clone this URL] and unzip to the wanted directory. +3) Use command prompt to enter the directory of the scripts, then install package requirements:
+```pip install -r requirements.txt```
+4) Copy **config.example.json** to **config.json** and edit to your specific config. +4) Drop your unit's **MachineProductConfiguration.xml** file into the root of the script +5) Run ``py main.py`` + +### Attributions & Disclaimers +Air Quality reports are powered by Copernicus Atmosphere Monitoring Service Information 2022. +Neither the European Commission nor ECMWF is responsible for any use that may be made of the Copernicus Information or Data it contains. diff --git a/RecordTasks.py b/RecordTasks.py new file mode 100644 index 0000000..8f5b924 --- /dev/null +++ b/RecordTasks.py @@ -0,0 +1,91 @@ +import asyncio +from recordGenerators import Alerts,CurrentObservations,HourlyForecast,DailyForecast, AirQuality, AirportDelays, AchesAndPains, Breathing, HeatingAndCooling, MosquitoActivity, PollenForecast, TideForecast, WateringNeeds +from radar import TWCRadarCollector +from datetime import datetime + + +async def updateMosaicTask(): + mosaicUpdateIntervals = [i+1 for i in range(0, 60, 5)] + + while True: + # Mosaic intervals are 5+1 minutes, so instead of waiting 40 seconds and running "Datetime.now()" twice, We run it once and wait for 60. + if datetime.now().minute in mosaicUpdateIntervals: + await TWCRadarCollector.collect("radarmosaic") + await asyncio.sleep(1) + +async def updateSatradTask(): + satradUpdateIntervals = [i+1 for i in range(0, 60, 10)] + + while True: + #Satrad intervals are 10+1 minutes, so instead of waiting 40 seconds and running "Datetime.now()" twice, We run it once and wait for 60. + if datetime.now().minute in satradUpdateIntervals: + await TWCRadarCollector.collect("satrad") + await asyncio.sleep(1) + + +async def alertsTask(): + while True: + await Alerts.makeRecord() + await asyncio.sleep(60) + +async def coTask(): + while True: + await CurrentObservations.makeDataFile() + await asyncio.sleep(5 * 60) + +# These tasks should be updated every hour + +async def hfTask(): + while True: + await HourlyForecast.makeDataFile() + await asyncio.sleep(60 * 60) + +async def dfTask(): + while True: + await DailyForecast.makeDataFile() + await asyncio.sleep(60 * 60) + +async def aqTask(): + while True: + await AirQuality.writeData() + await asyncio.sleep(60 * 60) + +async def aptTask(): + while True: + await AirportDelays.writeData() + await asyncio.sleep(60 * 60) + +async def apTask(): + while True: + await AchesAndPains.makeRecord() + await asyncio.sleep(60 * 60) + +async def brTask(): + while True: + await Breathing.makeDataFile() + await asyncio.sleep(60 * 60) + +async def hcTask(): + while True: + await HeatingAndCooling.makeRecord() + await asyncio.sleep(60 * 60) + +async def maTask(): + while True: + await MosquitoActivity.makeRecord() + await asyncio.sleep(60 * 60) + +async def pTask(): + while True: + await PollenForecast.makeDataFile() + await asyncio.sleep(60 * 60) + +async def tTask(): + while True: + await TideForecast.makeRecord() + await asyncio.sleep(60 * 60) + +async def wnTask(): + while True: + await WateringNeeds.makeRecord() + await asyncio.sleep(60 * 60) diff --git a/Util/MachineProductCfg.py b/Util/MachineProductCfg.py new file mode 100644 index 0000000..6549eb2 --- /dev/null +++ b/Util/MachineProductCfg.py @@ -0,0 +1,160 @@ +import json +import sys +import xmltodict + + +# Open the MachineProductCfg.xml file in the root directory +try: + with open("MachineProductCfg.xml", mode = 'r', encoding= 'utf-8') as MPCxml: + MPCdict = xmltodict.parse(MPCxml.read()) + MPCdump = json.dumps(MPCdict) + data = json.loads(MPCdump) +except Exception as e: + print(e) + 
sys.exit("There was an error opening your MachineProductCfg.xml. Is the file in the root folder?") + + +def getPrimaryLocations(): + """ Returns all of the primary locations in the MachineProductCfg """ + locationIds = [] + # iterate on the json data and grab anything that has PrimaryLocation. + # Also needs to return anything in the Regional area. + for i in data['Config']['ConfigDef']['ConfigItems']['ConfigItem']: + if "PrimaryLocation" in i['@key'] and i['@value'] != "": + # Split the string up + locationIds.append(i['@value'].split("_")[2]) + + if "NearbyLocation" in i['@key'] and i['@value'] != "": + locationIds.append(i['@value'].split("_")[2]) + + return locationIds + +def getMetroCities(): + """ Returns all Metro Map locations in the MPC """ + locationIds = [] + + for i in data['Config']['ConfigDef']['ConfigItems']['ConfigItem']: + if 'MetroMapCity' in i['@key'] and i['@value'] != "": + locationIds.append(i['@value'].split("_")[2]) + + return locationIds + +def getTideStations(): + """ Returns all of the tide stations present in the MachineProductCfg """ + stations = [] + for i in data['Config']['ConfigDef']['ConfigItems']['ConfigItem']: + if "TideStation" in i['@key'] and i['@value'] != "": + stations.append(i['@value'].split("_")[2]) + + return stations + +def getAirportCodes(): + """ Returns all of the airport identifiers present in the MachineProductCfg """ + airports = [ + 'ATL', + 'LAX', + 'ORD', + 'DFW', + 'JFK', + 'DEN', + 'SFO', + 'CLT', + 'LAS', + 'PHX', + 'IAH', + 'MIA', + 'SEA', + 'EWR', + 'MCO', + 'MSP', + 'DTW', + 'BOS', + 'PHL', + 'LGA', + 'FLL', + 'BWI', + 'IAD', + 'MDW', + 'SLC', + 'DCA', + 'HNL', + 'SAN', + 'TPA', + 'PDX', + 'STL', + 'HOU', + 'BNA', + 'AUS', + 'OAK', + 'MSY', + 'RDU', + 'SJC', + 'SNA', + 'DAL', + 'SMF', + 'SAT', + 'RSW', + 'PIT', + 'CLE', + 'IND', + 'MKE', + 'CMH', + 'OGG', + 'PBI', + 'BDL', + 'CVG', + 'JAX', + 'ANC', + 'BUF', + 'ABQ', + 'ONT', + 'OMA', + 'BUR', + 'OKC', + 'MEM', + 'PVD', + 'RIC', + 'SDF', + 'RNO', + 'TUS', + 'CHS', + 'ORF', + 'PWM', + 'GRR', + 'BHM', + 'LIT', + 'DSM', + 'FAR', + 'FSD', + 'ICT', + 'LBB', + 'BIL', + 'BOI', + 'GEG' + ] + for i in data['Config']['ConfigDef']['ConfigItems']['ConfigItem']: + if "Airport" in i['@key'] and i['@value'] != "" and not i['@value'] in airports: + # Split the string up + airports.append(i['@value'].split("_")[2]) + + return airports + +def getAlertZones(): + """ Returns a list of zones present in the MachineProductCfg """ + zones = [] + for i in data['Config']['ConfigDef']['ConfigItems']['ConfigItem']: + if i['@key'] == "primaryZone" and i['@value'] != "": + zones.append(i['@value']) # This should only be one value + + if i['@key'] == "secondaryZones" and i['@value'] != "": + for x in i['@value'].split(','): + zones.append(x) + + if i['@key'] == 'primaryCounty' and i['@value'] != "": + zones.append(i['@value']) + + if i['@key'] == "secondaryCounties" and i['@value'] != "": + for x in i['@value'].split(','): + zones.append(x) + + return zones \ No newline at end of file diff --git a/Util/Util.py b/Util/Util.py new file mode 100644 index 0000000..c67f32c --- /dev/null +++ b/Util/Util.py @@ -0,0 +1,7 @@ +import re + +def sort_alphanumeric(data): + """ Sorts a list alphanumerically """ + convert = lambda text: int(text) if text.isdigit() else text.lower() + alphanum_key = lambda key: [convert(c) for c in re.split('([0.9]+)', key)] + return(sorted(data, key=alphanum_key)) \ No newline at end of file diff --git a/Util/__init__.py b/Util/__init__.py new file mode 100644 index 0000000..e69de29 diff 
--git a/config.example.json b/config.example.json new file mode 100644 index 0000000..2eb50d2 --- /dev/null +++ b/config.example.json @@ -0,0 +1,7 @@ +{ + "twcApiKey": "balls", + "multicastIf": "192.168.1.100", + "multicastGroup": "224.1.1.77", + "useRadarServer": "False", + "radarBaseUrl": "balls.com/radar" +} \ No newline at end of file diff --git a/lo8s.py b/lo8s.py new file mode 100644 index 0000000..b26e7b3 --- /dev/null +++ b/lo8s.py @@ -0,0 +1,120 @@ +import py2Lib.bit as bit +from time import sleep +from datetime import datetime, timedelta +from random import choice + +def runLo8s(flavor, duration, LDL, logo = None, LDLColor = None, EmergencyLFCancel = None): + Id = ''.join(choice('ABCDEF0123456789') for i in range(16)) + nowUTC = datetime.utcnow() + now = datetime.now() + friendlyLo8sRunTime = (now + timedelta(seconds=30)).strftime('%m/%d/%Y %I:%M:%S %p') + runTime = nowUTC + timedelta(seconds=30) + ldlCancelTime = runTime.strftime('%m/%d/%Y %H:%M:%S:02') + lo8sRunTime = runTime.strftime('%m/%d/%Y %H:%M:%S:00') + if flavor == 'Z': + nextLDLRunTime = runTime + timedelta(seconds=91) + else: + nextLDLRunTime = runTime + timedelta(seconds=65) + nextLDLRun = nextLDLRunTime.strftime('%m/%d/%Y %H:%M:%S:02') + if duration == '60': + duration = '1800' + elif duration == '65': + duration = '1950' + elif duration == '90': + duration = '2700' + elif duration == '120': + duration = '3600' + else: + print('Invalid Duration specified. Please specifiy length of the local forecast in seconds.\n60 for 1 minute, 65 for 1 minute 5 seconds, 90 for 1 minute 30 seconds, 120 for 2 minutes.\n\nScript will now terminate...') + exit() + if EmergencyLFCancel == 1: + print(f'Emergency Local On The 8s Kill Switch is Activated. No Local On The 8s Will Air. Maybe b3atdropp3r Hacked An i2 Again???\n{friendlyLo8sRunTime}') + sleep(27) + elif logo != '': + print(f'Sending Load Command To All Stars. The Local On The 8s is expected to start at {friendlyLo8sRunTime} ...') + bit.sendCommand([f''], 1) + sleep(27) + else: + print(f'Sending Load Command To All Stars. The Local On The 8s is expected to start at {friendlyLo8sRunTime} ...') + bit.sendCommand([f''], 1) + sleep(27) + print('\nCanceling LDL...') + bit.sendCommand([f''], 1) + + if EmergencyLFCancel == 1: + print('Not Airing Local On The 8s Due To Kill Switch Activated. Will Reload LDL After National DBS Forecast Finishes...') + sleep(53) + else: + print('\nSending The Run Command. Stand By For Your Local Forecast...') + bit.sendCommand([f''], 1) + sleep(53) + + if EmergencyLFCancel == 1: + print("\nGetting The LDL Ready So It'll Cue After The National DBS Local Forecast") + bit.sendCommand([f''], 1) + sleep(10) + print("\nSending The Run Command For The LDL...") + bit.sendCommand([f''], 1) + elif LDL == 1: + print("\nGetting The LDL Ready So It'll Cue After This Local Forecast...") + bit.sendCommand([f''], 1) + sleep(10) + print('\nSending The Run Command For The LDL. As Dave Schwartz Would Say... "That\'s a Wrap!"') + bit.sendCommand([f''], 1) + else: + sleep(10) + print('That\'s It Folks. As Dave Schwartz Would Say... 
"That\'s a Wrap!"') + +#----- SET BACKGROUNDS HERE ----------------------# +BGCatastrophic = ['3094', '3095', '3103', '3115'] +BGStorm = [] +BGAlert = ['3094', '3095', '3103', '3115'] +#BGNight = ['3091', '3092', '3102', '3114', '3191'] +BGNorm = ['3091', '3092', '3102', '3114'] + +#------ BG RULES SECTION ------------------------# + +if BGCatastrophic: + brandedCatastrophic = choice(BGCatastrophic) +else: + brandedCatastrophic = '' +if BGStorm: + brandedStorm = choice(BGStorm) +else: + brandedStorm = '' +if BGAlert: + brandedAlert = choice(BGAlert) +else: + brandedAlert = '' + +if BGNorm: + brandedNormal = choice(BGNorm) +else: + brandedNormal = '' +#---------------------------------------------- + +while True: + mode = input('Please specify mode.\n0 - "Unbranded"\n1 - "Normal"\n2 - "Alert"\n3 - "Storm Mode"\n4 - "Catastrophic"\n5 - "Tag"\n9 - Exit LOT8\n==>') + if mode == '0': + runLo8s('V', '65', 1, '', 'E') + elif mode == '1': + runLo8s('V', '65', 1, brandedNormal, 'E') + elif mode == '2': + runLo8s('V', '65', 1, brandedAlert, 'E') + elif mode == '3': + runLo8s('V1', '65', 1, brandedStorm, 'STORM') + elif mode == '4': + runLo8s('V2', '65', 1, brandedCatastrophic, 'F') + elif mode == '5': + ad = input('Please specify Tag Number.\n==>') + flavor = input("Flavor Overide? Default is 'V'.\nEnter flavor letter, or press enter to bypass.\n==>") + if flavor == '': + runLo8s('V', '65', 1, ad, 'E') + else: + runLo8s(flavor, '65', 1, ad, 'E') + elif mode == '9': + print("\n\nGoodbye.\n\n") + exit() + else: + print("Not a valid selection.\n\n") + sleep(1) diff --git a/main.py b/main.py new file mode 100644 index 0000000..88eae6d --- /dev/null +++ b/main.py @@ -0,0 +1,83 @@ +import asyncio, aiofiles +from asyncio.log import logger +from asyncore import loop +import logging,coloredlogs +from radar import TWCRadarCollector +import os +from datetime import datetime +import RecordTasks + +# Open the config file and make it accessible via "cfg" +import json +with open("conf.json", "r") as file: + cfg = json.load(file) + +l = logging.getLogger(__name__) +coloredlogs.install(logger=l) + +useRadarServer = cfg[useRadarServer] + +async def createTemp(): + """ Used on a first time run, creates necessary files & directories for the message encoder to work properly. 
""" + if not (os.path.exists('./.temp/')): + l.info("Creating necessary directories & files..") + os.mkdir('./.temp') + + # Used for the record generator + os.mkdir('./.temp/tiles/') + os.mkdir('./.temp/tiles/output/') + + # Used for radar server downloads + os.mkdir('./.temp/output') + os.mkdir('./.temp/output/radarmosaic') + os.mkdir('./.temp/output/satrad') + + # Create msgId file for bit.py + async with aiofiles.open('./.temp/msgId.txt', 'w') as msgId: + await msgId.write('410080515') + await msgId.close() + else: + l.debug(".temp file exists") + return + + +async def main(): + await createTemp() + + mosaicTask = asyncio.create_task(RecordTasks.updateMosaicTask()) + satradTask = asyncio.create_task(RecordTasks.updateSatradTask()) + alertsTask = asyncio.create_task(RecordTasks.alertsTask()) + coTask = asyncio.create_task(RecordTasks.coTask()) + hfTask = asyncio.create_task(RecordTasks.hfTask()) + dfTask = asyncio.create_task(RecordTasks.dfTask()) + aqTask = asyncio.create_task(RecordTasks.aqTask()) + aptTask = asyncio.create_task(RecordTasks.aptTask()) + apTask = asyncio.create_task(RecordTasks.apTask()) + brTask = asyncio.create_task(RecordTasks.brTask()) + hcTask = asyncio.create_task(RecordTasks.hcTask()) + maTask = asyncio.create_task(RecordTasks.maTask()) + pTask = asyncio.create_task(RecordTasks.pTask()) + tTask = asyncio.create_task(RecordTasks.tTask()) + wnTask = asyncio.create_task(RecordTasks.wnTask()) + + # In theory, these should all run concurrently without problems + await alertsTask + await coTask + await hfTask + await dfTask + await aqTask + await aptTask + await apTask + await brTask + await hcTask + await maTask + await pTask + await tTask + await wnTask + + if useRadarServer: + await mosaicTask + await satradTask + +if __name__ == "__main__": + asyncio.run(main()) \ No newline at end of file diff --git a/py2Lib/README.md b/py2Lib/README.md new file mode 100644 index 0000000..46486d3 --- /dev/null +++ b/py2Lib/README.md @@ -0,0 +1,3 @@ +# py2Lib + +A python implementation of i2Lib from the original i2MessageEncoder. \ No newline at end of file diff --git a/py2Lib/__init__.py b/py2Lib/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/py2Lib/bit.py b/py2Lib/bit.py new file mode 100644 index 0000000..988fdd6 --- /dev/null +++ b/py2Lib/bit.py @@ -0,0 +1,216 @@ +import socket +import sys +import os +import struct +import binascii +import math +import time +import logging,coloredlogs + +# Open the config file and make it accessible via "cfg" +import json +with open("conf.json", "r") as file: + cfg = json.load(file) + +l = logging.getLogger(__name__) +coloredlogs.install() + +MCAST_GRP = cfg[multicastGroup] +MCAST_IF = cfg[multicastIf] +BUF_SIZE = 1396 + +MULTICAST_TTL = 2 + +conn = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) +conn.setsockopt(socket.IPPROTO_IP,socket.IP_ADD_MEMBERSHIP,socket.inet_aton(MCAST_GRP)+socket.inet_aton(MCAST_IF)) + +test = b"This is a test" + +def sendFile(files, commands, numSgmts, Pri): + if Pri == 0: + MCAST_PORT = 7787 + elif Pri == 1: + MCAST_PORT = 7788 + else: + l.critical("Invalid Priority Flag. 
0 = Routine Message 1 = High Priority Message\n\nScript will now terminate...") + exit() + #Get the next message ID + with open('./.temp/msgId.txt', "r") as f: + oMsgId = f.read() + msgNum = int(oMsgId) + f.close() + + nMsgNum = msgNum + 1 + h = open('./.temp/msgId.txt', "w") + h.write(str(nMsgNum)) + h.close() + segnmNum = 0 + if Pri == 0: + l.info("Sending Routine Msg-" + str(msgNum) + " on UDP " + MCAST_GRP + " " + str(MCAST_PORT) + "....") + elif Pri == 1: + l.info("Sending High Priority Msg-" + str(msgNum) + " on UDP " + MCAST_GRP + " " + str(MCAST_PORT) + "....") + startFlag = False + + for x, y in zip(files, commands): + size = os.path.getsize(x) + check = size - BUF_SIZE + pToBeSent = size / 1405 + packRounded = math.ceil(pToBeSent) + 1 + numSegments = numSgmts + 3 + total_sent = 0 + payloadLength = 0 + packet_count = 1 + j = 0 + pc = packet_count.to_bytes(1, byteorder='big') + i = 0 + encode1 = bytes(y + 'I2MSG', 'UTF-8') + commandLength = len(y) + encode2 = commandLength.to_bytes(4, byteorder='little') + theCommand = b"".join([encode1, encode2]) + char = '' + new_file = open(x, "ab") + new_file.write(theCommand) # Append command to end of the file + new_file.close() + new_size = os.path.getsize(x) + + if startFlag == False: + #Our 34 byte beginning packet + p1 = struct.pack(">BHHHIIBBBBBBBIBIBBB", 18, 1, 0 , 16, msgNum, 0, segnmNum, 0, 0, 8, numSegments, 3, 0, 0, 8, packRounded, 0, 0, 0) + conn.sendto(p1, (MCAST_GRP, MCAST_PORT)) + startFlag = True + with open(x,"rb") as message: + message.seek(0) + data = message.read(BUF_SIZE) + while data: + packetHeader = struct.pack(">BHHHIIBBB", 18, 1, 0, 1405, msgNum, packet_count, 0, 0, 0) + fec = struct.pack("BHHHIIBBBBBBBI", 18, 1, 1, 8, msgNum, 0, segnmNum, 0, 0, 8, 0, 0, 0, 67108864) + p4 = struct.pack(">BHHHIIBBB", 18, 1, 1, 14, msgNum, 1, segnmNum, 0, 0) + test + conn.sendto(p3, (MCAST_GRP, MCAST_PORT)) + conn.sendto(p4, (MCAST_GRP, MCAST_PORT)) + segnmNum += 1 + w -= 1 + #------------------------------------------------------------------------------------------------------- +def sendCommand(command, Pri, msgNum = None): + if Pri == 0: + MCAST_PORT = 7787 + elif Pri == 1: + MCAST_PORT = 7788 + else: + l.critical("Invalid Priority Flag. 
0 = Routine Message 1 = High Priority Message\n\nScript will now terminate...") + exit() + #Get the next message ID + with open('./.temp/msgId.txt', "r") as f: + oMsgId = f.read() + msgNum = int(oMsgId) + f.close() + + nMsgNum = msgNum + 1 + h = open('./.temp/msgId.txt', "w") + h.write(str(nMsgNum)) + h.close() + segnmNum = 0 + if Pri == 0: + l.info("Sending Routine Msg-" + str(msgNum) + " on UDP " + MCAST_GRP + " " + str(MCAST_PORT) + "....") + elif Pri == 1: + l.info("Sending High Priority Msg-" + str(msgNum) + " on UDP " + MCAST_GRP + " " + str(MCAST_PORT) + "....") + startFlag = False + + for x in command: + bx = bytes(x, 'utf-8') + with open('./.temp/command', 'wb') as c: + c.write(bx) + c.close() + size = os.path.getsize('./.temp/command') + encode1 = bytes('I2MSG', 'UTF-8') + commandLength = size + encode2 = commandLength.to_bytes(4, byteorder='little') + theCommand = b"".join([encode1, encode2]) + with open('./.temp/command', 'ab') as d: + d.write(theCommand) + d.close() + check = size - BUF_SIZE + pToBeSent = size / 1405 + packRounded = math.ceil(pToBeSent) + 1 + numSegments = 4 + total_sent = 0 + payloadLength = 0 + packet_count = 1 + j = 0 + pc = packet_count.to_bytes(4, byteorder='little') + i = 0 + char = '' + new_size = os.path.getsize('./.temp/command') + + if startFlag == False: + #Our 34 byte beginning packet + p1 = struct.pack(">BHHHIIBBBBBBBIBIBBB", 18, 1, 0 , 16, msgNum, 0, segnmNum, 0, 0, 8, numSegments, 3, 0, 0, 8, packRounded, 0, 0, 0) + conn.sendto(p1, (MCAST_GRP, MCAST_PORT)) + startFlag = True + with open('./.temp/Command',"rb") as message: + message.seek(0) + data = message.read(BUF_SIZE) + while data: + packetHeader = struct.pack(">BHHHIIBBB", 18, 1, 0, 1405, msgNum, packet_count, 0, 0, 0) + fec = struct.pack("BHHHIIBBBBBBBI", 18, 1, 1, 8, msgNum, 0, segnmNum, 0, 0, 8, 0, 0, 0, 67108864) + p4 = struct.pack(">BHHHIIBBB", 18, 1, 1, 14, msgNum, 1, segnmNum, 0, 0) + test + conn.sendto(p3, (MCAST_GRP, MCAST_PORT)) + conn.sendto(p4, (MCAST_GRP, MCAST_PORT)) + segnmNum += 1 + w -= 1 + #------------------------------------------------------------------------------------------------------- diff --git a/py2Lib/command.py b/py2Lib/command.py new file mode 100644 index 0000000..4de51e8 --- /dev/null +++ b/py2Lib/command.py @@ -0,0 +1,309 @@ +import bit +import os +import shutil +import math +import time +from datetime import datetime + +def restartI2Service(headendIds): + + HeadendList = '' + + for x in headendIds: + HeadendList += ('' + x + '') + bit.sendCommand('' + HeadendList + 'I2MSG', 1) + #print('' + HeadendList + '') + +def rebootI2(headendIds): + + HeadendList = '' + + for x in headendIds: + HeadendList += ('' + x + '') + + commands = [] + command = '' + HeadendList + '' + commands.append(command) + bit.sendCommand(commands, 1) + +def clearStarBundle(headendIds, btype): + + HeadendList = '' + + for x in headendIds: + HeadendList += ('' + x + '') + + commands = [] + command = '' + HeadendList + '' + commands.append(command) + bit.sendCommand(commands, 1) + +def changePasswords(PasswordFile, headendIds): + HeadendList = '' + files = [] + files.append(PasswordFile) + commands = [] + numSegs = 1 + if headendIds != None: + for x in headendIds: + HeadendList += ('' + x + '') + command = '' + HeadendList + '' + commands.append(command) + bit.sendFile(files, commands, numSegs, 0) + else: + command = '' + commands.append(command) + bit.sendFile(files, commands, numSegs, 0) + os.remove(PasswordFile) + +def sendMaintCommand(File, headendIds): + HeadendList = '' + files = [] + 
files.append(File) + commands = [] + numSegs = 1 + if headendIds != None: + for x in headendIds: + HeadendList += ('' + x + '') + #command = '' + HeadendList + '' + #command = '' + HeadendList + '' + #command = '' + HeadendList + '' + #command = '' + HeadendList + '' + #command = '' + HeadendList + '' + #command = '' + HeadendList + '' + #command = '' + HeadendList + '' + + #I2 HD + #command = '' + HeadendList + '' + commands.append(command) + bit.sendFile(files, commands, numSegs, 0) + else: + #command = '' + commands.append(command) + bit.sendFile(files, commands, numSegs, 0) + os.remove(File) + +def loadRunPres(headendIds, Flavor, Logo, Duration, Id): + HeadendList = '' + + for x in headendIds: + HeadendList += ('' + x + '') + + if Logo == '': + command = ['' + HeadendList + ''] + bit.sendCommand(command, 1) + else: + command = ['' + HeadendList + ''] + bit.sendCommand(command, 1) + +def sendStarBundle(File): + + size = os.path.getsize(File) + if size >= 67550000: + CHUNK_SIZE = 67550000 + file_number = 1 + fPath = os.path.dirname(File) + fpToSplit = os.path.splitext(File) + splitFn = fpToSplit[0].split('\\') + newFn = splitFn[-1] + eCount = size / CHUNK_SIZE + estCount = math.ceil(eCount) + + print("File size is greater than 64MB. Will need to split the files for transmission...\n") + time.sleep(1) + + + with open(File, "rb") as f: + chunk = f.read(CHUNK_SIZE) + while chunk: + if file_number < 10: + fileNum = '0' + str(file_number) + else: + fileNum = file_number + with open(fPath + '\\split\\' + newFn + '_' + str(fileNum), "wb") as chunk_file: + chunk_file.write(chunk) + print('Successfully split file ' + str(file_number) + ' out of ' + str(estCount) + '\n') + file_number += 1 + chunk = f.read(CHUNK_SIZE) + + count = file_number - 1 + part = 1 + + with open('./.temp/msgId.txt', "r") as f: + oMsgId = f.read() + Id = int(oMsgId) + f.close() + + for x in os.listdir(fPath + '\\split'): + path = fPath + '\\split\\' + y = path + x + if part != count: + print(x) + bit.sendFile([y],[''], 1, 0) + part += 1 + time.sleep(30) + elif part == count: + print(x) + bit.sendFile([y],[''], 1, 0) + time.sleep(30) + bit.sendCommand([''], 0) + for f in os.listdir(path): + os.remove(os.path.join(path, f)) + else: + bit.sendFile([File],[''], 1, 0) + os.remove(File) + +def sendUpgrade(File, RelName): + + size = os.path.getsize(File) + if size >= 67550000: + CHUNK_SIZE = 67550000 + file_number = 1 + fPath = os.path.dirname(File) + fpToSplit = os.path.splitext(File) + splitFn = fpToSplit[0].split('\\') + newFn = splitFn[-1] + eCount = size / CHUNK_SIZE + estCount = math.ceil(eCount) + + print("File size is greater than 64MB. 
Will need to split the files for transmission...\n") + time.sleep(1) + + + with open(File, "rb") as f: + chunk = f.read(CHUNK_SIZE) + while chunk: + if file_number < 10: + fileNum = '0' + str(file_number) + else: + fileNum = file_number + with open(fPath + '\\split\\' + newFn + '_' + str(fileNum), "wb") as chunk_file: + chunk_file.write(chunk) + print('Successfully split file ' + str(file_number) + ' out of ' + str(estCount) + '\n') + file_number += 1 + chunk = f.read(CHUNK_SIZE) + + count = file_number - 1 + part = 1 + + with open('./.temp/msgId.txt', "r") as f: + oMsgId = f.read() + Id = int(oMsgId) + f.close() + + for x in os.listdir(fPath + '\\split'): + path = fPath + '\\split\\' + y = path + x + if part != count: + print(x) + bit.sendFile([y],[''], 1, 0) + part += 1 + time.sleep(10) + elif part == count: + print(x) + bit.sendFile([y],[''], 1, 0) + part += 1 + time.sleep(15) + bit.sendCommand([''], 0) + for f in os.listdir(path): + os.remove(os.path.join(path, f)) + else: + bit.sendFile([File],[''], 1, 0) + os.remove(File) + +def makeStarBundle(Directory, Type, flag, Version, date, sendAfter): + header = '\n ' + Version + '\n ' + date + '\n ' + Type + '\n \n' + with open('./.temp/i2State\\SD\\ChangesetBundle\\MetaData\\manifest.xml', 'w') as ma: + ma.write(header) + ma.close() + + for (root,dirs,files) in os.walk(Directory, topdown=True): + for name in files: + rootDir = root[24:] + bDest = os.path.join(rootDir,name) + fDest = os.path.join(root,name) + shutil.copy(fDest, './.temp/i2State\\SD\\ChangesetBundle') + for name in files: + if flag == 'Domestic_Universe': + flag = 'flags="Domestic_Universe"' + elif flag == 'Domestic_SD_Universe': + flag = 'flags="Domestic_SD_Universe"' + else: + pass + with open('./.temp/i2State\\SD\\ChangesetBundle\\MetaData\\manifest.xml', 'a') as mb: + mb.write(' \n') + mb.close() + closer = ' \n' + with open('./.temp/i2State\\SD\\ChangesetBundle\\MetaData\\manifest.xml', 'a') as ma: + ma.write(closer) + ma.close() + + +#restartI2Service(['006833']) + +#rebootI2(['006833']) + +#clearStarBundle(['006833'], 'Changeset') + +#bit.sendCommand([''], 1) + +#loadRunPres(['038488'], 'domestic/ldlE', '', '72000', 'LDL1') + +#changePasswords('./.temp/passwords2.gz', ['006833']) + +#sendMaintCommand('./.temp/maint\\temp\\passwords',['040500']) + +#sendStarBundle("./.temp/Bundle.zip") + +#sendUpgrade("./.temp/Upgrades\\wireshark_1.4.6.0.zip", "wireshark_1.4.6.0") + +#For splitting +#sendUpgrade("./.temp/ChangesetHD.zip", "PipelineMaint_6.15.1.5714") + +#For no split upgrades +#bit.sendFile('./.temp/Upgrades\\vizRequiredFilesForI2_1.2.0.0.zip', '040500I2MSG', 0) + +#For split upgrades +#bit.sendFile('./.temp/split\\ChangesetHD_04', 'I2MSG', 0) + +#Command for split upgrades + +#commands = [] +#command = '' + +#commands.append(command) +#bit.sendCommand(commands, 1, 0) + +#bit.sendFile(['./.temp/Alert.gz'], [''], 1, 0) +''' +bit.sendCommand([''], 0) +bit.sendCommand([''], 0) +bit.sendCommand([''], 0) +bit.sendCommand([''], 0) +bit.sendCommand([''], 0) +bit.sendCommand([''], 0) +bit.sendCommand([''], 0) +bit.sendCommand([''], 0) +bit.sendCommand([''], 0) +bit.sendCommand([''], 0) +bit.sendCommand([''], 0) +bit.sendCommand([''], 0) +bit.sendCommand([''], 0) +bit.sendCommand([''], 0) +''' +#bit.sendCommand(['006833'], 1) +#bit.sendCommand('./.temp/Upgrades\\split\\PipelineMaint_6.15.1.5714_03', 'I2MSG', 0) + + + + +#bit.sendCommand([''], 0) + +#bit.sendCommand([''], 1) + +#bit.sendCommand([''], 1) + +#bit.sendCommand([''], 1) + 
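#-------------------------------------------------------------------------
# Sketch (illustrative only): the splitting logic shared by sendStarBundle()
# and sendUpgrade() above, pulled out on its own. Files at or above
# CHUNK_SIZE bytes are cut into zero-padded, numbered parts ("<name>_01",
# "<name>_02", ...) so each part can be transmitted separately and
# reassembled on the i2. The helper name and its arguments are assumptions.
import math
import os

CHUNK_SIZE = 67550000  # same threshold used by the functions above

def splitForTransmission(srcFile, outDir):
    size = os.path.getsize(srcFile)
    totalParts = math.ceil(size / CHUNK_SIZE)
    baseName = os.path.splitext(os.path.basename(srcFile))[0]
    parts = []
    with open(srcFile, 'rb') as f:
        for n in range(1, totalParts + 1):
            chunk = f.read(CHUNK_SIZE)
            partPath = os.path.join(outDir, baseName + '_' + format(n, '02d'))
            with open(partPath, 'wb') as p:
                p.write(chunk)
            parts.append(partPath)
    return parts
#-------------------------------------------------------------------------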
+#makeStarBundle('./.temp/i2State\\SD\\Changeset\\audio\\domesticSD\\vocalLocal\\Cantore', 'Changeset', 'Domestic_SD_Universe', '63702614401035937', '09/19/2022', 0) + diff --git a/py2Lib/starbundle.py b/py2Lib/starbundle.py new file mode 100644 index 0000000..306cfd8 --- /dev/null +++ b/py2Lib/starbundle.py @@ -0,0 +1,45 @@ +import os +import shutil +import random + +def makeStarBundle(Directory, Type, flag, Version, date, sendAfter): + header = '\n ' + Version + '\n ' + date + '\n ' + Type + '\n \n' + with open('C:\\Bundle\\MetaData\\manifest.xml', 'w') as ma: + ma.write(header) + ma.close() + + for (root,dirs,files) in os.walk(Directory, topdown=True): + num = 0 + for name in files: + rootDir = None + if Type == "Managed": + rootDir= root[22:] + else: + rootDir = root[24:] + bDest = os.path.join(rootDir,name) + fDest = os.path.join(root,name) + signature = ''.join(random.choice('abcdef0123456789') for i in range(32)) + splitExt = os.path.splitext(name) + bName = splitExt[0] + '_' + signature + shutil.copy(fDest, 'C:\\Bundle\\' + bName) + if flag == 'Domestic_Universe': + flag = 'flags="Domestic_Universe"' + elif flag == 'Domestic_SD_Universe': + flag = 'flags="Domestic_SD_Universe"' + else: + pass + with open('C:\\Bundle\\MetaData\\manifest.xml', 'a') as mb: + mb.write(' \n') + mb.close() + num += 1 + closer = ' \n' + with open('C:\\Bundle\\MetaData\\manifest.xml', 'a') as ma: + ma.write(closer) + ma.close() + + + + + + #Directory which contains Files to be bundled Type Flags Version Date SendImmediately(Does not apply to this script) +# makeStarBundle('./.temp/i2State/SD/Managed/Events', 'Managed', 'Domestic_SD_Universe', '637898877227230030', '09/28/2022', 0) \ No newline at end of file diff --git a/radar/ImageSequenceDefs.json b/radar/ImageSequenceDefs.json new file mode 100644 index 0000000..4a4ee14 --- /dev/null +++ b/radar/ImageSequenceDefs.json @@ -0,0 +1,67 @@ +{ + "ImageSequenceDefs": { + "Radar-US": { + "LowerLeftLong": -126.834935, + "LowerLeftLat": 22.197152, + "UpperRightLong": -65.178922, + "UpperRightLat": 50.231604, + "VerticalAdjustment": 1.1985928, + "OriginalImageWidth": 4096, + "OriginalImageHeight": 1968, + "MaxImages": 36, + "Gap": 4, + "ImagesInterval": 300, + "Expiration": 10800, + "DeletePadding": 1800, + "FileNameDateFormat": "yyyyMMddHHmm" + }, + + "Radar-PR": { + "LowerLeftLong": -162.633484, + "LowerLeftLat": 16.569253, + "UpperRightLong": -151.702146, + "UpperRightLat": 24.773036, + "VerticalAdjustment": 1.199, + "OriginalImageWidth": 1300, + "OriginalImageHeight": 600, + "MaxImages": 12, + "Gap": 4, + "ImagesInterval": 900, + "Expiration": 10800, + "DeletePadding": 1800, + "FileNameDateFormat": "yyyyMMddHHmm" + }, + + "Radar-HI": { + "LowerLeftLong": -73.427336, + "LowerLeftLat": 14.558724, + "UpperRightLong": -59.620365, + "UpperRightLat": 21.826707, + "VerticalAdjustment": 1.1985928, + "OriginalImageWidth": 1500, + "OriginalImageHeight": 1500, + "MaxImages": 36, + "Gap": 4, + "ImagesInterval": 300, + "Expiration": 10800, + "DeletePadding": 1800, + "FileNameDateFormat": "yyyyMMddHHmm" + }, + + "Radar-AK": { + "LowerLeftLong": -178.505920, + "LowerLeftLat": 51.379081, + "UpperRightLong": -124.517227, + "UpperRightLat": 71.504753, + "VerticalAdjustment": 1.0175897, + "OriginalImageWidth": 2000, + "OriginalImageHeight": 1600, + "MaxImages": 36, + "Gap": 4, + "ImagesInterval": 300, + "Expiration": 10800, + "DeletePadding": 1800, + "FileNameDateFormat": "yyyyMMddHHmm" + } + } +} \ No newline at end of file diff --git a/radar/RadarProcessor.py 
b/radar/RadarProcessor.py new file mode 100644 index 0000000..65d62bf --- /dev/null +++ b/radar/RadarProcessor.py @@ -0,0 +1,77 @@ +from datetime import datetime +import math + + +class Point(): + def __init__(self, x, y): + self.x = x + self.y = y + +class LatLong(): + def __init__(self, x, y): + self.x = x + self.y = y + +class ImageBoundaries(): + def __init__(self, LowerLeftLong,LowerLeftLat,UpperRightLong,UpperRightLat,VerticalAdjustment,OGImgW,OGImgH,ImagesInterval,Expiration): + self.LowerLeftLong = LowerLeftLong + self.LowerLeftLat = LowerLeftLat + self.UpperRightLong = UpperRightLong + self.UpperRightLat = UpperRightLat + + self.VerticalAdjustment = VerticalAdjustment + + self.OGImgW = OGImgW + self.OGImgH = OGImgH + self.ImageInterval = ImagesInterval + self.Expiration = Expiration + + def GetUpperRight(self) -> LatLong: + return LatLong( + x = self.UpperRightLat, + y = self.UpperRightLong + ) + + def GetLowerLeft(self) -> LatLong: + return LatLong( + x = self.LowerLeftLat, + y = self.LowerLeftLong + ) + + def GetUpperLeft(self) -> LatLong: + return LatLong( + x = self.UpperRightLat, y = self.LowerLeftLong + ) + + def GetLowerRight(self) -> LatLong: + return LatLong( + x = self.LowerLeftLat, y = self.UpperRightLong + ) + + +# Utils + +def WorldCoordinateToTile(coord: Point) -> Point: + scale = 1 << 6 + + return Point( + x = math.floor(coord.x * scale / 255), + y = math.floor(coord.y * scale / 255) + ) + +def WorldCoordinateToPixel(coord: Point) -> Point: + scale = 1 << 6 + + return Point( + x = math.floor(coord.x * scale), + y = math.floor(coord.y * scale) + ) + +def LatLongProject(lat, long) -> Point: + siny = math.sin(lat * math.pi / 180) + siny = min(max(siny, -0.9999), 0.9999) + + return Point( + x = 256 * (0.5 + long / 360), + y = 256 * (0.5 - math.log((1 + siny) / (1 - siny)) / (4 * math.pi)) + ) \ No newline at end of file diff --git a/radar/TWCRadarCollector.py b/radar/TWCRadarCollector.py new file mode 100644 index 0000000..d766625 --- /dev/null +++ b/radar/TWCRadarCollector.py @@ -0,0 +1,109 @@ +import asyncio +import aiohttp +import aiofiles +import logging, coloredlogs +from py2Lib import bit +from datetime import datetime +from os import path, listdir, remove + + +# Open the config file and make it accessible via "cfg" +import json +with open("conf.json", "r") as file: + cfg = json.load(file) + +l = logging.getLogger(__name__) +coloredlogs.install(level="DEBUG") + +async def getValidTimestamps(radarType:str) -> list: + times = [] + maxImages = 0 + url = None + series = None + + async with aiohttp.ClientSession() as s: + if (radarType == "satrad"): + maxImages = 12 + url = "https://api.weather.com/v3/TileServer/series/productSet?apiKey=" + cfg[twcApiKey] + "&filter=satrad" + series = 'satrad' + + elif (radarType == "radarmosaic"): + maxImages = 36 + url = "https://api.weather.com/v3/TileServer/series/productSet?apiKey=" + cfg[twcApiKey] + "&filter=twcRadarMosaic" + series = 'twcRadarMosaic' + + else: + l.error(f'Invalid series filter "{radarType}" -- Valid filters include "satrad", "radarmosaic"') + return times + + async with s.get(url) as r: + res = await r.json() + + for t in range(0, len(res['seriesInfo'][series]['series'])): + if (t <= (maxImages - 1)): + time = res['seriesInfo'][series]['series'][t]['ts'] + + times.append(time) + + return times + +async def downloadRadarFrames(radarType:str, timestamps: list) -> list: + url_root = None + imagesToSend = [] + + if (radarType == "satrad"): + url_root = cfg[radarBaseUrl] + elif (radarType == "radarmosaic"): + 
url_root = cfg[radarBaseUrl] + else: + l.error(f'Invalid radar type "{radarType}" -- Valid radar types include "satrad", "radarmosaic"') + return + + # Clear out expired radar frames + for i in listdir(f'.temp/output/{radarType}'): + if i.split('.')[0] not in [str(x) for x in timestamps] and i != "Thumbs.db": + l.debug(f"Deleting {i} as it is no longer valid.") + remove(f".temp/output/{radarType}/" + i) + + async with aiohttp.ClientSession() as s: + + for ts in timestamps: + if path.exists(f".temp/output/{radarType}/{ts}.tiff"): + l.debug(f"{radarType}/{ts}.tiff exists, skipping.") + continue + + async with s.get(url_root + f"{ts}.tiff") as r: + l.info(f"Downloading {radarType} frame {timestamps.index(ts) + 1} / {len(timestamps)}") + + if r.status == 404: + l.warning(f"Failed to download {radarType}/{ts}.tiff -- Server likely has not generated this frame yet.") + continue + + f = await aiofiles.open(f'.temp/output/{radarType}/{ts}.tiff', mode='wb') + await f.write(await r.read()) + await f.close() + + imagesToSend.append(f'.temp/output/{radarType}/{ts}.tiff') + + return imagesToSend + + +def getTime(timestamp) -> str: + time:datetime = datetime.utcfromtimestamp(timestamp).strftime("%m/%d/%Y %H:%M:%S") + + return str(time) + +async def collect(radarType: str): + loop = asyncio.get_running_loop() + ts = await getValidTimestamps(radarType) + frames = await downloadRadarFrames(radarType, ts) + + commands = [] + for i in range(0, len(frames)): + if radarType == "radarmosaic": + commands.append( '' ) + + if radarType == "satrad": + commands.append( '' ) + + bit.sendFile([frames[i]], [commands[i]], 1, 0) diff --git a/radar/TWCRadarProcessor.py b/radar/TWCRadarProcessor.py new file mode 100644 index 0000000..152dc1b --- /dev/null +++ b/radar/TWCRadarProcessor.py @@ -0,0 +1,331 @@ +import asyncio +import collections +from genericpath import exists +import gzip +from multiprocessing import Pool +import aiohttp +import json +import time as epochTime +import requests +import logging,coloredlogs + +from os import path, mkdir, listdir, remove, cpu_count +from shutil import rmtree +from PIL import Image as PILImage +from wand.image import Image as wandImage +from wand.color import Color + + +radarType = "Radar-US" + +l = logging.getLogger(__name__) +coloredlogs.install() + +upperLeftX,upperLeftY,lowerRightX,lowerRightY = 0,0,0,0 +xStart,xEnd,yStart,yEnd = 0,0,0,0 +imgW = 0 +imgH = 0 + +import sys +sys.path.append("./py2lib") +sys.path.append("./radar") +from RadarProcessor import * +import bit + +async def getValidTimestamps(boundaries:ImageBoundaries) -> list: + """Gets all valid UNIX timestamps for the TWCRadarMosaic product """ + l.info("Getting timestamps for the radar..") + times = [] + + async with aiohttp.ClientSession() as session: + url = "https://api.weather.com/v3/TileServer/series/productSet?apiKey=" + cfg[twcApiKey] + "&filter=twcRadarMosaic" + async with session.get(url) as r: + response = await r.json() + + for t in range(0, len(response['seriesInfo']['twcRadarMosaic']['series'])): + + if (t <= 35): + time = response['seriesInfo']['twcRadarMosaic']['series'][t]['ts'] + + # Don't add frames that aren't at the correct interval + if (time % boundaries.ImageInterval != 0): + l.debug(f"Ignoring {time} -- Not at the correct frame interval.") + continue + + # Don't add frames that are expired + if (time < (datetime.utcnow().timestamp() - epochTime.time()) / 1000 - boundaries.Expiration): + l.debug(f"Ignoring {time} -- Expired.") + continue + + times.append(time) + + return times + +def 
downloadRadarTile(url, p, fn): + img = requests.get(url, stream=True) + ts = fn.split("_")[0] + download = True + + # Make the path if it doesn't exist + if exists(f"./.temp/tiles/output/{ts}.tiff"): + l.debug("Not downloading tiles for timestamp " + str(ts) + " since a frame for it already exists." ) + download = False + if not path.exists(p): + mkdir(p) + l.debug(f"Download {ts}") + if exists(f"{p}/{fn}"): + l.debug(f"Not downloading new tiles for {ts} as they already exist.") + download = False + + if (img.status_code == 200 and download): + with open(f'{p}/{fn}', 'wb') as tile: + for data in img: + tile.write(data) + elif (img.status_code != 200): + l.error("ERROR DOWNLOADING " + p + "\nSTATUS CODE " + str(img.status_code)) + elif (download == False): + pass + + + +def getImageBoundaries() -> ImageBoundaries: + """ Gets the image boundaries for the specified radar definition """ + with open('radar/ImageSequenceDefs.json', 'r') as f: + ImageSequenceDefs = json.loads(f.read()) + + seqDef = ImageSequenceDefs['ImageSequenceDefs'][radarType] + + return ImageBoundaries( + LowerLeftLong = seqDef['LowerLeftLong'], + LowerLeftLat= seqDef['LowerLeftLat'], + UpperRightLong= seqDef['UpperRightLong'], + UpperRightLat= seqDef['UpperRightLat'], + VerticalAdjustment= seqDef['VerticalAdjustment'], + OGImgW= seqDef['OriginalImageWidth'], + OGImgH= seqDef['OriginalImageHeight'], + ImagesInterval= seqDef['ImagesInterval'], + Expiration= seqDef['Expiration'] + ) + +def CalculateBounds(upperRight:LatLong, lowerLeft:LatLong, upperLeft:LatLong, lowerRight: LatLong): + """ Calculates the image bounds for radar stitching & tile downloading """ + upperRightTile:Point = WorldCoordinateToTile(LatLongProject(upperRight.x, upperRight.y)) + lowerLeftTile:Point = WorldCoordinateToTile(LatLongProject(lowerLeft.x, lowerLeft.y)) + upperLeftTile:Point = WorldCoordinateToTile(LatLongProject(upperLeft.x, upperLeft.y)) + lowerRightTile:Point = WorldCoordinateToTile(LatLongProject(lowerRight.x,lowerRight.y)) + + upperLeftPx:Point = WorldCoordinateToPixel(LatLongProject(upperLeft.x, upperLeft.y)) + lowerRightPx:Point = WorldCoordinateToPixel(LatLongProject(lowerRight.x,lowerRight.y)) + + global upperLeftX,upperLeftY,lowerRightX,lowerRightY + global xStart,xEnd,yStart,yEnd + global imgW,imgH + + upperLeftX = upperLeftPx.x - upperLeftTile.x * 256 + upperLeftY = upperLeftPx.y - upperLeftTile.y * 256 + lowerRightX = lowerRightPx.x - upperLeftTile.x * 256 + lowerRightY = lowerRightPx.y - upperLeftTile.y * 256 + + # Set the xStart, xEnd, yStart, and yEnd positions so we can download tiles that are within the tile coordinate regions + xStart = int(upperLeftTile.x) + xEnd = int(upperRightTile.x) + yStart = int(upperLeftTile.y) + yEnd = int(lowerLeftTile.y) + + # Set the image width & height based off the x and y tile amounts + + # These should amount to the amount of tiles needed to be downloaded + # for both the x and y coordinates. + xTiles:int = xEnd - xStart + yTiles:int = yEnd - yStart + + imgW = 256 * (xTiles + 1) + imgH = 256 * (yTiles + 1) + print(f"{imgW} x {imgH}") + +def convertPaletteToWXPro(filepath:str): + """ Converts the color palette of a radar frame to one acceptable to the i2 """ + img = wandImage(filename = filepath) + + + rainColors = [ + Color('rgb(64,204,85'), # lightest green + Color('rgb(0,153,0'), # med green + Color('rgb(0,102,0)'), # darkest green + Color('rgb(191,204,85)'), # yellow + Color('rgb(191,153,0)'), # orange + Color('rgb(255,51,0)'), # ... 
+ Color('rgb(191,51,0)'), # red + Color('rgb(64,0,0)') # dark red + ] + + mixColors = [ + Color('rgb(253,130,215)'), # light purple + Color('rgb(208,94,176)'), # ... + Color('rgb(190,70,150)'), # ... + Color('rgb(170,50,130)') # dark purple + ] + + snowColors = [ + Color('rgb(150,150,150)'), # dark grey + Color('rgb(180,180,180)'), # light grey + Color('rgb(210,210,210)'), # grey + Color('rgb(230,230,230)') # white + ] + + # Replace rain colors + img.opaque_paint(Color('rgb(99, 235, 99)'), rainColors[0], 7000.0) + img.opaque_paint(Color('rgb(28,158,52)'), rainColors[1], 7000.0) + img.opaque_paint(Color('rgb(0, 63, 0)'), rainColors[2], 7000.0) + + img.opaque_paint(Color('rgb(251,235,2)'), rainColors[3], 7000.0) + img.opaque_paint(Color('rgb(238, 109, 2)'), rainColors[4], 7000.0) + img.opaque_paint(Color('rgb(210,11,6)'), rainColors[5], 7000.0) + img.opaque_paint(Color('rgb(169,5,3)'), rainColors[6], 7000.0) + img.opaque_paint(Color('rgb(128,0,0)'), rainColors[7], 7000.0) + + # Replace mix colors + img.opaque_paint(Color('rgb(255,160,207)'), mixColors[0], 7000.0) + img.opaque_paint(Color('rgb(217,110,163)'), mixColors[1], 7000.0) + img.opaque_paint(Color('rgb(192,77,134)'), mixColors[2], 7000.0) + img.opaque_paint(Color('rgb(174,51,112)'), mixColors[3], 7000.0) + img.opaque_paint(Color('rgb(146,13,79)'), mixColors[3], 7000.0) + + # Replace snow colors + img.opaque_paint(Color('rgb(138,248,255)'), snowColors[0], 7000.0) + img.opaque_paint(Color('rgb(110,203,212)'), snowColors[1], 7000.0) + img.opaque_paint(Color('rgb(82,159,170)'), snowColors[2], 7000.0) + img.opaque_paint(Color('rgb(40,93,106)'), snowColors[3], 7000.0) + img.opaque_paint(Color('rgb(13,49,64)'), snowColors[3]), 7000.0 + + img.compression = 'lzw' + img.save(filename=filepath) + + + +def getTime(timestamp) -> str: + time:datetime = datetime.utcfromtimestamp(timestamp).strftime("%m/%d/%Y %H:%M:%S") + + return str(time) + + +async def makeRadarImages(): + """ Creates proper radar frames for the i2 """ + l.info("Downloading frames for the Regional Radar...") + + combinedCoordinates = [] + + boundaries = getImageBoundaries() + upperRight:LatLong = boundaries.GetUpperRight() + lowerLeft:LatLong = boundaries.GetLowerLeft() + upperLeft:LatLong = boundaries.GetUpperLeft() + lowerRight:LatLong = boundaries.GetLowerRight() + + CalculateBounds(upperRight, lowerLeft, upperLeft, lowerRight) + times = await getValidTimestamps(boundaries) + + # Get rid of invalid radar frames + for i in listdir('./.temp/tiles/output'): + if i.split('.')[0] not in [str(x) for x in times] and i != "Thumbs.db": + l.debug(f"Deleting {i} as it is no longer valid.") + remove("./.temp/tiles/output/" + i) + + # Collect coordinates for the frame tiles + for y in range(yStart, yEnd): + if y <= yEnd: + for x in range(xStart, xEnd): + if x <= xEnd: + combinedCoordinates.append(Point(x,y)) + + # Create urls, paths, and filenames to download tiles for. 
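# ------------------------------------------------------------------------
# Sketch (illustrative only): how a corner lat/long becomes the zoom-6
# "x:y:6" value used in the TileServer URLs assembled below. The helper
# name is an assumption; ./radar is assumed to already be on sys.path (it
# is appended near the top of this file). The sample point is the Radar-US
# upper-right corner from ImageSequenceDefs.json.
from RadarProcessor import LatLongProject, WorldCoordinateToTile

def tileXYZForLatLong(lat, long):
    world = LatLongProject(lat, long)    # lat/long -> world coordinate
    tile = WorldCoordinateToTile(world)  # world coordinate -> tile column/row
    return f"{int(tile.x)}:{int(tile.y)}:6"

# e.g. tileXYZForLatLong(50.231604, -65.178922)
# ------------------------------------------------------------------------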
+ urls = [] + paths = [] + filenames = [] + for i in range(0, len(times)): + for c in range(0, len(combinedCoordinates)): + if not exists(f'./.temp/tiles/output/{times[i]}.tiff'): + urls.append(f"https://api.weather.com/v3/TileServer/tile?product=twcRadarMosaic&ts={str(times[i])}&xyz={combinedCoordinates[c].x}:{combinedCoordinates[c].y}:6&apiKey={cfg[twcApiKey]}") + paths.append(f"./.temp/tiles/{times[i]}") + filenames.append(f"{times[i]}_{combinedCoordinates[c].x}_{combinedCoordinates[c].y}.png") + + l.debug(len(urls)) + if len(urls) != 0 and len(urls) >= 6: + with Pool(cpu_count() - 1) as p: + p.starmap(downloadRadarTile, zip(urls, paths, filenames)) + p.close() + p.join() + elif len(urls) < 6 and len(urls) != 0: # We don't need to run more threads than we need to, that's how we get halted. + with Pool(len(urls)) as p: + p.starmap(downloadRadarTile, zip(urls, paths, filenames)) + p.close() + p.join() + elif len(urls) == 0: + l.info("No new radar frames need to be downloaded.") + return + + # Stitch them all together! + + imgsToGenerate = [] + framesToComposite = [] + finished = [] + files = [] + + for t in times: + imgsToGenerate.append(PILImage.new("RGB", (imgW, imgH))) + + # Stitch the frames together + for i in range(0, len(imgsToGenerate)): + if not exists(F"./.temp/tiles/output/{times[i]}.tiff"): + l.debug(f"Generate frame for {times[i]}") + for c in combinedCoordinates: + path = f"./.temp/tiles/{times[i]}/{times[i]}_{c.x}_{c.y}.png" + + xPlacement = (c.x - xStart) * 256 + yPlacement = (c.y - yStart) * 256 + + placeTile = PILImage.open(path) + + imgsToGenerate[i].paste(placeTile, (xPlacement, yPlacement)) + + # Don't render it with an alpha channel + imgsToGenerate[i].save(f"./.temp/tiles/output/{times[i]}.tiff", compression = 'tiff_lzw') + framesToComposite.append(f"./.temp/tiles/output/{times[i]}.tiff") # Store the path so we can composite it using WAND and PIL + + # Remove the tileset as we don't need it anymore! + rmtree(f'./.temp/tiles/{times[i]}') + + # Composite images for the i2 + imgsProcessed = 0 + for img in framesToComposite: + imgsProcessed += 1 + l.debug("Attempting to composite " + img) + l.info(f"Processing radar frame {imgsProcessed} / 36") + + # Crop the radar images something that the i2 will actually take + img_raw = wandImage(filename=img) + img_raw.crop(upperLeftX, upperLeftY, width = int(lowerRightX - upperLeftX), height = int(lowerRightY - upperLeftY)) + img_raw.compression = 'lzw' + img_raw.save(filename=img) + + # Resize using PIL + imgPIL = PILImage.open(img) + imgPIL = imgPIL.resize((boundaries.OGImgW, boundaries.OGImgH), 0) + imgPIL.save(img) + + convertPaletteToWXPro(img) + + finished.append(img) + + commands = [] + # Send them all to the i2! 
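# ------------------------------------------------------------------------
# Sketch (illustrative only): the tile-stitching step performed earlier in
# makeRadarImages(), in isolation -- each 256x256 tile is pasted into a
# blank canvas at an offset derived from its tile column/row relative to
# the region's upper-left tile. Function and argument names are assumptions.
from PIL import Image as PILImage

def stitchTiles(tiles, xStart, yStart, imgW, imgH):
    # tiles: iterable of (path, tileX, tileY) tuples for a single timestamp
    canvas = PILImage.new("RGB", (imgW, imgH))
    for tilePath, tileX, tileY in tiles:
        tile = PILImage.open(tilePath)
        canvas.paste(tile, ((tileX - xStart) * 256, (tileY - yStart) * 256))
    return canvas
# ------------------------------------------------------------------------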
+ for i in range(0, len(finished)): + commands.append( '' ) + # print(file + "\n" + command) + + bit.sendFile([finished[i]], [commands[i]], 1, 0) + + l.info("Downloaded and sent Regional Radar frames!") + + +if __name__ == "__main__": + asyncio.run(makeRadarImages()) \ No newline at end of file diff --git a/radar/TWCSatRadProcessor.py b/radar/TWCSatRadProcessor.py new file mode 100644 index 0000000..7e6362f --- /dev/null +++ b/radar/TWCSatRadProcessor.py @@ -0,0 +1,69 @@ +import asyncio +import aiohttp +import time as epochTime +import datetime +import requests +from RadarProcessor import * +from os import mkdir, path +from genericpath import exists + + +upperLeftX,upperLeftY,lowerRightX,lowerRightY = 0,0,0,0 +xStart,xEnd,yStart,yEnd = 0,0,0,0 +imgW = 0 +imgH = 0 + +async def getValidTimestamps(boundaries:ImageBoundaries) -> list: + """Gets all valid UNIX timestamps for the TWCRadarMosaic product """ + print("Getting timestamps for the radar..") + times = [] + + async with aiohttp.ClientSession() as session: + url = "https://api.weather.com/v3/TileServer/series/productSet?apiKey=" + cfg[twcApiKey] + "&filter=twcRadarMosaic" + async with session.get(url) as r: + response = await r.json() + + for t in range(0, len(response['seriesInfo']['twcRadarMosaic']['series'])): + + if (t <= 35): + time = response['seriesInfo']['twcRadarMosaic']['series'][t]['ts'] + + # Don't add frames that aren't at the correct interval + if (time % boundaries.ImageInterval != 0): + print(f"Ignoring {time} -- Not at the correct frame interval.") + continue + + # Don't add frames that are expired + if (time < (datetime.utcnow().timestamp() - epochTime.time()) / 1000 - boundaries.Expiration): + print(f"Ignoring {time} -- Expired.") + continue + + times.append(time) + + return times + +def downloadRadarTile(url, p, fn): + img = requests.get(url, stream=True) + ts = fn.split("_")[0] + download = True + + # Make the path if it doesn't exist + if exists(f"tiles/output/{ts}.tiff"): + print("Not downloading tiles for timestamp " + str(ts) + " since a frame for it already exists." 
) + download = False + if not path.exists(p): + mkdir(p) + print(f"Download {ts}") + if exists(f"{p}/{fn}"): + print(f"Not downloading new tiles for {ts} as they already exist.") + download = False + + if (img.status_code == 200 and download): + with open(f'{p}/{fn}', 'wb') as tile: + for data in img: + tile.write(data) + elif (img.status_code != 200): + print("ERROR DOWNLOADING " + p + "\nSTATUS CODE " + str(img.status_code)) + elif (download == False): + pass + diff --git a/radar/__init__.py b/radar/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/recordGenerators/AchesAndPains.py b/recordGenerators/AchesAndPains.py new file mode 100644 index 0000000..0aa9a51 --- /dev/null +++ b/recordGenerators/AchesAndPains.py @@ -0,0 +1,85 @@ +import shutil +import requests +import logging,coloredlogs +from py2Lib import bit +import Util.MachineProductCfg as MPC +import records.LFRecord as LFR +import gzip +from os import remove +import xml.dom.minidom +import aiohttp, aiofiles, asyncio + +l = logging.getLogger(__name__) +coloredlogs.install() + +geocodes = [] +coopIds = [] + +for i in MPC.getPrimaryLocations(): + coopIds.append(LFR.getCoopId(i)) + geocodes.append(LFR.getLatLong(i).replace('/', ',')) + +# Open the config file and make it accessible via "cfg" +import json +with open("conf.json", "r") as file: + cfg = json.load(file) + +apiKey = cfg[twcApiKey] + +async def getData(coopId, geocode): + fetchUrl = f"https://api.weather.com/v2/indices/achePain/daypart/7day?geocode={geocode}&language=en-US&format=xml&apiKey={apiKey}" + data = "" + + async with aiohttp.ClientSession() as s: + async with s.get(fetchUrl) as r: + if r.status != 200: + l.error(f"Failed to write AchesAndPains record -- status code {r.status}") + return + + data = await r.text() + + + newData = data[63:-26] + + i2Doc = f'\n \n {newData}\n {coopId}\n ' + + async with aiofiles.open('./.temp/AchesAndPains.i2m', 'a') as f: + await f.write(i2Doc) + await f.close() + +async def makeRecord(): + loop = asyncio.get_running_loop() + l.info("Writing AchesAndPains record.") + + header = '' + footer = '' + + async with aiofiles.open('./.temp/AchesAndPains.i2m', 'a') as doc: + await doc.write(header) + + for (x, y) in zip(coopIds, geocodes): + await getData(x,y) + + async with aiofiles.open('./.temp/AchesAndPains.i2m', 'a') as end: + await end.write(footer) + + dom = xml.dom.minidom.parse('./.temp/AchesAndPains.i2m') + xmlPretty = dom.toprettyxml(indent= " ") + + async with aiofiles.open('./.temp/AchesAndPains.i2m', 'w') as g: + await g.write(xmlPretty[23:]) + await g.close() + + + # Compresss i2m to gzip + with open ('./.temp/AchesAndPains.i2m', 'rb') as f_in: + with gzip.open('./.temp/AchesAndPains.gz', 'wb') as f_out: + shutil.copyfileobj(f_in, f_out) + + file = "./.temp/AchesAndPains.gz" + command = '' + + bit.sendFile([file], [command], 1, 0) + + remove('./.temp/AchesAndPains.i2m') + remove('./.temp/AchesAndPains.gz') \ No newline at end of file diff --git a/recordGenerators/AirQuality.py b/recordGenerators/AirQuality.py new file mode 100644 index 0000000..1583671 --- /dev/null +++ b/recordGenerators/AirQuality.py @@ -0,0 +1,114 @@ +import requests +import gzip +import os +import shutil +import xml.dom.minidom +import logging,coloredlogs +import aiohttp, aiofiles, asyncio + +l = logging.getLogger(__name__) +coloredlogs.install() + +import sys +sys.path.append("./py2lib") +sys.path.append("./Util") +sys.path.append("./records") +import bit +import MachineProductCfg as MPC +import LFRecord as LFR + +locationIds = [] 
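# ------------------------------------------------------------------------
# Sketch (illustrative only): the fetch-and-append pattern the record
# generators in this directory follow -- request one location's XML from
# the TWC API, trim it to its payload, wrap it with the location's
# identifier, and append it to the working .i2m document. The helper name
# is an assumption; each generator uses its own URL, trim offsets and
# wrapper elements.
import aiohttp
import aiofiles

async def appendLocationData(url, i2mPath):
    async with aiohttp.ClientSession() as s:
        async with s.get(url) as r:
            if r.status != 200:
                return
            data = await r.text()
    # The real generators slice the response here (e.g. data[63:-26]) to
    # drop the XML prolog and outer element, then wrap what is left with
    # the location id before appending it.
    async with aiofiles.open(i2mPath, 'a') as f:
        await f.write(data)
# ------------------------------------------------------------------------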
+zipCodes = [] +epaIds = [] + +for i in MPC.getPrimaryLocations(): + locationIds.append(LFR.getCoopId(i)) + zipCodes.append(LFR.getZip(i)) + epaIds.append(LFR.getEpaId(i)) + +# Open the config file and make it accessible via "cfg" +import json +with open("conf.json", "r") as file: + cfg = json.load(file) + +apiKey = cfg[twcApiKey] + +async def getData(epaId, zipcode): + url = f"https://api.weather.com/v1/location/{zipcode}:4:US/airquality.xml?language=en-US&apiKey={apiKey}" + data = "" + + async with aiohttp.ClientSession() as s: + async with s.get(url) as r: + data = await r.text() + + newData = data[57:-11] + + # Write to i2doc file + i2Doc = f'' + '' + newData + f'{epaId}' + + async with aiofiles.open("./.temp/AirQuality.i2m", 'a') as f: + await f.write(i2Doc) + await f.close() + +async def writeData(): + loop = asyncio.get_running_loop() + useData = False + workingEpaIds = [] + + for i in epaIds: + if i == None: + l.debug(f"No EPA ID found for location -- Skipping.") + else: + l.debug(f"EPA ID found for location! Writing data for Air Quality.") + workingEpaIds.append(i) + useData = True + + + # Check to see if we even have EPA ids, as some areas don't have air quality reports + if (useData): + try: + l.info("Writing an AirQuality record.") + header = '' + footer = "" + + async with aiofiles.open("./.temp/AirQuality.i2m", 'w') as doc: + await doc.write(header) + + for (x, y) in zip(workingEpaIds, zipCodes): + await getData(x, y) + + async with aiofiles.open("./.temp/AirQuality.i2m", 'a') as end: + await end.write(footer) + + dom = xml.dom.minidom.parse("./.temp/AirQuality.i2m") + xmlPretty = dom.toprettyxml(indent = " ") + + async with aiofiles.open("./.temp/AirQuality.i2m", 'w') as g: + await g.write(xmlPretty[23:]) + await g.close() + + files = [] + commands = [] + with open("./.temp/AirQuality.i2m", 'rb') as f_in: + with gzip.open("./.temp/AirQuality.gz", 'wb') as f_out: + shutil.copyfileobj(f_in, f_out) + + gZipFile = "./.temp/AirQuality.gz" + + files.append(gZipFile) + comand = commands.append('') + numFiles = len(files) + + bit.sendFile(files, commands, numFiles, 0) + + os.remove("./.temp/AirQuality.i2m") + os.remove("./.temp/AirQuality.gz") + except Exception as e: + l.error("DO NOT REPORT THE ERROR BELOW") + l.error("Failed to write an AirQuality record.") + os.remove('./.temp/AirQuality.i2m') + else: + l.info("Not writing an AirQuality record due to a lack of working EPA ids.") + + + diff --git a/recordGenerators/AirportDelays.py b/recordGenerators/AirportDelays.py new file mode 100644 index 0000000..b8f952c --- /dev/null +++ b/recordGenerators/AirportDelays.py @@ -0,0 +1,107 @@ +import requests +import gzip +import os +import shutil +import xml.dom.minidom +import logging,coloredlogs +import aiohttp, aiofiles, asyncio + +import sys +sys.path.append("./py2lib") +sys.path.append("./Util") +sys.path.append("./records") +import bit +import MachineProductCfg as MPC +import LFRecord as LFR + +l = logging.getLogger(__name__) +coloredlogs.install() + +locationIds = [] +zipCodes = [] +airports = [] + +for i in MPC.getPrimaryLocations(): + locationIds.append(LFR.getCoopId(i)) + zipCodes.append(LFR.getZip(i)) + +airports = MPC.getAirportCodes() +l.debug(airports) + +# Open the config file and make it accessible via "cfg" +import json +with open("conf.json", "r") as file: + cfg = json.load(file) + +apiKey = cfg[twcApiKey] + +async def getData(airport): + url = f"https://api.weather.com/v1/airportcode/{airport}/airport/delays.xml?language=en-US&apiKey={apiKey}" + data = "" + + async with 
aiohttp.ClientSession() as s: + async with s.get(url) as r: + data = await r.text() + + newData = data[48:-11].replace('¿', '-') + + # Write to i2doc file + i2Doc = f'' + '' + newData + f'{airport}' + + async with aiofiles.open("./.temp/AirportDelays.i2m", 'a') as f: + await f.write(i2Doc) + await f.close() + +async def writeData(): + loop = asyncio.get_running_loop() + useData = False + airportsWithDelays = [] + + for x in airports: + async with aiohttp.ClientSession() as s: + async with s.get(f"https://api.weather.com/v1/airportcode/{x}/airport/delays.xml?language=en-US&apiKey={apiKey}") as r: + if r.status != 200: + l.debug(f"No delay for {x} found, skipping..") + else: + airportsWithDelays.append(x) + useData = True + + if (useData): + l.info("Writing an AirportDelays record.") + header = '' + footer = "" + + async with aiofiles.open("./.temp/AirportDelays.i2m", 'w') as doc: + await doc.write(header) + + for x in airportsWithDelays: + await getData(x) + + async with aiofiles.open("./.temp/AirportDelays.i2m", 'a') as end: + await end.write(footer) + + dom = xml.dom.minidom.parse("./.temp/AirportDelays.i2m") + prettyXml = dom.toprettyxml(indent=" ") + + async with aiofiles.open("./.temp/AirportDelays.i2m", 'w') as g: + await g.write(prettyXml) + await g.close() + + files = [] + commands = [] + with open("./.temp/AirportDelays.i2m", 'rb') as f_in: + with gzip.open("./.temp/AirportDelays.gz", 'wb') as f_out: + shutil.copyfileobj(f_in, f_out) + + gZipFile = "./.temp/AirportDelays.gz" + + files.append(gZipFile) + comand = commands.append('') + numFiles = len(files) + + bit.sendFile(files, commands, numFiles, 0) + + os.remove("./.temp/AirportDelays.i2m") + os.remove("./.temp/AirportDelays.gz") + else: + l.info("No airport delays found.") diff --git a/recordGenerators/Alerts.py b/recordGenerators/Alerts.py new file mode 100644 index 0000000..f7cfec3 --- /dev/null +++ b/recordGenerators/Alerts.py @@ -0,0 +1,369 @@ +import requests +import json +import os +from datetime import datetime,timedelta +from Util.MachineProductCfg import getAlertZones +import time +import pytz +import xml.dom.minidom +import shutil +import gzip +import logging,coloredlogs +import aiohttp, aiofiles, asyncio + + +import sys +sys.path.append("./py2lib") +import bit + +l = logging.getLogger(__name__) +coloredlogs.install() + +#Zones/Counties to fetch alerts for +alertLocations = getAlertZones() +# Open the config file and make it accessible via "cfg" +import json +with open("conf.json", "r") as file: + cfg = json.load(file) + +headlineApiKey = cfg[twcApiKey] +detailsApiKey = cfg[twcApiKey] + +k = 0 +async def getAlerts(location): + global k + fetchUrl = 'https://api.weather.com/v3/alerts/headlines?areaId=' + location + ':US&format=json&language=en-US&apiKey=' + headlineApiKey + # response = requests.get(fetchUrl) + + # theCode = response.status_code + + theCode = 0 + + async with aiohttp.ClientSession() as s: + async with s.get(fetchUrl) as r: + theCode = r.status + + #Set the actions based on response code + if theCode == 204: + l.info('No alerts for area ' + location + '.\n') + return + elif theCode == 403: + l.critical("Uh oh! Your API key may not be authorized for alerts. Tsk Tsk. Maybe you shouldn't pirate IBM data :)\n") + return + elif theCode == 401: + l.critical("Uh oh! This request requires authentication. Maybe you shouldn't try to access resources for IBM employee's only :)\n") + return + elif theCode == 404: + l.error("Uh oh! The requested resource cannot be found. 
This means either the URL is wrong or IBM is having technical difficulties :(\n Or.... They deleted the API :O\n") + return + elif theCode == 405: + l.error("Uh oh! Got a 405! This means that somehow.... someway..... this script made an invalid request. So sad..... So terrible..... :(\n") + return + elif theCode == 406: + l.critical("Uh oh! Got a 406! This means that IBM doesn't like us. :(\n") + return + elif theCode == 408: + l.error("Uh oh! We were too slow in providing IBM our alert request. Although I prefer to say we were Slowly Capable! :)\n") + return + elif theCode == 500: + l.error("Uh oh! Seems IBM's on call IT Tech spilled coffee on the server! Looks like no alerts for a while. Please check back later :)\n") + return + elif theCode == 502 or theCode == 503 or theCode == 504: + l.error("Uh oh! This is why you don't have interns messing with the server configuration. Please stand by while IBM's on call IT Tech resolves the issue :)\n") + return + elif theCode == 200: + pass + + # Map headline variables + l.debug('Found Alert for ' + location + '\n') + dataH = await r.json() + alertsRoot = dataH['alerts'] + + for x in alertsRoot: + detailKey = x['detailKey'] + #Lets get map our detail variables. + detailsUrl = 'https://api.weather.com/v3/alerts/detail?alertId=' + detailKey + '&format=json&language=en-US&apiKey=' + detailsApiKey + detailsResponse = requests.get(detailsUrl) + dataD = detailsResponse.json() + detailsRoot = dataD['alertDetail'] + theDetailsText = detailsRoot['texts'] + detailsText = theDetailsText[0] + descriptionRaw = detailsText['description'] + language = detailsText['languageCode'] + Identifier = location + '_' + x['phenomena'] + '_' + x['significance'] + '_' + str(x['processTimeUTC']) + + #Is this for a NWS Zone or County? + last4 = location[2:] + locationType = None + if 'C' in last4: + locationType = 'C' + elif 'Z' in last4: + locationType = 'Z' + + #theIdent = str(Identifier) + try: + async with aiofiles.open('./.temp/alertmanifest.txt', 'r' ) as checkFile: + c = await checkFile.read() + + if c.find(Identifier) != -1: + l.debug(f"{Identifier} was sent already, skipping..") + return + except FileNotFoundError: + l.warning("alert manifest does not exist (yet)") + + k += 1 #We have an alert to send! + + #Lets Map Our Vocal Codes! 
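# ------------------------------------------------------------------------
# Sketch (illustrative only): the phenomena/significance -> vocal code
# table implemented by the elif chain below, expressed as a dictionary
# lookup. Only a handful of entries are reproduced; the dict name is an
# assumption.
VOCAL_CODES = {
    'HU_W': 'HE001',  # Hurricane Warning
    'TO_A': 'HE004',  # Tornado Watch
    'SV_A': 'HE005',  # Severe Thunderstorm Watch
    'TR_W': 'HE008',  # Tropical Storm Warning
    'BZ_W': 'HE013',  # Blizzard Warning
    'WS_W': 'HE015',  # Winter Storm Warning
    'TO_W': 'HE110',  # Tornado Warning
}
# e.g. vocalCode = VOCAL_CODES.get(x['phenomena'] + '_' + x['significance'], '')
# ------------------------------------------------------------------------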
+ vocalCheck = x['phenomena'] + '_' + x['significance'] + vocalCode = None + + if vocalCheck == 'HU_W': + vocalCode = 'HE001' + elif vocalCheck == 'TY_W': + vocalCode = 'HE002' + elif vocalCheck == 'HI_W': + vocalCode = 'HE003' + elif vocalCheck == 'TO_A': + vocalCode = 'HE004' + elif vocalCheck == 'SV_A': + vocalCode = 'HE005' + elif vocalCheck == 'HU_A': + vocalCode = 'HE006' + elif vocalCheck == 'TY_A': + vocalCode = 'HE007' + elif vocalCheck == 'TR_W': + vocalCode = 'HE008' + elif vocalCheck == 'TR_A': + vocalCode = 'HE009' + elif vocalCheck == 'TI_W': + vocalCode = 'HE010' + elif vocalCheck == 'HI_A': + vocalCode = 'HE011' + elif vocalCheck == 'TI_A': + vocalCode = 'HE012' + elif vocalCheck == 'BZ_W': + vocalCode = 'HE013' + elif vocalCheck == 'IS_W': + vocalCode = 'HE014' + elif vocalCheck == 'WS_W': + vocalCode = 'HE015' + elif vocalCheck == 'HW_W': + vocalCode = 'HE016' + elif vocalCheck == 'LE_W': + vocalCode = 'HE017' + elif vocalCheck == 'ZR_Y': + vocalCode = 'HE018' + elif vocalCheck == 'CF_W': + vocalCode = 'HE019' + elif vocalCheck == 'LS_W': + vocalCode = 'HE020' + elif vocalCheck == 'WW_Y': + vocalCode = 'HE021' + elif vocalCheck == 'LB_Y': + vocalCode = 'HE022' + elif vocalCheck == 'LE_Y': + vocalCode = 'HE023' + elif vocalCheck == 'BZ_A': + vocalCode = 'HE024' + elif vocalCheck == 'WS_A': + vocalCode = 'HE025' + elif vocalCheck == 'FF_A': + vocalCode = 'HE026' + elif vocalCheck == 'FA_A': + vocalCode = 'HE027' + elif vocalCheck == 'FA_Y': + vocalCode = 'HE028' + elif vocalCheck == 'HW_A': + vocalCode = 'HE029' + elif vocalCheck == 'LE_A': + vocalCode = 'HE030' + elif vocalCheck == 'SU_W': + vocalCode = 'HE031' + elif vocalCheck == 'LS_Y': + vocalCode = 'HE032' + elif vocalCheck == 'CF_A': + vocalCode = 'HE033' + elif vocalCheck == 'ZF_Y': + vocalCode = 'HE034' + elif vocalCheck == 'FG_Y': + vocalCode = 'HE035' + elif vocalCheck == 'SM_Y': + vocalCode = 'HE036' + elif vocalCheck == 'EC_W': + vocalCode = 'HE037' + elif vocalCheck == 'EH_W': + vocalCode = 'HE038' + elif vocalCheck == 'HZ_W': + vocalCode = 'HE039' + elif vocalCheck == 'FZ_W': + vocalCode = 'HE040' + elif vocalCheck == 'HT_Y': + vocalCode = 'HE041' + elif vocalCheck == 'WC_Y': + vocalCode = 'HE042' + elif vocalCheck == 'FR_Y': + vocalCode = 'HE043' + elif vocalCheck == 'EC_A': + vocalCode = 'HE044' + elif vocalCheck == 'EH_A': + vocalCode = 'HE045' + elif vocalCheck == 'HZ_A': + vocalCode = 'HE046' + elif vocalCheck == 'DS_W': + vocalCode = 'HE047' + elif vocalCheck == 'WI_Y': + vocalCode = 'HE048' + elif vocalCheck == 'SU_Y': + vocalCode = 'HE049' + elif vocalCheck == 'AS_Y': + vocalCode = 'HE050' + elif vocalCheck == 'WC_W': + vocalCode = 'HE051' + elif vocalCheck == 'FZ_A': + vocalCode = 'HE052' + elif vocalCheck == 'WC_A': + vocalCode = 'HE053' + elif vocalCheck == 'AF_W': + vocalCode = 'HE054' + elif vocalCheck == 'AF_Y': + vocalCode = 'HE055' + elif vocalCheck == 'DU_Y': + vocalCode = 'HE056' + elif vocalCheck == 'LW_Y': + vocalCode = 'HE057' + elif vocalCheck == 'LS_A': + vocalCode = 'HE058' + elif vocalCheck == 'HF_W': + vocalCode = 'HE059' + elif vocalCheck == 'SR_W': + vocalCode = 'HE060' + elif vocalCheck == 'GL_W': + vocalCode = 'HE061' + elif vocalCheck == 'HF_A': + vocalCode = 'HE062' + elif vocalCheck == 'UP_W': + vocalCode = 'HE063' + elif vocalCheck == 'SE_W': + vocalCode = 'HE064' + elif vocalCheck == 'SR_A': + vocalCode = 'HE065' + elif vocalCheck == 'GL_A': + vocalCode = 'HE066' + elif vocalCheck == 'MF_Y': + vocalCode = 'HE067' + elif vocalCheck == 'MS_Y': + vocalCode = 'HE068' + elif 
vocalCheck == 'SC_Y': + vocalCode = 'HE069' + elif vocalCheck == 'UP_Y': + vocalCode = 'HE073' + elif vocalCheck == 'LO_Y': + vocalCode = 'HE074' + elif vocalCheck == 'AF_V': + vocalCode = 'HE075' + elif vocalCheck == 'UP_A': + vocalCode = 'HE076' + elif vocalCheck == 'TAV_W': + vocalCode = 'HE077' + elif vocalCheck == 'TAV_A': + vocalCode = 'HE078' + elif vocalCheck == 'TO_W': + vocalCode = 'HE110' + else: + vocalCode = '' + + #Do some date/time conversions + EndTimeUTCEpoch = x['expireTimeUTC'] + EndTimeUTC = datetime.utcfromtimestamp(EndTimeUTCEpoch).strftime('%Y%m%d%H%M') + #EndTimeUTC = EndTimeUTCString.astimezone(pytz.UTC) + + expireTimeEpoch = x['expireTimeUTC'] + expireTimeUTC = datetime.utcfromtimestamp(expireTimeEpoch).strftime('%Y%m%d%H%M') + + #V3 Alert API doesn't give us issueTime in UTC. So we have to convert ourselves. Ughhh!! + iTLDTS = x['issueTimeLocal'] + iTLDTO = datetime.strptime(iTLDTS, '%Y-%m-%dT%H:%M:%S%z') + issueTimeToUTC = iTLDTO.astimezone(pytz.UTC) + issueTimeUtc = issueTimeToUTC.strftime('%Y%m%d%H%M') + + processTimeEpoch = x['processTimeUTC'] + processTime = datetime.fromtimestamp(processTimeEpoch).strftime('%Y%m%d%H%M%S') + + #What is the action of this alert? + Action = None + if x['messageType'] == 'Update': + Action = 'CON' + elif x['messageType'] == 'New': + Action = 'NEW' + + #Fix description to replace new lines with space and add XML escape Chars. when needed + + description = ' '.join(descriptionRaw.splitlines()) + description = description.replace('&', '&') + description = description.replace('<', '<') + description = description.replace('>', '>') + description = description.replace('-', '') + description = description.replace(':', '') + + #Is this alert urgent? + urgency ='piss' + if vocalCheck == 'TO_W' or vocalCheck == 'SV_W' or vocalCheck == 'FF_W': + urgency = 'BEUrgent' + else: + urgency = 'BERecord' + + alertMsg = 'NOT_USED' + x['productIdentifier'] + 'NOT_USED' + Action + '' + x['officeCode'] + '' + x['phenomena'] + '' + x['significance'] + '' + x['eventTrackingNumber'] + '' + x['eventDescription'] + 'NOT_USED' + EndTimeUTC + '' + str(x['severityCode']) + 'NOT_USED' + expireTimeUTC + '' + location + '' + x['adminDistrictCode'] + 'NOT_USEDNOT_USEDNOT_USED' + x['identifier'] + '' + processTime + '' + issueTimeUtc + '' + x['headlineText'] + '' + vocalCode + 'NOT_USED' + description + 'NOT_USED' + location + '_' + x['phenomena'] + '_' + x['significance'] + '_' + x['eventTrackingNumber'] + '_' + x['officeCode'] + '' + + #Append BERecord + async with aiofiles.open('./.temp/BERecord.xml', "a") as b: + await b.write(alertMsg) + await b.close() + + #Add our alert to the manifest so we don't keep sending in the same alert every 60 seconds unless an update is issued. 
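# ------------------------------------------------------------------------
# Sketch (illustrative only): the de-duplication the manifest provides, in
# isolation. An alert's identifier combines location, phenomena,
# significance and process time, so an updated alert gets a new identifier
# and is sent again, while an unchanged one is skipped. Helper names are
# assumptions.
def alreadySent(identifier, manifestPath='./.temp/alertmanifest.txt'):
    try:
        with open(manifestPath, 'r') as m:
            return identifier in m.read()
    except FileNotFoundError:
        return False

def markSent(identifier, manifestPath='./.temp/alertmanifest.txt'):
    with open(manifestPath, 'a') as m:
        m.write('\n' + identifier)
# ------------------------------------------------------------------------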
+ async with aiofiles.open('./.temp/alertmanifest.txt', "a") as c: + await c.write('\n' + location + '_' + x['phenomena'] + '_' + x['significance'] + '_' + str(x['processTimeUTC'])) + await c.close() + + + +async def makeRecord(): + loop = asyncio.get_running_loop() + global k + + async with aiofiles.open("./.temp/BERecord.xml", 'w') as BERecord: + await BERecord.write('') + await BERecord.close() + + for z in alertLocations: + await getAlerts(z) + + async with aiofiles.open('./.temp/BERecord.xml', 'a') as BERecord: + await BERecord.write("") + await BERecord.close() + + dom = xml.dom.minidom.parse("./.temp/BERecord.xml") + pretty_xml_as_string = dom.toprettyxml(indent = " ") + + async with aiofiles.open("./.temp/BERecord.i2m", 'w') as h: + await h.write(pretty_xml_as_string[23:]) + await h.close() + + # The BERecord XML doesn't need to be written if there's no alerts. + if k > 0: + l.info("Sending alert(s) to the IntelliStar 2!") + with open("./.temp/BERecord.i2m", 'rb') as f_in: + with gzip.open("./.temp/BERecord.gz", 'wb') as f_out: + shutil.copyfileobj(f_in, f_out) + + files = [] + commands = [] + gZipFile = "./.temp/BERecord.gz" + files.append(gZipFile) + command = commands.append('') + bit.sendFile(files, commands, 1, 0) + os.remove(gZipFile) + k = 0 + + os.remove("./.temp/BERecord.xml") + os.remove("./.temp/BERecord.i2m") + diff --git a/recordGenerators/Breathing.py b/recordGenerators/Breathing.py new file mode 100644 index 0000000..96431b2 --- /dev/null +++ b/recordGenerators/Breathing.py @@ -0,0 +1,97 @@ +import requests +import sys +import gzip +import uuid +import os +import shutil +import xml.dom.minidom +import logging,coloredlogs +import aiohttp, aiofiles, asyncio + +sys.path.append("./py2lib") +sys.path.append("./Util") +sys.path.append("./records") +import bit +import MachineProductCfg as MPC +import LFRecord as LFR + +l = logging.getLogger(__name__) +coloredlogs.install() + +coopIds = [] +geocodes = [] + + +# Auto-grab the tecci and zip codes +for i in MPC.getPrimaryLocations(): + coopIds.append(LFR.getCoopId(i)) + geocodes.append(LFR.getLatLong(i).replace('/', ',')) + +l.debug(coopIds, geocodes) + +# Open the config file and make it accessible via "cfg" +import json +with open("conf.json", "r") as file: + cfg = json.load(file) + +apiKey = cfg[twcApiKey] + +async def getData(coopId, geocode): + fetchUrl = f"https://api.weather.com/v2/indices/breathing/daypart/7day?geocode={geocode}&language=en-US&format=xml&apiKey={apiKey}" + data = "" + + #Fetch data + async with aiohttp.ClientSession() as s: + async with s.get(fetchUrl) as r: + data = await r.text() + + newData = data[63:-26] + + l.debug('Gathering data for location id ' + coopId) + #Write to .i2m file + i2Doc = '' + '' + newData + '' + str(coopId) + '' + + async with aiofiles.open("./.temp/Breathing.i2m", "a") as f: + await f.write(i2Doc) + await f.close() + + +async def makeDataFile(): + loop = asyncio.get_running_loop() + l.info("Writing a Breathing forecast record.") + header = '' + footer = '' + + async with aiofiles.open("./.temp/Breathing.i2m", 'w') as doc: + await doc.write(header) + + for x, y in zip(coopIds, geocodes): + await getData(x, y) + + async with aiofiles.open("./.temp/Breathing.i2m", 'a') as end: + await end.write(footer) + + + dom = xml.dom.minidom.parse("./.temp/Breathing.i2m") + pretty_xml_as_string = dom.toprettyxml(indent = " ") + + async with aiofiles.open("./.temp/Breathing.i2m", "w") as g: + await g.write(pretty_xml_as_string[23:]) + await g.close() + + files = [] + commands = [] + with 
open("./.temp/Breathing.i2m", 'rb') as f_in: + with gzip.open("./.temp/Breathing.gz", 'wb') as f_out: + shutil.copyfileobj(f_in, f_out) + + gZipFile = "./.temp/Breathing.gz" + + files.append(gZipFile) + command = commands.append('') + numFiles = len(files) + + bit.sendFile(files, commands, numFiles, 0) + + os.remove("./.temp/Breathing.i2m") + os.remove("./.temp/Breathing.gz") \ No newline at end of file diff --git a/recordGenerators/CurrentObservations.py b/recordGenerators/CurrentObservations.py new file mode 100644 index 0000000..21cc015 --- /dev/null +++ b/recordGenerators/CurrentObservations.py @@ -0,0 +1,103 @@ +import requests +import py2Lib.bit as bit +import gzip +import uuid +import os +import shutil +import xml.dom.minidom +import logging,coloredlogs +import aiohttp, aiofiles, asyncio + +import sys +sys.path.append("./py2lib") +sys.path.append("./Util") +sys.path.append("./records") +import bit +import MachineProductCfg as MPC +import LFRecord as LFR + +l = logging.getLogger(__name__) +coloredlogs.install() + +tecciId = [] +zipCodes = [] + +# Auto-grab the tecci and zip codes +for i in MPC.getPrimaryLocations(): + tecciId.append("T" + LFR.getCoopId(i)) + zipCodes.append(LFR.getZip(i)) + +# Obtain metro map city TECCI and zips: +for i in MPC.getMetroCities(): + tecciId.append("T" + LFR.getCoopId(i)) + zipCodes.append(LFR.getZip(i)) + + +# Open the config file and make it accessible via "cfg" +import json +with open("conf.json", "r") as file: + cfg = json.load(file) + +apiKey = cfg[twcApiKey] + +async def getData(tecci, zipCode): + l.debug('Gathering data for location id ' + tecci) + fetchUrl = 'https://api.weather.com/v1/location/' + zipCode + ':4:US/observations/current.xml?language=en-US&units=e&apiKey=' + apiKey + data = "" + + async with aiohttp.ClientSession() as s: + async with s.get(fetchUrl) as r: + data = await r.text() + + newData = data[67:-30] + + #Write to .i2m file + i2Doc = '' + '' + newData + '' + str(tecci) + '' + async with aiofiles.open("./.temp/CurrentObservations.i2m", 'a') as f: + await f.write(i2Doc) + await f.close() + + +async def makeDataFile(): + loop = asyncio.get_running_loop() + l.info("Writing a CurrentObservations record.") + header = '' + footer = '' + + async with aiofiles.open("./.temp/CurrentObservations.i2m", 'w') as doc: + await doc.write(header) + + for x, y in zip(tecciId, zipCodes): + await getData(x, y) + + async with aiofiles.open("./.temp/CurrentObservations.i2m", 'a') as end: + await end.write(footer) + + dom = xml.dom.minidom.parse("./.temp/CurrentObservations.i2m") + pretty_xml_as_string = dom.toprettyxml(indent = " ") + + async with aiofiles.open("./.temp/CurrentObservations.i2m", "w") as g: + await g.write(pretty_xml_as_string[23:]) + await g.close() + + files = [] + commands = [] + + """ + TODO: This can be ran in a seperate thread using loop.run_in_executor() according to the python discord. + ! This should probably be implemented ASAP. 
+ """ + with open("./.temp/CurrentObservations.i2m", 'rb') as f_in: + with gzip.open("./.temp/CurrentObservations.gz", 'wb') as f_out: + shutil.copyfileobj(f_in, f_out) + + gZipFile = "./.temp/CurrentObservations.gz" + + files.append(gZipFile) + command = commands.append('') + numFiles = len(files) + + bit.sendFile(files, commands, numFiles, 0) + + os.remove("./.temp/CurrentObservations.i2m") + os.remove("./.temp/CurrentObservations.gz") diff --git a/recordGenerators/DailyForecast.py b/recordGenerators/DailyForecast.py new file mode 100644 index 0000000..45e659d --- /dev/null +++ b/recordGenerators/DailyForecast.py @@ -0,0 +1,98 @@ +import requests +import sys +import gzip +import uuid +import os +import shutil +import xml.dom.minidom +import logging,coloredlogs +import aiohttp, aiofiles, asyncio + +sys.path.append("./py2lib") +sys.path.append("./Util") +sys.path.append("./records") +import bit +import MachineProductCfg as MPC +import LFRecord as LFR + +l = logging.getLogger(__name__) +coloredlogs.install() + +tecciId = [] +zipCodes = [] + +# Auto-grab the tecci and zip codes +for i in MPC.getPrimaryLocations(): + tecciId.append(LFR.getCoopId(i)) + zipCodes.append(LFR.getZip(i)) + +# Grab metro map city tecci and zip codes +for i in MPC.getMetroCities(): + tecciId.append(LFR.getCoopId(i)) + zipCodes.append(LFR.getZip(i)) + +# Open the config file and make it accessible via "cfg" +import json +with open("conf.json", "r") as file: + cfg = json.load(file) + +apiKey = cfg[twcApiKey] + +async def getData(tecci, zipCode): + fetchUrl = 'https://api.weather.com/v1/location/' + zipCode + ':4:US/forecast/daily/7day.xml?language=en-US&units=e&apiKey=' + apiKey + data = "" + + async with aiohttp.ClientSession() as s: + async with s.get(fetchUrl) as r: + data = await r.text() + + newData = data[61:-24] + + l.debug('Gathering data for location id ' + tecci) + #Write to .i2m file + i2Doc = '' + '' + newData + '' + str(tecci) + '' + + async with aiofiles.open('./.temp/DailyForecast.i2m', 'a') as f: + await f.write(i2Doc) + await f.close() + + +async def makeDataFile(): + loop = asyncio.get_running_loop() + l.info("Writing a DailyForecast record.") + header = '' + footer = '' + + async with aiofiles.open("./.temp/DailyForecast.i2m", 'w') as doc: + await doc.write(header) + + for x, y in zip(tecciId, zipCodes): + await getData(x, y) + + async with aiofiles.open("./.temp/DailyForecast.i2m", 'a') as end: + await end.write(footer) + + + dom = xml.dom.minidom.parse("./.temp/DailyForecast.i2m") + pretty_xml_as_string = dom.toprettyxml(indent = " ") + + async with aiofiles.open("./.temp/DailyForecast.i2m", "w") as g: + await g.write(pretty_xml_as_string[23:]) + await g.close() + + files = [] + commands = [] + with open("./.temp/DailyForecast.i2m", 'rb') as f_in: + with gzip.open("./.temp/DailyForecast.gz", 'wb') as f_out: + shutil.copyfileobj(f_in, f_out) + + gZipFile = "./.temp/DailyForecast.gz" + + files.append(gZipFile) + command = commands.append('') + numFiles = len(files) + + bit.sendFile(files, commands, numFiles, 0) + + os.remove("./.temp/DailyForecast.i2m") + os.remove("./.temp/DailyForecast.gz") \ No newline at end of file diff --git a/recordGenerators/HeatingAndCooling.py b/recordGenerators/HeatingAndCooling.py new file mode 100644 index 0000000..98176f4 --- /dev/null +++ b/recordGenerators/HeatingAndCooling.py @@ -0,0 +1,85 @@ +import shutil +import requests +import logging,coloredlogs +from py2Lib import bit +import Util.MachineProductCfg as MPC +import records.LFRecord as LFR +import gzip +from os 
diff --git a/recordGenerators/HeatingAndCooling.py b/recordGenerators/HeatingAndCooling.py new file mode 100644 index 0000000..98176f4 --- /dev/null +++ b/recordGenerators/HeatingAndCooling.py @@ -0,0 +1,85 @@ +import shutil +import requests +import logging,coloredlogs +from py2Lib import bit +import Util.MachineProductCfg as MPC +import records.LFRecord as LFR +import gzip +from os import remove +import xml.dom.minidom +import aiohttp, aiofiles, asyncio + +l = logging.getLogger(__name__) +coloredlogs.install() + +geocodes = [] +coopIds = [] + +for i in MPC.getPrimaryLocations(): + coopIds.append(LFR.getCoopId(i)) + geocodes.append(LFR.getLatLong(i).replace('/', ',')) + +# Open the config file and make it accessible via "cfg" +import json +with open("conf.json", "r") as file: + cfg = json.load(file) + +apiKey = cfg["twcApiKey"] + +async def getData(coopId, geocode): + fetchUrl = f"https://api.weather.com/v2/indices/heatCool/daypart/7day?geocode={geocode}&language=en-US&format=xml&apiKey={apiKey}" + data = "" + + async with aiohttp.ClientSession() as s: + async with s.get(fetchUrl) as r: + if r.status != 200: + l.error(f"Failed to write HeatingAndCooling record -- Status code {r.status}") + return + + data = await r.text() + + newData = data[63:-26] + + i2Doc = f'\n \n {newData}\n {coopId}\n ' + + async with aiofiles.open('./.temp/HeatingAndCooling.i2m', 'a') as f: + await f.write(i2Doc) + await f.close() + +async def makeRecord(): + loop = asyncio.get_running_loop() + l.info("Writing HeatingAndCooling record.") + + header = '' + footer = '' + + async with aiofiles.open('./.temp/HeatingAndCooling.i2m', 'a') as doc: + await doc.write(header) + + for (x, y) in zip(coopIds, geocodes): + await getData(x,y) + + async with aiofiles.open('./.temp/HeatingAndCooling.i2m', 'a') as end: + await end.write(footer) + + dom = xml.dom.minidom.parse('./.temp/HeatingAndCooling.i2m') + xmlPretty = dom.toprettyxml(indent= " ") + + async with aiofiles.open('./.temp/HeatingAndCooling.i2m', 'w') as g: + await g.write(xmlPretty[23:]) + await g.close() + + + # Compress i2m to gzip + with open('./.temp/HeatingAndCooling.i2m', 'rb') as f_in: + with gzip.open('./.temp/HeatingAndCooling.gz', 'wb') as f_out: + shutil.copyfileobj(f_in, f_out) + + file = "./.temp/HeatingAndCooling.gz" + command = '' + + bit.sendFile([file], [command], 1, 0) + + remove('./.temp/HeatingAndCooling.i2m') + remove('./.temp/HeatingAndCooling.gz') \ No newline at end of file
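makeRecord() above awaits getData() once per location, so the HTTP requests run one after another. Because the locations are independent, they could be fetched concurrently with asyncio.gather. The sketch below assumes getData() is refactored to return its i2Doc fragment instead of appending to the shared .i2m file, so the fragments can still be written out in a deterministic order; treat it as a direction rather than a drop-in change:

import asyncio

async def gatherFragments(coopIds, geocodes):
    # One task per location; results come back in the order the tasks were created.
    tasks = [getData(x, y) for x, y in zip(coopIds, geocodes)]
    return await asyncio.gather(*tasks)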
diff --git a/recordGenerators/HourlyForecast.py b/recordGenerators/HourlyForecast.py new file mode 100644 index 0000000..b8258d8 --- /dev/null +++ b/recordGenerators/HourlyForecast.py @@ -0,0 +1,100 @@ +import requests +import gzip +import uuid +import os +import shutil +import xml.dom.minidom +import logging,coloredlogs +import aiohttp, aiofiles, asyncio + +import sys +sys.path.append("./py2lib") +sys.path.append("./Util") +sys.path.append("./records") +import bit +import MachineProductCfg as MPC +import LFRecord as LFR + +l = logging.getLogger(__name__) +coloredlogs.install() + +tecciId = [] +zipCodes = [] + +# Auto-grab the tecci and zip codes +for i in MPC.getPrimaryLocations(): + tecciId.append(LFR.getCoopId(i)) + zipCodes.append(LFR.getZip(i)) + +for i in MPC.getMetroCities(): + tecciId.append(LFR.getCoopId(i)) + zipCodes.append(LFR.getZip(i)) + + +# Open the config file and make it accessible via "cfg" +import json +with open("conf.json", "r") as file: + cfg = json.load(file) + +apiKey = cfg["twcApiKey"] + +async def getData(tecci, zipCode): + l.debug('Gathering data for location id ' + tecci) + fetchUrl = 'https://api.weather.com/v1/location/' + zipCode + ':4:US/forecast/hourly/360hour.xml?language=en-US&units=e&apiKey=' + apiKey + data = "" + + #Fetch data + async with aiohttp.ClientSession() as s: + async with s.get(fetchUrl) as r: + data = await r.text() + + newData = data[48:-11] + + #Write to .i2m file + i2Doc = '' + '' + newData + '' + str(tecci) + '' + + async with aiofiles.open('./.temp/HourlyForecast.i2m', 'a') as f: + await f.write(i2Doc) + await f.close() + + +async def makeDataFile(): + loop = asyncio.get_running_loop() + l.info("Writing an HourlyForecast record.") + header = '' + footer = '' + + async with aiofiles.open("./.temp/HourlyForecast.i2m", 'w') as doc: + await doc.write(header) + + + for x, y in zip(tecciId, zipCodes): + await getData(x, y) + + async with aiofiles.open("./.temp/HourlyForecast.i2m", 'a') as end: + await end.write(footer) + + + dom = xml.dom.minidom.parse("./.temp/HourlyForecast.i2m") + pretty_xml_as_string = dom.toprettyxml(indent = " ") + + async with aiofiles.open("./.temp/HourlyForecast.i2m", "w") as g: + await g.write(pretty_xml_as_string[23:]) + await g.close() + + files = [] + commands = [] + with open("./.temp/HourlyForecast.i2m", 'rb') as f_in: + with gzip.open("./.temp/HourlyForecast.gz", 'wb') as f_out: + shutil.copyfileobj(f_in, f_out) + + gZipFile = "./.temp/HourlyForecast.gz" + + files.append(gZipFile) + commands.append('') + numFiles = len(files) + + bit.sendFile(files, commands, numFiles, 0) + + os.remove("./.temp/HourlyForecast.i2m") + os.remove("./.temp/HourlyForecast.gz") \ No newline at end of file
diff --git a/recordGenerators/MosquitoActivity.py b/recordGenerators/MosquitoActivity.py new file mode 100644 index 0000000..87dafa7 --- /dev/null +++ b/recordGenerators/MosquitoActivity.py @@ -0,0 +1,85 @@ +import shutil +import requests +import logging,coloredlogs +from py2Lib import bit +import Util.MachineProductCfg as MPC +import records.LFRecord as LFR +import gzip +from os import remove +import xml.dom.minidom +import aiohttp, aiofiles, asyncio + +l = logging.getLogger(__name__) +coloredlogs.install() + +geocodes = [] +coopIds = [] + +for i in MPC.getPrimaryLocations(): + coopIds.append(LFR.getCoopId(i)) + geocodes.append(LFR.getLatLong(i).replace('/', ',')) + +# Open the config file and make it accessible via "cfg" +import json +with open("conf.json", "r") as file: + cfg = json.load(file) + +apiKey = cfg["twcApiKey"] + +async def getData(coopId, geocode): + fetchUrl = f"https://api.weather.com/v2/indices/mosquito/daily/7day?geocode={geocode}&language=en-US&format=xml&apiKey={apiKey}" + data = "" + + async with aiohttp.ClientSession() as s: + async with s.get(fetchUrl) as r: + if r.status != 200: + l.error(f"Failed to write MosquitoActivity record -- status code {r.status}") + return + + data = await r.text() + + + newData = data[63:-26] + + i2Doc = f'\n \n {newData}\n {coopId}\n ' + + async with aiofiles.open('./.temp/MosquitoActivity.i2m', 'a') as f: + await f.write(i2Doc) + await f.close() + +async def makeRecord(): + loop = asyncio.get_running_loop() + l.info("Writing MosquitoActivity record.") + + header = '' + footer = '' + + async with aiofiles.open('./.temp/MosquitoActivity.i2m', 'a') as doc: + await doc.write(header) + + for (x, y) in zip(coopIds, geocodes): + await getData(x,y) + + async with aiofiles.open('./.temp/MosquitoActivity.i2m', 'a') as end: + await end.write(footer) + + dom = xml.dom.minidom.parse('./.temp/MosquitoActivity.i2m') + xmlPretty = dom.toprettyxml(indent= " ") + + async with aiofiles.open('./.temp/MosquitoActivity.i2m', 'w') as g: + await g.write(xmlPretty[23:]) + await g.close() + + + # Compress i2m to gzip + with open('./.temp/MosquitoActivity.i2m', 'rb') as f_in: + with gzip.open('./.temp/MosquitoActivity.gz', 'wb') as f_out: + shutil.copyfileobj(f_in, f_out) + + file = "./.temp/MosquitoActivity.gz" + command = '' + + bit.sendFile([file], [command], 1, 0) + + remove('./.temp/MosquitoActivity.i2m') + remove('./.temp/MosquitoActivity.gz') \ No newline at end of file diff --git a/recordGenerators/PollenForecast.py b/recordGenerators/PollenForecast.py new file mode 100644 index 0000000..e04b535 --- /dev/null +++ b/recordGenerators/PollenForecast.py @@ -0,0 +1,97 @@ +import requests +import sys +import gzip +import uuid +import os +import shutil +import xml.dom.minidom +import logging, coloredlogs +import aiohttp, aiofiles, asyncio + +sys.path.append("./py2lib") +sys.path.append("./Util") +sys.path.append("./records") +import bit +import MachineProductCfg as MPC +import LFRecord as LFR + + +l = logging.getLogger(__name__) +coloredlogs.install() + +pollenIds = [] +geocodes = [] + + +# Auto-grab the pollen IDs and geocodes +for i in MPC.getPrimaryLocations(): + pollenIds.append(LFR.getPollenInfo(i)) + geocodes.append(LFR.getLatLong(i).replace('/', ',')) + +l.debug("Pollen IDs: %s, geocodes: %s", pollenIds, geocodes) + +# Open the config file and make it accessible via "cfg" +import json +with open("conf.json", "r") as file: + cfg = json.load(file) + +apiKey = cfg["twcApiKey"] + +async def getData(pollenId, geocode): + fetchUrl = f"https://api.weather.com/v2/indices/pollen/daypart/7day?geocode={geocode}&language=en-US&format=xml&apiKey={apiKey}" + data = "" + #Fetch data + async with aiohttp.ClientSession() as s: + async with s.get(fetchUrl) as r: + data = await r.text() + + newData = data[63:-26] + + l.debug('Gathering data for location id ' + pollenId) + #Write to .i2m file + i2Doc = '' + '' + newData + '' + str(pollenId) + '' + + async with aiofiles.open("./.temp/PollenForecast.i2m", "a") as f: + await f.write(i2Doc) + await f.close() + + +async def makeDataFile(): + loop = asyncio.get_running_loop() + l.info("Writing a PollenForecast record.") + header = '' + footer = '' + + async with aiofiles.open("./.temp/PollenForecast.i2m", 'w') as doc: + await doc.write(header) + + for x, y in zip(pollenIds, geocodes): + await getData(x, y) + + async with aiofiles.open("./.temp/PollenForecast.i2m", 'a') as end: + await end.write(footer) + + + dom = xml.dom.minidom.parse("./.temp/PollenForecast.i2m") + pretty_xml_as_string = dom.toprettyxml(indent = " ") + + async with aiofiles.open("./.temp/PollenForecast.i2m", "w") as g: + await g.write(pretty_xml_as_string[23:]) + await g.close() + + files = [] + commands = [] + with open("./.temp/PollenForecast.i2m", 'rb') as f_in: + with gzip.open("./.temp/PollenForecast.gz", 'wb') as f_out: + shutil.copyfileobj(f_in, f_out) + + gZipFile = "./.temp/PollenForecast.gz" + + files.append(gZipFile) + commands.append('') + numFiles = len(files) + + bit.sendFile(files, commands, numFiles, 0) + + os.remove("./.temp/PollenForecast.i2m") + os.remove("./.temp/PollenForecast.gz") \ No newline at end of file
+with open("conf.json", "r") as file: + cfg = json.load(file) + +apiKey = cfg[twcApiKey] + +async def getData(tideStation, geocode): + today = datetime.date.today() + startDate = today.strftime('%Y%m%d') + endDate_unformatted = datetime.datetime.strptime(startDate, '%Y%m%d') + datetime.timedelta(days=5) + endDate = endDate_unformatted.strftime('%Y%m%d') + data = "" + + fetchUrl = f"https://api.weather.com/v1/geocode/{geocode}/forecast/tides.xml?language=en-US&units=e&startDate={startDate}&endDate={endDate}&apiKey={apiKey}" + + async with aiohttp.ClientSession() as s: + async with s.get(fetchUrl) as r: + if r.status != 200: + l.error(f"Failed to write TideForecast -- status code {r.status}") + return + + data = await r.text() + + + newData = data[53:-16] + + i2Doc = f'\n \n {newData}\n {tideStation}\n ' + + async with aiofiles.open('./.temp/TidesForecast.i2m', 'a') as f: + await f.write(i2Doc) + await f.close() + +async def makeRecord(): + loop = asyncio.get_running_loop() + if len(tideStations) < 1: + l.debug("Skipping TidesForecast -- No locations.") + return + + l.info("Writing TidesForecast record.") + + header = '' + footer = '' + + async with aiofiles.open('./.temp/TidesForecast.i2m', 'a') as doc: + await doc.write(header) + + for (x, y) in zip(tideStations, geocodes): + await getData(x,y) + + async with aiofiles.open('./.temp/TidesForecast.i2m', 'a') as end: + await end.write(footer) + + dom = xml.dom.minidom.parse('./.temp/TidesForecast.i2m') + xmlPretty = dom.toprettyxml(indent= " ") + + async with aiofiles.open('./.temp/TidesForecast.i2m', 'w') as g: + await g.write(xmlPretty[23:]) + await g.close() + + + # Compresss i2m to gzip + with open ('./.temp/TidesForecast.i2m', 'rb') as f_in: + with gzip.open('./.temp/TidesForecast.gz', 'wb') as f_out: + shutil.copyfileobj(f_in, f_out) + + file = "./.temp/TidesForecast.gz" + command = '' + + bit.sendFile([file], [command], 1, 0) + + remove('./.temp/TidesForecast.i2m') + remove('./.temp/TidesForecast.gz') \ No newline at end of file diff --git a/recordGenerators/WateringNeeds.py b/recordGenerators/WateringNeeds.py new file mode 100644 index 0000000..198c28d --- /dev/null +++ b/recordGenerators/WateringNeeds.py @@ -0,0 +1,84 @@ +import shutil +import requests +import logging,coloredlogs +from py2Lib import bit +import Util.MachineProductCfg as MPC +import records.LFRecord as LFR +import gzip +from os import remove +import xml.dom.minidom +import aiohttp, aiofiles, asyncio + +l = logging.getLogger(__name__) +coloredlogs.install() + +geocodes = [] +coopIds = [] + +for i in MPC.getPrimaryLocations(): + coopIds.append(LFR.getCoopId(i)) + geocodes.append(LFR.getLatLong(i).replace('/', ',')) + +# Open the config file and make it accessible via "cfg" +import json +with open("conf.json", "r") as file: + cfg = json.load(file) + +apiKey = cfg[twcApiKey] + +async def getData(coopId, geocode): + fetchUrl = f"https://api.weather.com/v2/indices/wateringNeeds/daypart/7day?geocode={geocode}&language=en-US&format=xml&apiKey={apiKey}" + data = "" + + async with aiohttp.ClientSession() as s: + async with s.get(fetchUrl) as r: + if r.status != 200: + l.error(f"Failed to WateringNeeds -- status code {r.status}") + return + + data = await r.text() + + newData = data[63:-26] + + i2Doc = f'\n \n {newData}\n {coopId}\n ' + + async with aiofiles.open('./.temp/WateringNeeds.i2m', 'a') as f: + await f.write(i2Doc) + await f.close() + +async def makeRecord(): + loop = asyncio.get_running_loop() + l.info("Writing WateringNeeds record.") + + header = '' + footer = '' + + 
diff --git a/recordGenerators/WateringNeeds.py b/recordGenerators/WateringNeeds.py new file mode 100644 index 0000000..198c28d --- /dev/null +++ b/recordGenerators/WateringNeeds.py @@ -0,0 +1,84 @@ +import shutil +import requests +import logging,coloredlogs +from py2Lib import bit +import Util.MachineProductCfg as MPC +import records.LFRecord as LFR +import gzip +from os import remove +import xml.dom.minidom +import aiohttp, aiofiles, asyncio + +l = logging.getLogger(__name__) +coloredlogs.install() + +geocodes = [] +coopIds = [] + +for i in MPC.getPrimaryLocations(): + coopIds.append(LFR.getCoopId(i)) + geocodes.append(LFR.getLatLong(i).replace('/', ',')) + +# Open the config file and make it accessible via "cfg" +import json +with open("conf.json", "r") as file: + cfg = json.load(file) + +apiKey = cfg["twcApiKey"] + +async def getData(coopId, geocode): + fetchUrl = f"https://api.weather.com/v2/indices/wateringNeeds/daypart/7day?geocode={geocode}&language=en-US&format=xml&apiKey={apiKey}" + data = "" + + async with aiohttp.ClientSession() as s: + async with s.get(fetchUrl) as r: + if r.status != 200: + l.error(f"Failed to write WateringNeeds record -- status code {r.status}") + return + + data = await r.text() + + newData = data[63:-26] + + i2Doc = f'\n \n {newData}\n {coopId}\n ' + + async with aiofiles.open('./.temp/WateringNeeds.i2m', 'a') as f: + await f.write(i2Doc) + await f.close() + +async def makeRecord(): + loop = asyncio.get_running_loop() + l.info("Writing WateringNeeds record.") + + header = '' + footer = '' + + async with aiofiles.open('./.temp/WateringNeeds.i2m', 'a') as doc: + await doc.write(header) + + for (x, y) in zip(coopIds, geocodes): + await getData(x,y) + + async with aiofiles.open('./.temp/WateringNeeds.i2m', 'a') as end: + await end.write(footer) + + dom = xml.dom.minidom.parse('./.temp/WateringNeeds.i2m') + xmlPretty = dom.toprettyxml(indent= " ") + + async with aiofiles.open('./.temp/WateringNeeds.i2m', 'w') as g: + await g.write(xmlPretty[23:]) + await g.close() + + + # Compress i2m to gzip + with open('./.temp/WateringNeeds.i2m', 'rb') as f_in: + with gzip.open('./.temp/WateringNeeds.gz', 'wb') as f_out: + shutil.copyfileobj(f_in, f_out) + + file = "./.temp/WateringNeeds.gz" + command = '' + + bit.sendFile([file], [command], 1, 0) + + remove('./.temp/WateringNeeds.i2m') + remove('./.temp/WateringNeeds.gz') \ No newline at end of file diff --git a/recordGenerators/__init__.py b/recordGenerators/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/records/LFRecord.db b/records/LFRecord.db new file mode 100644 index 0000000..d3fe24c Binary files /dev/null and b/records/LFRecord.db differ diff --git a/records/LFRecord.py b/records/LFRecord.py new file mode 100644 index 0000000..cd15b95 --- /dev/null +++ b/records/LFRecord.py @@ -0,0 +1,40 @@ +import sqlite3 + +# Make a connection to the LFRecord database +con = sqlite3.connect("records/LFRecord.db") +cur = con.cursor() + + +def getZip(locId: str): + """ Returns the zip code for a given location """ + COMMAND = (f"SELECT zip2locId FROM lfrecord WHERE locId='{locId}'") + cur.execute(COMMAND) + return cur.fetchone()[0] + +def getCoopId(locId: str): + """ Returns the TWC co-op ID for a given location """ + COMMAND = (f"SELECT coopId FROM lfrecord WHERE locId='{locId}'") + cur.execute(COMMAND) + return cur.fetchone()[0] + +def getEpaId(locId: str): + """ Return the Air Quality station id for a given location. """ + COMMAND = (f"SELECT epaId FROM lfrecord WHERE locId='{locId}'") + cur.execute(COMMAND) + return cur.fetchone()[0] + +def getPollenInfo(locId: str): + """ Return the Pollen forecast id for a given location. """ + COMMAND = (f"SELECT pllnId FROM lfrecord WHERE locId='{locId}'") + cur.execute(COMMAND) + return cur.fetchone()[0] + +def getLatLong(locId: str): + """ Return the latitude/longitude for a given location. """ + COMMAND = (f"SELECT lat,long FROM lfrecord WHERE locId='{locId}'") + cur.execute(COMMAND) + fetched = cur.fetchone() + return fetched[0] + "/" + fetched[1] + +def getLocationInfo(locId: str): + pass \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..f3d5995 Binary files /dev/null and b/requirements.txt differ
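The helpers in records/LFRecord.py build their SQL by interpolating locId into the query text with f-strings. sqlite3 also supports parameterized queries, which sidesteps quoting problems if a location ID ever contains unexpected characters. A sketch of getZip() written that way, using the same table and column names as above:

import sqlite3

con = sqlite3.connect("records/LFRecord.db")
cur = con.cursor()

def getZip(locId: str):
    """ Returns the zip code for a given location """
    # The '?' placeholder lets sqlite3 handle quoting of locId.
    cur.execute("SELECT zip2locId FROM lfrecord WHERE locId=?", (locId,))
    return cur.fetchone()[0]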