diff --git a/ACKNOWLEDGMENTS.md b/ACKNOWLEDGMENTS.md index 7935e4829d..9c176790b2 100644 --- a/ACKNOWLEDGMENTS.md +++ b/ACKNOWLEDGMENTS.md @@ -429,30 +429,6 @@ Signal Desktop makes use of the following open source projects. OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -## @signalapp/better-sqlite3 - - The MIT License (MIT) - - Copyright (c) 2017 Joshua Wise - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE. - ## @signalapp/quill-cjs Copyright (c) 2017-2024, Slab @@ -487,6 +463,670 @@ Signal Desktop makes use of the following open source projects. (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +## @signalapp/sqlcipher + + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for + software and other kinds of works, specifically designed to ensure + cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed + to take away your freedom to share and change the works. By contrast, + our General Public Licenses are intended to guarantee your freedom to + share and change all versions of a program--to make sure it remains free + software for all its users. + + When we speak of free software, we are referring to freedom, not + price. Our General Public Licenses are designed to make sure that you + have the freedom to distribute copies of free software (and charge for + them if you wish), that you receive source code or can get it if you + want it, that you can change the software or use pieces of it in new + free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights + with two steps: (1) assert copyright on the software, and (2) offer + you this License which gives you legal permission to copy, distribute + and/or modify the software. + + A secondary benefit of defending all users' freedom is that + improvements made in alternate versions of the program, if they + receive widespread use, become available for other developers to + incorporate. Many developers of free software are heartened and + encouraged by the resulting cooperation. 
However, in the case of + software used on network servers, this result may fail to come about. + The GNU General Public License permits making a modified version and + letting the public access it on a server without ever releasing its + source code to the public. + + The GNU Affero General Public License is designed specifically to + ensure that, in such cases, the modified source code becomes available + to the community. It requires the operator of a network server to + provide the source code of the modified version running there to the + users of that server. Therefore, public use of a modified version, on + a publicly accessible server, gives the public access to the source + code of the modified version. + + An older license, called the Affero General Public License and + published by Affero, was designed to accomplish similar goals. This is + a different license, not a version of the Affero GPL, but Affero has + released a new version of the Affero GPL which permits relicensing under + this license. + + The precise terms and conditions for copying, distribution and + modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of + works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this + License. Each licensee is addressed as "you". "Licensees" and + "recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work + in a fashion requiring copyright permission, other than the making of an + exact copy. The resulting work is called a "modified version" of the + earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based + on the Program. + + To "propagate" a work means to do anything with it that, without + permission, would make you directly or secondarily liable for + infringement under applicable copyright law, except executing it on a + computer or modifying a private copy. Propagation includes copying, + distribution (with or without modification), making available to the + public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other + parties to make or receive copies. Mere interaction with a user through + a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" + to the extent that it includes a convenient and prominently visible + feature that (1) displays an appropriate copyright notice, and (2) + tells the user that there is no warranty for the work (except to the + extent that warranties are provided), that licensees may convey the + work under this License, and how to view a copy of this License. If + the interface presents a list of user commands or options, such as a + menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work + for making modifications to it. "Object code" means any non-source + form of a work. + + A "Standard Interface" means an interface that either is an official + standard defined by a recognized standards body, or, in the case of + interfaces specified for a particular programming language, one that + is widely used among developers working in that language. 
+ + The "System Libraries" of an executable work include anything, other + than the work as a whole, that (a) is included in the normal form of + packaging a Major Component, but which is not part of that Major + Component, and (b) serves only to enable use of the work with that + Major Component, or to implement a Standard Interface for which an + implementation is available to the public in source code form. A + "Major Component", in this context, means a major essential component + (kernel, window system, and so on) of the specific operating system + (if any) on which the executable work runs, or a compiler used to + produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all + the source code needed to generate, install, and (for an executable + work) run the object code and to modify the work, including scripts to + control those activities. However, it does not include the work's + System Libraries, or general-purpose tools or generally available free + programs which are used unmodified in performing those activities but + which are not part of the work. For example, Corresponding Source + includes interface definition files associated with source files for + the work, and the source code for shared libraries and dynamically + linked subprograms that the work is specifically designed to require, + such as by intimate data communication or control flow between those + subprograms and other parts of the work. + + The Corresponding Source need not include anything that users + can regenerate automatically from other parts of the Corresponding + Source. + + The Corresponding Source for a work in source code form is that + same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of + copyright on the Program, and are irrevocable provided the stated + conditions are met. This License explicitly affirms your unlimited + permission to run the unmodified Program. The output from running a + covered work is covered by this License only if the output, given its + content, constitutes a covered work. This License acknowledges your + rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not + convey, without conditions so long as your license otherwise remains + in force. You may convey covered works to others for the sole purpose + of having them make modifications exclusively for you, or provide you + with facilities for running those works, provided that you comply with + the terms of this License in conveying all material for which you do + not control copyright. Those thus making or running the covered works + for you must do so exclusively on your behalf, under your direction + and control, on terms that prohibit them from making any copies of + your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under + the conditions stated below. Sublicensing is not allowed; section 10 + makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological + measure under any applicable law fulfilling obligations under article + 11 of the WIPO copyright treaty adopted on 20 December 1996, or + similar laws prohibiting or restricting circumvention of such + measures. 
+ + When you convey a covered work, you waive any legal power to forbid + circumvention of technological measures to the extent such circumvention + is effected by exercising rights under this License with respect to + the covered work, and you disclaim any intention to limit operation or + modification of the work as a means of enforcing, against the work's + users, your or third parties' legal rights to forbid circumvention of + technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you + receive it, in any medium, provided that you conspicuously and + appropriately publish on each copy an appropriate copyright notice; + keep intact all notices stating that this License and any + non-permissive terms added in accord with section 7 apply to the code; + keep intact all notices of the absence of any warranty; and give all + recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, + and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to + produce it from the Program, in the form of source code under the + terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent + works, which are not by their nature extensions of the covered work, + and which are not combined with it such as to form a larger program, + in or on a volume of a storage or distribution medium, is called an + "aggregate" if the compilation and its resulting copyright are not + used to limit the access or legal rights of the compilation's users + beyond what the individual works permit. Inclusion of a covered work + in an aggregate does not cause this License to apply to the other + parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms + of sections 4 and 5, provided that you also convey the + machine-readable Corresponding Source under the terms of this License, + in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded + from the Corresponding Source as a System Library, need not be + included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any + tangible personal property which is normally used for personal, family, + or household purposes, or (2) anything designed or sold for incorporation + into a dwelling. In determining whether a product is a consumer product, + doubtful cases shall be resolved in favor of coverage. For a particular + product received by a particular user, "normally used" refers to a + typical or common use of that class of product, regardless of the status + of the particular user or of the way in which the particular user + actually uses, or expects or is expected to use, the product. A product + is a consumer product regardless of whether the product has substantial + commercial, industrial or non-consumer uses, unless such uses represent + the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, + procedures, authorization keys, or other information required to install + and execute modified versions of a covered work in that User Product from + a modified version of its Corresponding Source. The information must + suffice to ensure that the continued functioning of the modified object + code is in no case prevented or interfered with solely because + modification has been made. 
+ + If you convey an object code work under this section in, or with, or + specifically for use in, a User Product, and the conveying occurs as + part of a transaction in which the right of possession and use of the + User Product is transferred to the recipient in perpetuity or for a + fixed term (regardless of how the transaction is characterized), the + Corresponding Source conveyed under this section must be accompanied + by the Installation Information. But this requirement does not apply + if neither you nor any third party retains the ability to install + modified object code on the User Product (for example, the work has + been installed in ROM). + + The requirement to provide Installation Information does not include a + requirement to continue to provide support service, warranty, or updates + for a work that has been modified or installed by the recipient, or for + the User Product in which it has been modified or installed. Access to a + network may be denied when the modification itself materially and + adversely affects the operation of the network or violates the rules and + protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, + in accord with this section must be in a format that is publicly + documented (and with an implementation available to the public in + source code form), and must require no special password or key for + unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this + License by making exceptions from one or more of its conditions. + Additional permissions that are applicable to the entire Program shall + be treated as though they were included in this License, to the extent + that they are valid under applicable law. If additional permissions + apply only to part of the Program, that part may be used separately + under those permissions, but the entire Program remains governed by + this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option + remove any additional permissions from that copy, or from any part of + it. (Additional permissions may be written to require their own + removal in certain cases when you modify the work.) You may place + additional permissions on material, added by you to a covered work, + for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you + add to a covered work, you may (if authorized by the copyright holders of + that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further + restrictions" within the meaning of section 10. If the Program as you + received it, or any part of it, contains a notice stating that it is + governed by this License along with a term that is a further + restriction, you may remove that term. If a license document contains + a further restriction but permits relicensing or conveying under this + License, you may add to a covered work material governed by the terms + of that license document, provided that the further restriction does + not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you + must place, in the relevant source files, a statement of the + additional terms that apply to those files, or a notice indicating + where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the + form of a separately written license, or stated as exceptions; + the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly + provided under this License. Any attempt otherwise to propagate or + modify it is void, and will automatically terminate your rights under + this License (including any patent licenses granted under the third + paragraph of section 11). + + However, if you cease all violation of this License, then your + license from a particular copyright holder is reinstated (a) + provisionally, unless and until the copyright holder explicitly and + finally terminates your license, and (b) permanently, if the copyright + holder fails to notify you of the violation by some reasonable means + prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is + reinstated permanently if the copyright holder notifies you of the + violation by some reasonable means, this is the first time you have + received notice of violation of this License (for any work) from that + copyright holder, and you cure the violation prior to 30 days after + your receipt of the notice. + + Termination of your rights under this section does not terminate the + licenses of parties who have received copies or rights from you under + this License. 
If your rights have been terminated and not permanently + reinstated, you do not qualify to receive new licenses for the same + material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or + run a copy of the Program. Ancillary propagation of a covered work + occurring solely as a consequence of using peer-to-peer transmission + to receive a copy likewise does not require acceptance. However, + nothing other than this License grants you permission to propagate or + modify any covered work. These actions infringe copyright if you do + not accept this License. Therefore, by modifying or propagating a + covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically + receives a license from the original licensors, to run, modify and + propagate that work, subject to this License. You are not responsible + for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an + organization, or substantially all assets of one, or subdividing an + organization, or merging organizations. If propagation of a covered + work results from an entity transaction, each party to that + transaction who receives a copy of the work also receives whatever + licenses to the work the party's predecessor in interest had or could + give under the previous paragraph, plus a right to possession of the + Corresponding Source of the work from the predecessor in interest, if + the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the + rights granted or affirmed under this License. For example, you may + not impose a license fee, royalty, or other charge for exercise of + rights granted under this License, and you may not initiate litigation + (including a cross-claim or counterclaim in a lawsuit) alleging that + any patent claim is infringed by making, using, selling, offering for + sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this + License of the Program or a work on which the Program is based. The + work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims + owned or controlled by the contributor, whether already acquired or + hereafter acquired, that would be infringed by some manner, permitted + by this License, of making, using, or selling its contributor version, + but do not include claims that would be infringed only as a + consequence of further modification of the contributor version. For + purposes of this definition, "control" includes the right to grant + patent sublicenses in a manner consistent with the requirements of + this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free + patent license under the contributor's essential patent claims, to + make, use, sell, offer for sale, import and otherwise run, modify and + propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express + agreement or commitment, however denominated, not to enforce a patent + (such as an express permission to practice a patent or covenant not to + sue for patent infringement). 
To "grant" such a patent license to a + party means to make such an agreement or commitment not to enforce a + patent against the party. + + If you convey a covered work, knowingly relying on a patent license, + and the Corresponding Source of the work is not available for anyone + to copy, free of charge and under the terms of this License, through a + publicly available network server or other readily accessible means, + then you must either (1) cause the Corresponding Source to be so + available, or (2) arrange to deprive yourself of the benefit of the + patent license for this particular work, or (3) arrange, in a manner + consistent with the requirements of this License, to extend the patent + license to downstream recipients. "Knowingly relying" means you have + actual knowledge that, but for the patent license, your conveying the + covered work in a country, or your recipient's use of the covered work + in a country, would infringe one or more identifiable patents in that + country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or + arrangement, you convey, or propagate by procuring conveyance of, a + covered work, and grant a patent license to some of the parties + receiving the covered work authorizing them to use, propagate, modify + or convey a specific copy of the covered work, then the patent license + you grant is automatically extended to all recipients of the covered + work and works based on it. + + A patent license is "discriminatory" if it does not include within + the scope of its coverage, prohibits the exercise of, or is + conditioned on the non-exercise of one or more of the rights that are + specifically granted under this License. You may not convey a covered + work if you are a party to an arrangement with a third party that is + in the business of distributing software, under which you make payment + to the third party based on the extent of your activity of conveying + the work, and under which the third party grants, to any of the + parties who would receive the covered work from you, a discriminatory + patent license (a) in connection with copies of the covered work + conveyed by you (or copies made from those copies), or (b) primarily + for and in connection with specific products or compilations that + contain the covered work, unless you entered into that arrangement, + or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting + any implied license or other defenses to infringement that may + otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot convey a + covered work so as to satisfy simultaneously your obligations under this + License and any other pertinent obligations, then as a consequence you may + not convey it at all. For example, if you agree to terms that obligate you + to collect a royalty for further conveying from those to whom you convey + the Program, the only way you could satisfy both those terms and this + License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the + Program, your modified version must prominently offer all users + interacting with it remotely through a computer network (if your version + supports such interaction) an opportunity to receive the Corresponding + Source of your version by providing access to the Corresponding Source + from a network server at no charge, through some standard or customary + means of facilitating copying of software. This Corresponding Source + shall include the Corresponding Source for any work covered by version 3 + of the GNU General Public License that is incorporated pursuant to the + following paragraph. + + Notwithstanding any other provision of this License, you have + permission to link or combine any covered work with a work licensed + under version 3 of the GNU General Public License into a single + combined work, and to convey the resulting work. The terms of this + License will continue to apply to the part which is the covered work, + but the work with which it is combined will remain governed by version + 3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of + the GNU Affero General Public License from time to time. Such new versions + will be similar in spirit to the present version, but may differ in detail to + address new problems or concerns. + + Each version is given a distinguishing version number. If the + Program specifies that a certain numbered version of the GNU Affero General + Public License "or any later version" applies to it, you have the + option of following the terms and conditions either of that numbered + version or of any later version published by the Free Software + Foundation. If the Program does not specify a version number of the + GNU Affero General Public License, you may choose any version ever published + by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future + versions of the GNU Affero General Public License can be used, that proxy's + public statement of acceptance of a version permanently authorizes you + to choose that version for the Program. + + Later license versions may give you additional or different + permissions. However, no additional obligations are imposed on any + author or copyright holder as a result of your choosing to follow a + later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY + APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT + HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY + OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, + THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM + IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF + ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING + WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS + THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY + GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE + USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF + DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD + PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), + EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF + SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided + above cannot be given local legal effect according to their terms, + reviewing courts shall apply local law that most closely approximates + an absolute waiver of all civil liability in connection with the + Program, unless a warranty or assumption of liability accompanies a + copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest + possible use to the public, the best way to achieve this is to make it + free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest + to attach them to the start of each source file to most effectively + state the exclusion of warranty; and each file should have at least + the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + + Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer + network, you should also make sure that it provides a way for users to + get its source. For example, if your program is a web application, its + interface could display a "Source" link that leads users to an archive + of the code. There are many ways you could offer source, and different + solutions will be better for different programs; see section 13 for the + specific requirements. + + You should also get your employer (if you work as a programmer) or school, + if any, to sign a "copyright disclaimer" for the program, if necessary. + For more information on this, and how to apply and follow the GNU AGPL, see + . 
+ ## @tanstack/react-virtual MIT License diff --git a/package.json b/package.json index 85a84817a4..0fbd907c48 100644 --- a/package.json +++ b/package.json @@ -117,10 +117,10 @@ "@react-aria/focus": "3.19.1", "@react-aria/utils": "3.25.3", "@react-spring/web": "9.7.5", - "@signalapp/better-sqlite3": "9.0.13", "@signalapp/libsignal-client": "0.67.3", "@signalapp/quill-cjs": "2.1.2", "@signalapp/ringrtc": "2.50.2", + "@signalapp/sqlcipher": "1.0.0", "@tanstack/react-virtual": "3.11.2", "@types/fabric": "4.5.3", "backbone": "1.6.0", @@ -384,7 +384,7 @@ "@indutny/simple-windows-notifications", "@parcel/watcher", "@signalapp/libsignal-client", - "@signalapp/better-sqlite3", + "@signalapp/sqlcipher", "@signalapp/ringrtc", "@swc/core", "bufferutil", @@ -426,7 +426,7 @@ } }, "sign": "./ts/scripts/sign-macos.js", - "singleArchFiles": "node_modules/@signalapp/{libsignal-client/prebuilds/**,ringrtc/build/**}", + "singleArchFiles": "node_modules/@signalapp/{libsignal-client/prebuilds/**,ringrtc/build/**,sqlcipher/prebuilds/**}", "target": [ { "target": "zip", @@ -607,9 +607,7 @@ "node_modules/socks/build/common/*.js", "node_modules/socks/build/client/*.js", "node_modules/smart-buffer/build/*.js", - "!node_modules/@signalapp/better-sqlite3/deps/*", - "!node_modules/@signalapp/better-sqlite3/src/*", - "node_modules/@signalapp/better-sqlite3/build/Release/better_sqlite3.node", + "node_modules/@signalapp/sqlcipher/prebuilds/${platform}-${arch}/*.node", "node_modules/@signalapp/libsignal-client/prebuilds/${platform}-${arch}/*.node", "!node_modules/@signalapp/ringrtc/scripts/*", "node_modules/@signalapp/ringrtc/build/${platform}/*${arch}*.node", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index aa922321dc..eeae08a05b 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -124,9 +124,6 @@ importers: '@react-spring/web': specifier: 9.7.5 version: 9.7.5(react-dom@17.0.2(react@17.0.2))(react@17.0.2) - '@signalapp/better-sqlite3': - specifier: 9.0.13 - version: 9.0.13 '@signalapp/libsignal-client': specifier: 0.67.3 version: 0.67.3 @@ -136,6 +133,9 @@ importers: '@signalapp/ringrtc': specifier: 2.50.2 version: 2.50.2 + '@signalapp/sqlcipher': + specifier: 1.0.0 + version: 1.0.0 '@tanstack/react-virtual': specifier: 3.11.2 version: 3.11.2(react-dom@17.0.2(react@17.0.2))(react@17.0.2) @@ -2529,9 +2529,6 @@ packages: '@sideway/pinpoint@2.0.0': resolution: {integrity: sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==} - '@signalapp/better-sqlite3@9.0.13': - resolution: {integrity: sha512-sl5JJvHz/mGy3JXXcWFq7PxeO/d9SaCjPq/FPc4RCujaTC9JofHLdxQjn2xSSZQBLm28o2as3p6hD9eDk+BVNw==} - '@signalapp/libsignal-client@0.60.2': resolution: {integrity: sha512-tU4kNP/yCwkFntb2ahXOSQJtzdy+YifAB2yv5hw0qyKSidRHLn6bYiz4Zo2tjxLDRoBLAUxCRsQramStiqNZdA==} @@ -2551,6 +2548,9 @@ packages: '@signalapp/ringrtc@2.50.2': resolution: {integrity: sha512-fZgfz4asC4CZsvrhxYMDVbIz0iffde9GhWEgIBpAxgEiLpuURx9YRK1ghnn9eonfZNEXwhrTUXUBlC6xYyy1rQ==} + '@signalapp/sqlcipher@1.0.0': + resolution: {integrity: sha512-3+4Y/0Tf0gis8gaZrD58cnzVVRzeDtBCPhS3gUdvxgkEXMv4guSJ+MDfHZLQKmqvuPmjTlt+YOsTjvLCWruFaQ==} + '@sinclair/typebox@0.27.8': resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==} @@ -12253,11 +12253,6 @@ snapshots: '@sideway/pinpoint@2.0.0': {} - '@signalapp/better-sqlite3@9.0.13': - dependencies: - bindings: 1.5.0 - tar: 6.2.1 - '@signalapp/libsignal-client@0.60.2': dependencies: node-gyp-build: 4.8.4 @@ -12309,6 +12304,11 @@ snapshots: 
transitivePeerDependencies: - supports-color + '@signalapp/sqlcipher@1.0.0': + dependencies: + node-addon-api: 8.3.0 + node-gyp-build: 4.8.4 + '@sinclair/typebox@0.27.8': {} '@sindresorhus/is@4.6.0': {} diff --git a/scripts/esbuild.js b/scripts/esbuild.js index e37be142e6..c1aca126d8 100644 --- a/scripts/esbuild.js +++ b/scripts/esbuild.js @@ -34,7 +34,7 @@ const bundleDefaults = { '@signalapp/libsignal-client', '@signalapp/libsignal-client/zkgroup', '@signalapp/ringrtc', - '@signalapp/better-sqlite3', + '@signalapp/sqlcipher', '@indutny/mac-screen-share', 'electron', 'fs-xattr', diff --git a/ts/SignalProtocolStore.ts b/ts/SignalProtocolStore.ts index 927720b636..30b540fa72 100644 --- a/ts/SignalProtocolStore.ts +++ b/ts/SignalProtocolStore.ts @@ -2418,12 +2418,6 @@ export class SignalProtocolStore extends EventEmitter { ); } - getUnprocessedById(id: string): Promise { - return this.withZone(GLOBAL_ZONE, 'getUnprocessedById', async () => { - return DataReader.getUnprocessedById(id); - }); - } - addUnprocessed( data: UnprocessedType, { zone = GLOBAL_ZONE }: SessionTransactionOptions = {} diff --git a/ts/sql/Interface.ts b/ts/sql/Interface.ts index 5e9d8a63c3..0acb43578e 100644 --- a/ts/sql/Interface.ts +++ b/ts/sql/Interface.ts @@ -1,7 +1,7 @@ // Copyright 2020 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { ReadonlyDeep } from 'type-fest'; import type { ConversationAttributesType, @@ -236,6 +236,13 @@ export type SentProtoType = { urgent: boolean; hasPniSignatureMessage: boolean; }; +export type SentProtoDBType = { + contentHint: number; + proto: Uint8Array; + timestamp: number; + urgent: number; + hasPniSignatureMessage: number; +}; export type SentProtoWithMessageIdsType = SentProtoType & { messageIds: Array; }; @@ -676,14 +683,14 @@ type ReadableInterface = { ) => Array; getUnprocessedCount: () => number; - getUnprocessedById: (id: string) => UnprocessedType | undefined; - getAttachmentDownloadJob( + // Test-only + _getAttachmentDownloadJob( job: Pick< AttachmentDownloadJobType, 'messageId' | 'attachmentType' | 'digest' > - ): AttachmentDownloadJobType; + ): AttachmentDownloadJobType | undefined; getBackupCdnObjectMetadata: ( mediaId: string diff --git a/ts/sql/Server.ts b/ts/sql/Server.ts index bb15f4d795..66c20f7b48 100644 --- a/ts/sql/Server.ts +++ b/ts/sql/Server.ts @@ -3,8 +3,8 @@ /* eslint-disable camelcase */ -import type { Database, Statement } from '@signalapp/better-sqlite3'; -import SQL from '@signalapp/better-sqlite3'; +// TODO(indutny): format queries +import SQL from '@signalapp/sqlcipher'; import { randomBytes } from 'crypto'; import { mkdirSync, rmSync } from 'node:fs'; import { join } from 'path'; @@ -30,7 +30,10 @@ import { } from 'lodash'; import { parseBadgeCategory } from '../badges/BadgeCategory'; -import { parseBadgeImageTheme } from '../badges/BadgeImageTheme'; +import { + parseBadgeImageTheme, + type BadgeImageTheme, +} from '../badges/BadgeImageTheme'; import type { BadgeImageType, BadgeType } from '../badges/types'; import type { StoredJob } from '../jobs/types'; import { formatCountForLogging } from '../logging/formatCountForLogging'; @@ -55,8 +58,9 @@ import { isNormalNumber } from '../util/isNormalNumber'; import { isNotNil } from '../util/isNotNil'; import { parseIntOrThrow } from '../util/parseIntOrThrow'; import { updateSchema } from './migrations'; -import type { ArrayQuery, EmptyQuery, JSONRows, Query } from 
'./util'; +import type { JSONRows } from './util'; import { + // TODO(indutny): disable caching for final batch batchMultiVarQuery, bulkAdd, createOrUpdate, @@ -131,7 +135,6 @@ import type { GetRecentStoryRepliesOptionsType, GetUnreadByConversationAndMarkReadResultType, IdentityKeyIdType, - InstalledStickerPackType, ItemKeyType, MessageAttachmentsCursorType, MessageCursorType, @@ -147,6 +150,7 @@ import type { SenderKeyType, SentMessageDBType, SentMessagesType, + SentProtoDBType, SentProtoType, SentProtoWithMessageIdsType, SentRecipientsDBType, @@ -228,6 +232,55 @@ type StickerRow = Readonly<{ localKey: string | null; size: number | null; }>; +type StorageServiceRowFields = Readonly<{ + storageID?: string; + storageVersion?: number; + storageUnknownFields?: Uint8Array | null; + storageNeedsSync: number; +}>; +type InstalledStickerPackRow = Readonly<{ + id: string; + key: string; + + position?: number | null; +}> & + StorageServiceRowFields; +type UninstalledStickerPackRow = Readonly<{ + id: string; + + uninstalledAt: number; +}> & + StorageServiceRowFields; +type StickerPackRow = InstalledStickerPackRow & + Readonly<{ + attemptedStatus?: 'downloaded' | 'installed' | 'ephemeral'; + author: string; + coverStickerId: number; + createdAt: number; + downloadAttempts: number; + installedAt?: number; + lastUsed?: number; + status: StickerPackStatusType; + stickerCount: number; + stickers: string; + title: string; + }>; +type AttachmentDownloadJobRow = Readonly<{ + messageId: string; + attachmentType: string; + digest: string; + receivedAt: number; + sentAt: number; + contentType: string; + size: number; + active: number; + attempts: number; + retryAfter: number; + lastAttemptTimestamp: number; + attachmentJson: string; + ciphertextSize: number; + source: string; +}>; // Because we can't force this module to conform to an interface, we narrow our exports // to this one default export, which does conform to the interface. 
@@ -323,8 +376,7 @@ export const DataReader: ServerReadableInterface = { getMostRecentAddressableMessages, getMostRecentAddressableNondisappearingMessages, getUnprocessedCount, - getUnprocessedById, - getAttachmentDownloadJob, + _getAttachmentDownloadJob, getStickerCount, getAllStickerPacks, @@ -551,34 +603,6 @@ export const DataWriter: ServerWritableInterface = { runCorruptionChecks, }; -type DatabaseQueryCache = Map>>; - -const statementCache = new WeakMap(); - -export function prepare | Record>( - db: ReadableDB, - query: string, - { pluck = false }: { pluck?: boolean } = {} -): Statement { - let dbCache = statementCache.get(db); - if (!dbCache) { - dbCache = new Map(); - statementCache.set(db, dbCache); - } - - const cacheKey = `${pluck}:${query}`; - let result = dbCache.get(cacheKey) as Statement; - if (!result) { - result = db.prepare(query); - if (pluck === true) { - result.pluck(); - } - dbCache.set(cacheKey, result); - } - - return result; -} - const MESSAGE_COLUMNS_FRAGMENTS = MESSAGE_COLUMNS.map( column => new QueryFragment(column, []) ); @@ -643,17 +667,13 @@ function migrateSchemaVersion(db: WritableDB): void { setUserVersion(db, newUserVersion); } -function openAndMigrateDatabase( - filePath: string, - key: string, - readonly: boolean -): WritableDB { +function openAndMigrateDatabase(filePath: string, key: string): WritableDB { let db: WritableDB | undefined; // First, we try to open the database without any cipher changes try { db = new SQL(filePath, { - readonly, + cacheStatements: true, }) as WritableDB; keyDatabase(db, key); switchToWAL(db); @@ -698,16 +718,13 @@ function openAndMigrateDatabase( } const INVALID_KEY = /[^0-9A-Fa-f]/; -function openAndSetUpSQLCipher( - filePath: string, - { key, readonly }: { key: string; readonly: boolean } -) { +function openAndSetUpSQLCipher(filePath: string, { key }: { key: string }) { const match = INVALID_KEY.exec(key); if (match) { throw new Error(`setupSQLCipher: key '${key}' is not valid`); } - const db = openAndMigrateDatabase(filePath, key, readonly); + const db = openAndMigrateDatabase(filePath, key); try { // Because foreign key support is not enabled by default! 
@@ -741,10 +758,6 @@ let logger = consoleLogger; let databaseFilePath: string | undefined; let indexedDBPath: string | undefined; -SQL.setLogHandler((code, value) => { - logger.warn(`Database log code=${code}: ${value}`); -}); - export function initialize({ configDir, key, @@ -778,7 +791,6 @@ export function initialize({ try { db = openAndSetUpSQLCipher(databaseFilePath, { key, - readonly: false, }); // For profiling use: @@ -918,9 +930,7 @@ function removeKyberPreKeysByServiceId( db: WritableDB, serviceId: ServiceIdString ): void { - db.prepare( - 'DELETE FROM kyberPreKeys WHERE ourServiceId IS $serviceId;' - ).run({ + db.prepare('DELETE FROM kyberPreKeys WHERE ourServiceId IS $serviceId;').run({ serviceId, }); } @@ -954,9 +964,7 @@ function removePreKeysByServiceId( db: WritableDB, serviceId: ServiceIdString ): void { - db.prepare( - 'DELETE FROM preKeys WHERE ourServiceId IS $serviceId;' - ).run({ + db.prepare('DELETE FROM preKeys WHERE ourServiceId IS $serviceId;').run({ serviceId, }); } @@ -996,18 +1004,18 @@ function removeSignedPreKeysByServiceId( db: WritableDB, serviceId: ServiceIdString ): void { - db.prepare( - 'DELETE FROM signedPreKeys WHERE ourServiceId IS $serviceId;' - ).run({ - serviceId, - }); + db.prepare('DELETE FROM signedPreKeys WHERE ourServiceId IS $serviceId;').run( + { + serviceId, + } + ); } function removeAllSignedPreKeys(db: WritableDB): number { return removeAllFromTable(db, SIGNED_PRE_KEYS_TABLE); } function getAllSignedPreKeys(db: ReadableDB): Array { const rows: JSONRows = db - .prepare( + .prepare( ` SELECT json FROM signedPreKeys @@ -1034,7 +1042,7 @@ function getItemById( } function getAllItems(db: ReadableDB): StoredAllItemsType { const rows: JSONRows = db - .prepare('SELECT json FROM items ORDER BY id ASC;') + .prepare('SELECT json FROM items ORDER BY id ASC;') .all(); type RawItemType = { id: ItemKeyType; value: unknown }; @@ -1060,8 +1068,7 @@ function removeAllItems(db: WritableDB): number { } function createOrUpdateSenderKey(db: WritableDB, key: SenderKeyType): void { - prepare( - db, + db.prepare( ` INSERT OR REPLACE INTO senderKeys ( id, @@ -1083,22 +1090,20 @@ function getSenderKeyById( db: ReadableDB, id: SenderKeyIdType ): SenderKeyType | undefined { - const row = prepare(db, 'SELECT * FROM senderKeys WHERE id = $id').get({ - id, - }); - - return row; + return db + .prepare('SELECT * FROM senderKeys WHERE id = $id') + .get({ + id, + }); } function removeAllSenderKeys(db: WritableDB): void { - prepare(db, 'DELETE FROM senderKeys').run(); + db.prepare('DELETE FROM senderKeys').run(); } function getAllSenderKeys(db: ReadableDB): Array { - const rows = prepare(db, 'SELECT * FROM senderKeys').all(); - - return rows; + return db.prepare('SELECT * FROM senderKeys').all(); } function removeSenderKeyById(db: WritableDB, id: SenderKeyIdType): void { - prepare(db, 'DELETE FROM senderKeys WHERE id = $id').run({ id }); + db.prepare('DELETE FROM senderKeys WHERE id = $id').run({ id }); } function insertSentProto( @@ -1115,9 +1120,9 @@ function insertSentProto( return db.transaction(() => { // 1. Insert the payload, fetching its primary key id - const info = prepare( - db, - ` + const info = db + .prepare( + ` INSERT INTO sendLogPayloads ( contentHint, proto, @@ -1132,19 +1137,19 @@ function insertSentProto( $hasPniSignatureMessage ); ` - ).run({ - ...proto, - urgent: proto.urgent ? 1 : 0, - hasPniSignatureMessage: proto.hasPniSignatureMessage ? 1 : 0, - }); + ) + .run({ + ...proto, + urgent: proto.urgent ? 
1 : 0, + hasPniSignatureMessage: proto.hasPniSignatureMessage ? 1 : 0, + }); const id = parseIntOrThrow( info.lastInsertRowid, 'insertSentProto/lastInsertRowid' ); // 2. Insert a record for each recipient device. - const recipientStatement = prepare( - db, + const recipientStatement = db.prepare( ` INSERT INTO sendLogRecipients ( payloadId, @@ -1176,8 +1181,7 @@ function insertSentProto( } // 2. Insert a record for each message referenced by this payload. - const messageStatement = prepare( - db, + const messageStatement = db.prepare( ` INSERT INTO sendLogMessageIds ( payloadId, @@ -1201,8 +1205,7 @@ function insertSentProto( } function deleteSentProtosOlderThan(db: WritableDB, timestamp: number): void { - prepare( - db, + db.prepare( ` DELETE FROM sendLogPayloads WHERE @@ -1215,8 +1218,7 @@ function deleteSentProtosOlderThan(db: WritableDB, timestamp: number): void { } function deleteSentProtoByMessageId(db: WritableDB, messageId: string): void { - prepare( - db, + db.prepare( ` DELETE FROM sendLogPayloads WHERE id IN ( SELECT payloadId FROM sendLogMessageIds @@ -1241,8 +1243,7 @@ function insertProtoRecipients( } ): void { db.transaction(() => { - const statement = prepare( - db, + const statement = db.prepare( ` INSERT INTO sendLogRecipients ( payloadId, @@ -1284,9 +1285,9 @@ function deleteSentProtoRecipient( const { timestamp, recipientServiceId, deviceId } = item; // 1. Figure out what payload we're talking about. - const rows = prepare( - db, - ` + const rows = db + .prepare( + ` SELECT sendLogPayloads.id, sendLogPayloads.hasPniSignatureMessage FROM sendLogPayloads INNER JOIN sendLogRecipients @@ -1296,7 +1297,8 @@ function deleteSentProtoRecipient( sendLogRecipients.recipientServiceId = $recipientServiceId AND sendLogRecipients.deviceId = $deviceId; ` - ).all({ timestamp, recipientServiceId, deviceId }); + ) + .all({ timestamp, recipientServiceId, deviceId }); if (!rows.length) { continue; } @@ -1310,8 +1312,7 @@ function deleteSentProtoRecipient( const { id, hasPniSignatureMessage } = rows[0]; // 2. Delete the recipient/device combination in question. - prepare( - db, + db.prepare( ` DELETE FROM sendLogRecipients WHERE @@ -1322,14 +1323,16 @@ function deleteSentProtoRecipient( ).run({ id, recipientServiceId, deviceId }); // 3. See how many more recipient devices there were for this payload. - const remainingDevices = prepare( - db, - ` + const remainingDevices = + db + .prepare( + ` SELECT count(1) FROM sendLogRecipients WHERE payloadId = $id AND recipientServiceId = $recipientServiceId; `, - { pluck: true } - ).get({ id, recipientServiceId }); + { pluck: true } + ) + .get({ id, recipientServiceId }) ?? 0; // 4. If there are no remaining devices for this recipient and we included // the pni signature in the proto - return the recipient to the caller. @@ -1348,11 +1351,12 @@ function deleteSentProtoRecipient( ); // 5. See how many more recipients there were for this payload. 
- const remainingTotal = prepare( - db, - 'SELECT count(1) FROM sendLogRecipients WHERE payloadId = $id;', - { pluck: true } - ).get({ id }); + const remainingTotal = db + .prepare( + 'SELECT count(1) FROM sendLogRecipients WHERE payloadId = $id;', + { pluck: true } + ) + .get({ id }); strictAssert( isNumber(remainingTotal), @@ -1369,7 +1373,7 @@ function deleteSentProtoRecipient( `Deleting proto payload for timestamp ${timestamp}` ); - prepare(db, 'DELETE FROM sendLogPayloads WHERE id = $id;').run({ + db.prepare('DELETE FROM sendLogPayloads WHERE id = $id;').run({ id, }); } @@ -1395,9 +1399,9 @@ function getSentProtoByRecipient( deleteSentProtosOlderThan(db, oneDayAgo); - const row = prepare( - db, - ` + const row = db + .prepare( + ` SELECT sendLogPayloads.*, GROUP_CONCAT(DISTINCT sendLogMessageIds.messageId) AS messageIds @@ -1409,10 +1413,15 @@ function getSentProtoByRecipient( sendLogRecipients.recipientServiceId = $recipientServiceId GROUP BY sendLogPayloads.id; ` - ).get({ - timestamp, - recipientServiceId, - }); + ) + .get< + SentProtoDBType & { + messageIds: string; + } + >({ + timestamp, + recipientServiceId, + }); if (!row) { return undefined; @@ -1429,10 +1438,12 @@ function getSentProtoByRecipient( }; } function removeAllSentProtos(db: WritableDB): void { - prepare(db, 'DELETE FROM sendLogPayloads;').run(); + db.prepare('DELETE FROM sendLogPayloads;').run(); } function getAllSentProtos(db: ReadableDB): Array { - const rows = prepare(db, 'SELECT * FROM sendLogPayloads;').all(); + const rows = db + .prepare('SELECT * FROM sendLogPayloads;') + .all(); return rows.map(row => ({ ...row, @@ -1445,20 +1456,14 @@ function getAllSentProtos(db: ReadableDB): Array { function _getAllSentProtoRecipients( db: ReadableDB ): Array { - const rows = prepare( - db, - 'SELECT * FROM sendLogRecipients;' - ).all(); - - return rows; + return db + .prepare('SELECT * FROM sendLogRecipients;') + .all(); } function _getAllSentProtoMessageIds(db: ReadableDB): Array { - const rows = prepare( - db, - 'SELECT * FROM sendLogMessageIds;' - ).all(); - - return rows; + return db + .prepare('SELECT * FROM sendLogMessageIds;') + .all(); } const SESSIONS_TABLE = 'sessions'; @@ -1476,8 +1481,7 @@ function createOrUpdateSession(db: WritableDB, data: SessionType): void { ); } - prepare( - db, + db.prepare( ` INSERT OR REPLACE INTO sessions ( id, @@ -1550,7 +1554,7 @@ function removeSessionsByConversation( db: WritableDB, conversationId: string ): void { - db.prepare( + db.prepare( ` DELETE FROM sessions WHERE conversationId = $conversationId; @@ -1563,7 +1567,7 @@ function removeSessionsByServiceId( db: WritableDB, serviceId: ServiceIdString ): void { - db.prepare( + db.prepare( ` DELETE FROM sessions WHERE serviceId = $serviceId; @@ -1611,8 +1615,7 @@ function saveConversation(db: WritableDB, data: ConversationType): void { const membersList = getConversationMembersList(data); - prepare( - db, + db.prepare( ` INSERT INTO conversations ( id, @@ -1699,8 +1702,7 @@ function updateConversation(db: WritableDB, data: ConversationType): void { const membersList = getConversationMembersList(data); - prepare( - db, + db.prepare( ` UPDATE conversations SET json = $json, @@ -1753,7 +1755,7 @@ function updateConversations( function removeConversations(db: WritableDB, ids: ReadonlyArray): void { // Our node interface doesn't seem to allow you to replace one single ? 
with an array - db.prepare( + db.prepare( ` DELETE FROM conversations WHERE id IN ( ${ids.map(() => '?').join(', ')} ); @@ -1763,7 +1765,7 @@ function removeConversations(db: WritableDB, ids: ReadonlyArray): void { function removeConversation(db: WritableDB, id: Array | string): void { if (!Array.isArray(id)) { - db.prepare('DELETE FROM conversations WHERE id = $id;').run({ + db.prepare('DELETE FROM conversations WHERE id = $id;').run({ id, }); @@ -1778,16 +1780,16 @@ function removeConversation(db: WritableDB, id: Array | string): void { } function _removeAllConversations(db: WritableDB): void { - db.prepare('DELETE from conversations;').run(); + db.prepare('DELETE from conversations;').run(); } function getConversationById( db: ReadableDB, id: string ): ConversationType | undefined { - const row: { json: string } = db - .prepare('SELECT json FROM conversations WHERE id = $id;') - .get({ id }); + const row = db + .prepare('SELECT json FROM conversations WHERE id = $id;') + .get<{ json: string }>({ id }); if (!row) { return undefined; @@ -1798,7 +1800,7 @@ function getConversationById( function getAllConversations(db: ReadableDB): Array { const rows: ConversationRows = db - .prepare( + .prepare( ` SELECT json, profileLastFetchedAt, expireTimerVersion FROM conversations @@ -1812,7 +1814,7 @@ function getAllConversations(db: ReadableDB): Array { function getAllConversationIds(db: ReadableDB): Array { const rows: Array<{ id: string }> = db - .prepare( + .prepare( ` SELECT id FROM conversations ORDER BY id ASC; ` @@ -1827,7 +1829,7 @@ function getAllGroupsInvolvingServiceId( serviceId: ServiceIdString ): Array { const rows: ConversationRows = db - .prepare( + .prepare( ` SELECT json, profileLastFetchedAt, expireTimerVersion FROM conversations WHERE @@ -1891,7 +1893,7 @@ function searchMessages( ); writable - .prepare( + .prepare( ` INSERT INTO tmp_results (rowid) SELECT @@ -1906,7 +1908,7 @@ function searchMessages( if (conversationId === undefined) { writable - .prepare( + .prepare( ` INSERT INTO tmp_filtered_results (rowid) SELECT @@ -1922,7 +1924,7 @@ function searchMessages( .run({ limit }); } else { writable - .prepare( + .prepare( ` INSERT INTO tmp_filtered_results (rowid) SELECT @@ -2029,16 +2031,20 @@ function searchMessages( } function getStoryCount(db: ReadableDB, conversationId: string): number { - return db - .prepare( - ` - SELECT count(1) - FROM messages - WHERE conversationId = $conversationId AND isStory = 1; + return ( + db + .prepare( ` - ) - .pluck() - .get({ conversationId }); + SELECT count(1) + FROM messages + WHERE conversationId = $conversationId AND isStory = 1; + `, + { + pluck: true, + } + ) + .get({ conversationId }) ?? 0 + ); } function getMessageCount(db: ReadableDB, conversationId?: string): number { @@ -2047,17 +2053,19 @@ function getMessageCount(db: ReadableDB, conversationId?: string): number { } const count = db - .prepare( + .prepare( ` - SELECT count(1) - FROM messages - WHERE conversationId = $conversationId; - ` + SELECT count(1) + FROM messages + WHERE conversationId = $conversationId; + `, + { + pluck: true, + } ) - .pluck() - .get({ conversationId }); + .get({ conversationId }); - return count; + return count ?? 
0; } // Note: we really only use this in 1:1 conversations, where story replies are always @@ -2066,20 +2074,22 @@ function hasUserInitiatedMessages( db: ReadableDB, conversationId: string ): boolean { - const exists: number = db - .prepare( - ` - SELECT EXISTS( - SELECT 1 FROM messages - INDEXED BY message_user_initiated - WHERE - conversationId IS $conversationId AND - isUserInitiatedMessage IS 1 - ); + const exists = db + .prepare( ` + SELECT EXISTS( + SELECT 1 FROM messages + INDEXED BY message_user_initiated + WHERE + conversationId IS $conversationId AND + isUserInitiatedMessage IS 1 + ); + `, + { + pluck: true, + } ) - .pluck() - .get({ conversationId }); + .get({ conversationId }); return exists !== 0; } @@ -2101,7 +2111,7 @@ export function getMostRecentAddressableMessages( LIMIT ${limit}; `; - const rows = db.prepare(query).all(parameters); + const rows = db.prepare(query).all(parameters); return rows.map(row => hydrateMessage(row)); } @@ -2124,7 +2134,7 @@ export function getMostRecentAddressableNondisappearingMessages( LIMIT ${limit}; `; - const rows = db.prepare(query).all(parameters); + const rows = db.prepare(query).all(parameters); return rows.map(row => hydrateMessage(row)); } @@ -2220,7 +2230,12 @@ export function dequeueOldestSyncTasks( ${limit} `; - const rows = db.prepare(selectAllQuery).all(selectAllParams); + const rows = db.prepare(selectAllQuery).all< + { + rowid: number; + data: string; + } & SyncTaskType + >(selectAllParams); if (!rows.length) { return { tasks: [], lastRowId: null }; } @@ -2252,12 +2267,15 @@ export function dequeueOldestSyncTasks( RETURNING id, attempts; `; - const res = db.prepare(updateQuery).raw().all(updateParams) as Array< - [string, number] - >; + const res = db.prepare(updateQuery).all<{ + id: string; + attempts: number; + }>(updateParams); if (Array.isArray(res)) { - const idToAttempts = new Map(res); + const idToAttempts = new Map( + res.map(({ id, attempts }) => [id, attempts]) + ); tasks = tasks.map(task => { const { id } = task; const attempts = idToAttempts.get(id) ?? 
task.attempts; @@ -2390,8 +2408,7 @@ export function saveMessage( } satisfies Omit; if (id && !forceSave) { - prepare( - db, + db.prepare( ` UPDATE messages SET ${MESSAGE_COLUMNS.map(name => `${name} = $${name}`).join(', ')} @@ -2408,8 +2425,7 @@ export function saveMessage( const createdId = id || generateMessageId(data.received_at).id; - prepare( - db, + db.prepare( ` INSERT INTO messages ( ${MESSAGE_COLUMNS.join(', ')} @@ -2475,11 +2491,11 @@ function saveMessagesIndividually( } function removeMessage(db: WritableDB, id: string): void { - db.prepare('DELETE FROM messages WHERE id = $id;').run({ id }); + db.prepare('DELETE FROM messages WHERE id = $id;').run({ id }); } function removeMessagesBatch(db: WritableDB, ids: ReadonlyArray): void { - db.prepare( + db.prepare( ` DELETE FROM messages WHERE id IN ( ${ids.map(() => '?').join(', ')} ); @@ -2496,14 +2512,14 @@ export function getMessageById( id: string ): MessageType | undefined { const row = db - .prepare( + .prepare( ` SELECT ${MESSAGE_COLUMNS.join(', ')} FROM messages WHERE id = $id; ` ) - .get({ + .get({ id, }); @@ -2522,7 +2538,7 @@ function getMessagesById( db, messageIds, (batch: ReadonlyArray): Array => { - const query = db.prepare( + const query = db.prepare( ` SELECT ${MESSAGE_COLUMNS.join(', ')} FROM messages @@ -2538,7 +2554,7 @@ function getMessagesById( function _getAllMessages(db: ReadableDB): Array { const rows: Array = db - .prepare( + .prepare( ` SELECT ${MESSAGE_COLUMNS.join(', ')} FROM messages ORDER BY id ASC @@ -2557,7 +2573,7 @@ function _removeAllMessages(db: WritableDB): void { function getAllMessageIds(db: ReadableDB): Array { const rows: Array<{ id: string }> = db - .prepare('SELECT id FROM messages ORDER BY id ASC;') + .prepare('SELECT id FROM messages ORDER BY id ASC;') .all(); return rows.map(row => row.id); @@ -2577,21 +2593,22 @@ function getMessageBySender( sent_at: number; } ): MessageType | undefined { - const rows: Array = prepare( - db, - ` + const rows: Array = db + .prepare( + ` SELECT ${MESSAGE_COLUMNS.join(', ')} FROM messages WHERE (source = $source OR sourceServiceId = $sourceServiceId) AND sourceDevice = $sourceDevice AND sent_at = $sent_at LIMIT 2; ` - ).all({ - source: source || null, - sourceServiceId: sourceServiceId || null, - sourceDevice: sourceDevice || null, - sent_at, - }); + ) + .all({ + source: source || null, + sourceServiceId: sourceServiceId || null, + sourceDevice: sourceDevice || null, + sent_at, + }); if (rows.length > 1) { logger.warn('getMessageBySender: More than one message found for', { @@ -2618,11 +2635,11 @@ export function _storyIdPredicate( // always be true. We don't just return TRUE because we want to use our passed-in // $storyId parameter. if (includeStoryReplies && storyId === undefined) { - return sqlFragment`${storyId} IS NULL`; + return sqlFragment`NULL IS NULL`; } // In contrast to: replies to a specific story - return sqlFragment`storyId IS ${storyId}`; + return sqlFragment`storyId IS ${storyId ?? 
null}`; } function getUnreadByConversationAndMarkRead( @@ -2682,7 +2699,9 @@ function getUnreadByConversationAndMarkRead( ORDER BY received_at DESC, sent_at DESC; `; - const rows = db.prepare(selectQuery).all(selectParams); + const rows = db + .prepare(selectQuery) + .all(selectParams); const statusJsonPatch = JSON.stringify({ readStatus: ReadStatus.Read, @@ -2738,7 +2757,7 @@ function getUnreadReactionsAndMarkRead( ): Array { return db.transaction(() => { const unreadMessages: Array = db - .prepare( + .prepare( ` SELECT reactions.rowid, targetAuthorAci, targetTimestamp, messageId FROM reactions @@ -2760,7 +2779,7 @@ function getUnreadReactionsAndMarkRead( const idsToUpdate = unreadMessages.map(item => item.rowid); batchMultiVarQuery(db, idsToUpdate, (ids: ReadonlyArray): void => { - db.prepare( + db.prepare( ` UPDATE reactions SET unread = 0 @@ -2792,7 +2811,7 @@ function markReactionAsRead( LIMIT 1; ` ) - .get({ + .get({ targetAuthorAci: targetAuthorServiceId, targetTimestamp, }); @@ -2904,10 +2923,10 @@ function removeReactionFromConversation( } function _getAllReactions(db: ReadableDB): Array { - return db.prepare('SELECT * from reactions;').all(); + return db.prepare('SELECT * from reactions;').all(); } function _removeAllReactions(db: WritableDB): void { - db.prepare('DELETE from reactions;').run(); + db.prepare('DELETE from reactions;').run(); } enum AdjacentDirection { @@ -2938,7 +2957,7 @@ function getRecentStoryReplies( ${sqlJoin(MESSAGE_COLUMNS_FRAGMENTS)} FROM messages WHERE - (${messageId} IS NULL OR id IS NOT ${messageId}) AND + (${messageId ?? null} IS NULL OR id IS NOT ${messageId ?? null}) AND isStory IS 0 AND storyId IS ${storyId} AND ( @@ -3002,7 +3021,7 @@ function getAdjacentMessagesByConversation( conversationId = ${conversationId} AND ${ requireDifferentMessage - ? sqlFragment`(${messageId} IS NULL OR id IS NOT ${messageId}) AND` + ? sqlFragment`(${messageId ?? null} IS NULL OR id IS NOT ${messageId ?? null}) AND` : sqlFragment`` } ${ @@ -3065,7 +3084,7 @@ function getAdjacentMessagesByConversation( const [query, params] = sql`${template}`; - const results = db.prepare(query).all(params); + const results = db.prepare(query).all(params); if (direction === AdjacentDirection.Older) { results.reverse(); @@ -3100,20 +3119,24 @@ function getAllStories( FROM messages WHERE isStory = 1 AND - (${conversationId} IS NULL OR conversationId IS ${conversationId}) AND - (${sourceServiceId} IS NULL OR sourceServiceId IS ${sourceServiceId}) + (${conversationId ?? null} IS NULL OR + conversationId IS ${conversationId ?? null}) AND + (${sourceServiceId ?? null} IS NULL OR + sourceServiceId IS ${sourceServiceId ?? 
null}) ORDER BY received_at ASC, sent_at ASC; `; - const rows = db.prepare(storiesQuery).all(storiesParams); + const rows = db + .prepare(storiesQuery) + .all(storiesParams); const [repliesQuery, repliesParams] = sql` SELECT DISTINCT storyId FROM messages WHERE storyId IS NOT NULL `; - const replies: ReadonlyArray<{ - storyId: string; - }> = db.prepare(repliesQuery).all(repliesParams); + const replies = db + .prepare(repliesQuery, { pluck: true }) + .all(repliesParams); const [repliesFromSelfQuery, repliesFromSelfParams] = sql` SELECT DISTINCT storyId @@ -3123,14 +3146,14 @@ function getAllStories( type IS 'outgoing' ) `; - const repliesFromSelf: ReadonlyArray<{ - storyId: string; - }> = db.prepare(repliesFromSelfQuery).all(repliesFromSelfParams); + const repliesFromSelf = db + .prepare(repliesFromSelfQuery, { + pluck: true, + }) + .all(repliesFromSelfParams); - const repliesLookup = new Set(replies.map(row => row.storyId)); - const repliesFromSelfLookup = new Set( - repliesFromSelf.map(row => row.storyId) - ); + const repliesLookup = new Set(replies); + const repliesFromSelfLookup = new Set(repliesFromSelf); return rows.map(row => ({ ...hydrateMessage(row), @@ -3169,7 +3192,11 @@ function getOldestMessageForConversation( LIMIT 1; `; - const row = db.prepare(query).get(params); + const row = db.prepare(query).get<{ + received_at: number; + sent_at: number; + id: string; + }>(params); if (!row) { return undefined; @@ -3196,7 +3223,11 @@ function getNewestMessageForConversation( ORDER BY received_at DESC, sent_at DESC LIMIT 1; `; - const row = db.prepare(query).get(params); + const row = db.prepare(query).get<{ + received_at: number; + sent_at: number; + id: string; + }>(params); if (!row) { return undefined; @@ -3239,7 +3270,7 @@ function getMessagesBetween( ORDER BY received_at ASC, sent_at ASC; `; - const rows = db.prepare(query).all(params); + const rows = db.prepare(query).all<{ id: string }>(params); return rows.map(row => row.id); } @@ -3279,7 +3310,11 @@ function getNearbyMessageFromDeletedSet( LIMIT 1 `; - return db.prepare(query).pluck().get(params); + return db + .prepare(query, { + pluck: true, + }) + .get(params); } const after = runQuery(true); @@ -3305,9 +3340,9 @@ function getLastConversationActivity( includeStoryReplies: boolean; } ): MessageType | undefined { - const row = prepare( - db, - ` + const row = db + .prepare( + ` SELECT ${MESSAGE_COLUMNS.join(', ')} FROM messages INDEXED BY messages_activity WHERE @@ -3319,9 +3354,10 @@ function getLastConversationActivity( ORDER BY received_at DESC, sent_at DESC LIMIT 1; ` - ).get({ - conversationId, - }); + ) + .get({ + conversationId, + }); if (!row) { return undefined; @@ -3343,9 +3379,9 @@ function getLastConversationPreview( ? 'messages_preview' : 'messages_preview_without_story'; - const row: MessageTypeUnhydrated | undefined = prepare( - db, - ` + const row: MessageTypeUnhydrated | undefined = db + .prepare( + ` SELECT ${MESSAGE_COLUMNS.join(', ')}, expiresAt FROM ( SELECT ${MESSAGE_COLUMNS.join(', ')}, expiresAt FROM messages INDEXED BY ${index} @@ -3359,10 +3395,11 @@ function getLastConversationPreview( WHERE likely(expiresAt > $now) LIMIT 1 ` - ).get({ - conversationId, - now: Date.now(), - }); + ) + .get({ + conversationId, + now: Date.now(), + }); return row ? 
hydrateMessage(row) : undefined; } @@ -3401,7 +3438,7 @@ function getLastConversationMessage( } ): MessageType | undefined { const row = db - .prepare( + .prepare( ` SELECT ${MESSAGE_COLUMNS.join(', ')} FROM messages WHERE conversationId = $conversationId @@ -3409,7 +3446,7 @@ function getLastConversationMessage( LIMIT 1; ` ) - .get({ + .get({ conversationId, }); @@ -3441,7 +3478,9 @@ function getOldestUnseenMessageForConversation( LIMIT 1; `; - const row = db.prepare(query).get(params); + const row = db + .prepare(query) + .get<{ received_at: number; sent_at: number; id: string }>(params); if (!row) { return undefined; @@ -3492,9 +3531,13 @@ function getTotalUnreadForConversation( isStory IS 0 AND (${_storyIdPredicate(storyId, includeStoryReplies)}) `; - const row = db.prepare(query).pluck().get(params); + const row = db + .prepare(query, { + pluck: true, + }) + .get(params); - return row; + return row ?? 0; } function getTotalUnreadMentionsOfMeForConversation( db: ReadableDB, @@ -3517,9 +3560,13 @@ function getTotalUnreadMentionsOfMeForConversation( isStory IS 0 AND (${_storyIdPredicate(storyId, includeStoryReplies)}) `; - const row = db.prepare(query).pluck().get(params); + const row = db + .prepare(query, { + pluck: true, + }) + .get(params); - return row; + return row ?? 0; } function getTotalUnseenForConversation( db: ReadableDB, @@ -3541,9 +3588,13 @@ function getTotalUnseenForConversation( isStory IS 0 AND (${_storyIdPredicate(storyId, includeStoryReplies)}) `; - const row = db.prepare(query).pluck().get(params); + const row = db + .prepare(query, { + pluck: true, + }) + .get(params); - return row; + return row ?? 0; } function getMessageMetricsForConversation( @@ -3639,8 +3690,9 @@ function clearCallHistory( `; const adminCallLinkIds: ReadonlyArray = db - .prepare(selectAdminCallLinksQuery) - .pluck() + .prepare(selectAdminCallLinksQuery, { + pluck: true, + }) .all(selectAdminCallLinksParams); const adminCallLinkIdsFragment = sqlJoin(adminCallLinkIds); @@ -3661,8 +3713,9 @@ function clearCallHistory( `; const deletedCallIds: ReadonlyArray = db - .prepare(selectCallsQuery) - .pluck() + .prepare(selectCallsQuery, { + pluck: true, + }) .all(selectCallsParams); let deletedMessageIds: ReadonlyArray = []; @@ -3688,9 +3741,10 @@ function clearCallHistory( `; const batchDeletedMessageIds = db - .prepare(deleteMessagesQuery) - .pluck() - .all(deleteMessagesParams); + .prepare(deleteMessagesQuery, { + pluck: true, + }) + .all(deleteMessagesParams); deletedMessageIds = deletedMessageIds.concat(batchDeletedMessageIds); }); @@ -3739,7 +3793,7 @@ function getCallHistoryMessageByCallId( AND type = 'call-history' AND callId = ${options.callId} `; - const row = db.prepare(query).get(params); + const row = db.prepare(query).get(params); if (row == null) { return; } @@ -3785,8 +3839,12 @@ function getCallHistoryUnreadCount(db: ReadableDB): number { AND callsHistory.status IS ${CALL_STATUS_MISSED} AND callsHistory.direction IS ${CALL_STATUS_INCOMING} `; - const row = db.prepare(query).pluck().get(params); - return row; + const row = db + .prepare(query, { + pluck: true, + }) + .get(params); + return row ?? 
0; } function markCallHistoryRead(db: WritableDB, callId: string): void { @@ -3904,8 +3962,9 @@ function getConversationIdForCallHistory( `; const conversationId = db - .prepare(selectConversationIdQuery) - .pluck() + .prepare(selectConversationIdQuery, { + pluck: true, + }) .get(selectConversationIdParams); if (typeof conversationId !== 'string') { @@ -3920,7 +3979,7 @@ function getMessageReceivedAtForCall( db: ReadableDB, callId: string, conversationId: string -): number | null { +): number | undefined { const [selectQuery, selectParams] = sql` SELECT messages.received_at FROM messages @@ -3930,12 +3989,17 @@ function getMessageReceivedAtForCall( LIMIT 1 `; - const receivedAt = db.prepare(selectQuery).pluck().get(selectParams); + const receivedAt = db + .prepare(selectQuery, { + pluck: true, + }) + .get(selectParams); if (receivedAt == null) { logger.warn('getMessageReceivedAtForCall: Target call message not found'); + return undefined; } - return receivedAt ?? null; + return receivedAt; } export function markAllCallHistoryRead( @@ -3958,7 +4022,7 @@ export function markAllCallHistoryRead( ); let predicate: QueryFragment; - let receivedAt: number | null; + let receivedAt: number | undefined; if (callHistory.mode === CallMode.Adhoc) { // If the target is a call link, there's no associated conversation and messages, // and we can only mark call history read based on timestamp. @@ -4223,7 +4287,11 @@ function getCallHistoryGroupData( `; const result = isCount - ? db.prepare(query).pluck(true).get(params) + ? db + .prepare(query, { + pluck: true, + }) + .get(params) : db.prepare(query).all(params); if (isUsingTempTable) { @@ -4358,8 +4426,8 @@ function hasGroupCallHistoryMessage( conversationId: string, eraId: string ): boolean { - const exists: number = db - .prepare( + const exists = db + .prepare( ` SELECT EXISTS( SELECT 1 FROM messages @@ -4368,15 +4436,17 @@ function hasGroupCallHistoryMessage( AND json_extract(json, '$.callHistoryDetails.callMode') = 'Group' AND json_extract(json, '$.callHistoryDetails.eraId') = $eraId ); - ` + `, + { + pluck: true, + } ) - .pluck() - .get({ + .get({ conversationId, eraId, }); - return exists !== 0; + return exists === 1; } function _markCallHistoryMissed( @@ -4416,7 +4486,10 @@ function getRecentStaleRingsAndMarkOlderMissed( ORDER BY timestamp DESC `; - const ringingCalls = db.prepare(selectQuery).all(selectParams); + const ringingCalls = db.prepare(selectQuery).all<{ + callId: string; + peerId: string; + }>(selectParams); const seen = new Set(); const [latestCalls, pastCalls] = partition(ringingCalls, result => { @@ -4548,7 +4621,7 @@ function getMessagesBySentAt( ORDER BY messages.received_at DESC, messages.sent_at DESC; `; - const rows = db.prepare(query).all(params); + const rows = db.prepare(query).all(params); return rows.map(row => hydrateMessage(row)); } @@ -4557,7 +4630,7 @@ function getExpiredMessages(db: ReadableDB): Array { const now = Date.now(); const rows: Array = db - .prepare( + .prepare( ` SELECT ${MESSAGE_COLUMNS.join(', ')}, expiresAt FROM messages @@ -4575,7 +4648,7 @@ function getMessagesUnexpectedlyMissingExpirationStartTimestamp( db: ReadableDB ): Array { const rows: Array = db - .prepare( + .prepare( ` SELECT ${MESSAGE_COLUMNS.join(', ')} FROM messages INDEXED BY messages_unexpectedly_missing_expiration_start_timestamp @@ -4599,15 +4672,17 @@ function getMessagesUnexpectedlyMissingExpirationStartTimestamp( function getSoonestMessageExpiry(db: ReadableDB): undefined | number { // Note: we use `pluck` to only get the first 
column. - const result: null | number = db - .prepare( + const result = db + .prepare( ` SELECT MIN(expiresAt) FROM messages; - ` + `, + { + pluck: true, + } ) - .pluck(true) - .get(); + .get(); if (result != null && result >= Number.MAX_SAFE_INTEGER) { return undefined; @@ -4620,7 +4695,7 @@ function getNextTapToViewMessageTimestampToAgeOut( db: ReadableDB ): undefined | number { const row = db - .prepare( + .prepare( ` SELECT ${MESSAGE_COLUMNS.join(', ')} FROM messages WHERE @@ -4631,7 +4706,7 @@ function getNextTapToViewMessageTimestampToAgeOut( LIMIT 1; ` ) - .get(); + .get(); if (!row) { return undefined; @@ -4646,7 +4721,7 @@ function getTapToViewMessagesNeedingErase( maxTimestamp: number ): Array { const rows: Array = db - .prepare( + .prepare( ` SELECT ${MESSAGE_COLUMNS.join(', ')} FROM messages @@ -4695,8 +4770,7 @@ function saveUnprocessed(db: WritableDB, data: UnprocessedType): string { throw new Error('saveUnprocessed: id was falsey'); } - prepare( - db, + db.prepare( ` INSERT OR REPLACE INTO unprocessed ( id, @@ -4771,23 +4845,6 @@ function saveUnprocessed(db: WritableDB, data: UnprocessedType): string { return id; } -function getUnprocessedById( - db: ReadableDB, - id: string -): UnprocessedType | undefined { - const row = db - .prepare('SELECT * FROM unprocessed WHERE id = $id;') - .get({ - id, - }); - - return { - ...row, - urgent: isNumber(row.urgent) ? Boolean(row.urgent) : true, - story: Boolean(row.story), - }; -} - function getUnprocessedCount(db: ReadableDB): number { return getCountFromTable(db, 'unprocessed'); } @@ -4796,7 +4853,7 @@ function getAllUnprocessedIds(db: WritableDB): Array { return db.transaction(() => { // cleanup first const { changes: deletedStaleCount } = db - .prepare( + .prepare( 'DELETE FROM unprocessed WHERE receivedAtDate < $messageQueueCutoff' ) .run({ @@ -4811,7 +4868,7 @@ function getAllUnprocessedIds(db: WritableDB): Array { } const { changes: deletedInvalidCount } = db - .prepare( + .prepare( ` DELETE FROM unprocessed WHERE attempts >= $MAX_UNPROCESSED_ATTEMPTS @@ -4827,15 +4884,17 @@ function getAllUnprocessedIds(db: WritableDB): Array { } return db - .prepare( - ` - SELECT id - FROM unprocessed - ORDER BY receivedAtCounter ASC + .prepare( ` + SELECT id + FROM unprocessed + ORDER BY receivedAtCounter ASC + `, + { + pluck: true, + } ) - .pluck() - .all(); + .all(); })(); } @@ -4849,7 +4908,7 @@ function getUnprocessedByIdsAndIncrementAttempts( batchMultiVarQuery(db, ids, batch => { return db - .prepare( + .prepare( ` UPDATE unprocessed SET attempts = attempts + 1 @@ -4861,7 +4920,7 @@ function getUnprocessedByIdsAndIncrementAttempts( return batchMultiVarQuery(db, ids, batch => { return db - .prepare( + .prepare( ` SELECT * FROM unprocessed @@ -4880,7 +4939,7 @@ function getUnprocessedByIdsAndIncrementAttempts( } function removeUnprocesseds(db: WritableDB, ids: ReadonlyArray): void { - db.prepare( + db.prepare( ` DELETE FROM unprocessed WHERE id IN ( ${ids.map(() => '?').join(', ')} ); @@ -4890,7 +4949,7 @@ function removeUnprocesseds(db: WritableDB, ids: ReadonlyArray): void { function removeUnprocessed(db: WritableDB, id: string | Array): void { if (!Array.isArray(id)) { - prepare(db, 'DELETE FROM unprocessed WHERE id = $id;').run({ id }); + db.prepare('DELETE FROM unprocessed WHERE id = $id;').run({ id }); return; } @@ -4905,18 +4964,18 @@ function removeUnprocessed(db: WritableDB, id: string | Array): void { } function removeAllUnprocessed(db: WritableDB): void { - db.prepare('DELETE FROM unprocessed;').run(); + db.prepare('DELETE FROM 
unprocessed;').run(); } // Attachment Downloads -function getAttachmentDownloadJob( +function _getAttachmentDownloadJob( db: ReadableDB, job: Pick< AttachmentDownloadJobType, 'messageId' | 'attachmentType' | 'digest' > -): AttachmentDownloadJobType { +): AttachmentDownloadJobType | undefined { const [query, params] = sql` SELECT * FROM attachment_downloads WHERE @@ -4927,7 +4986,17 @@ function getAttachmentDownloadJob( digest = ${job.digest}; `; - return db.prepare(query).get(params); + const row = db.prepare(query).get(params); + if (row === undefined) { + return undefined; + } + const { attachmentJson, ...fields } = row; + return parseUnknown(attachmentDownloadJobSchema, { + ...fields, + active: Boolean(row.active), + attachment: JSON.parse(attachmentJson), + ciphertextSize: row.ciphertextSize || 0, + } as unknown); } function removeAllBackupAttachmentDownloadJobs(db: WritableDB): void { @@ -4941,7 +5010,13 @@ function getSizeOfPendingBackupAttachmentDownloadJobs(db: ReadableDB): number { const [query, params] = sql` SELECT SUM(ciphertextSize) FROM attachment_downloads WHERE source = ${AttachmentDownloadSource.BACKUP_IMPORT};`; - return db.prepare(query).pluck().get(params); + return ( + db + .prepare(query, { + pluck: true, + }) + .get(params) ?? 0 + ); } function getNextAttachmentDownloadJobs( @@ -4960,7 +5035,7 @@ function getNextAttachmentDownloadJobs( maxLastAttemptForPrioritizedMessages?: number; } ): Array { - let priorityJobs = []; + let priorityJobs = new Array(); const sourceWhereFragment = sources ? sqlFragment` @@ -4998,7 +5073,7 @@ function getNextAttachmentDownloadJobs( // Next, get any other jobs, sorted by receivedAt const numJobsRemaining = limit - priorityJobs.length; - let standardJobs = []; + let standardJobs: typeof priorityJobs = []; if (numJobsRemaining > 0) { const [query, params] = sql` SELECT * FROM attachment_downloads @@ -5099,7 +5174,7 @@ function saveAttachmentDownloadJob( } function resetAttachmentDownloadActive(db: WritableDB): void { - db.prepare( + db.prepare( ` UPDATE attachment_downloads SET active = 0 @@ -5110,7 +5185,7 @@ function resetAttachmentDownloadActive(db: WritableDB): void { function removeAttachmentDownloadJob( db: WritableDB, - job: AttachmentDownloadJobType + job: Pick ): void { const [query, params] = sql` DELETE FROM attachment_downloads @@ -5144,7 +5219,7 @@ function clearAllAttachmentBackupJobs(db: WritableDB): void { } function markAllAttachmentBackupJobsInactive(db: WritableDB): void { - db.prepare( + db.prepare( ` UPDATE attachment_backup_jobs SET active = 0; @@ -5201,7 +5276,11 @@ function getNextAttachmentBackupJobs( type ASC, receivedAt DESC LIMIT ${limit} `; - const rows = db.prepare(query).all(params); + const rows = db.prepare(query).all<{ + mediaName: string; + data: string; + active: number; + }>(params); return rows .map(row => { const parseResult = safeParseUnknown(attachmentBackupJobSchema, { @@ -5303,7 +5382,7 @@ function createOrUpdateStickerPack( } const row = db - .prepare( + .prepare( ` SELECT id FROM sticker_packs @@ -5327,7 +5406,7 @@ function createOrUpdateStickerPack( }; if (row) { - db.prepare( + db.prepare( ` UPDATE sticker_packs SET attemptedStatus = $attemptedStatus, @@ -5353,17 +5432,19 @@ function createOrUpdateStickerPack( // Assign default position when inserting a row if (!isNumber(position)) { position = db - .prepare( - ` - SELECT IFNULL(MAX(position) + 1, 0) - FROM sticker_packs + .prepare( ` + SELECT IFNULL(MAX(position) + 1, 0) + FROM sticker_packs + `, + { + pluck: true, + } ) - .pluck() .get(); } 
- db.prepare( + db.prepare( ` INSERT INTO sticker_packs ( attemptedStatus, @@ -5421,7 +5502,12 @@ function updateStickerPackStatus( SELECT status FROM sticker_packs WHERE id IS ${id}; `; - const oldStatus = db.prepare(select).pluck().get(selectParams); + const oldStatus = + db + .prepare(select, { + pluck: true, + }) + .get(selectParams) ?? null; const [update, updateParams] = sql` UPDATE sticker_packs @@ -5447,7 +5533,7 @@ function updateStickerPackInfo( }: StickerPackInfoType ): void { if (uninstalledAt) { - db.prepare( + db.prepare( ` UPDATE uninstalled_sticker_packs SET @@ -5465,7 +5551,7 @@ function updateStickerPackInfo( storageNeedsSync: storageNeedsSync ? 1 : 0, }); } else { - db.prepare( + db.prepare( ` UPDATE sticker_packs SET @@ -5487,7 +5573,7 @@ function updateStickerPackInfo( } } function clearAllErrorStickerPackAttempts(db: WritableDB): void { - db.prepare( + db.prepare( ` UPDATE sticker_packs SET downloadAttempts = 0 @@ -5521,7 +5607,7 @@ function createOrUpdateSticker(db: WritableDB, sticker: StickerType): void { ); } - db.prepare( + db.prepare( ` INSERT OR REPLACE INTO stickers ( emoji, @@ -5579,7 +5665,7 @@ function updateStickerLastUsed( stickerId: number, lastUsed: number ): void { - db.prepare( + db.prepare( ` UPDATE stickers SET lastUsed = $lastUsed @@ -5590,7 +5676,7 @@ function updateStickerLastUsed( packId, lastUsed, }); - db.prepare( + db.prepare( ` UPDATE sticker_packs SET lastUsed = $lastUsed @@ -5616,8 +5702,7 @@ function addStickerPackReference( ); } - prepare( - db, + db.prepare( ` INSERT OR REPLACE INTO sticker_references ( messageId, @@ -5654,7 +5739,7 @@ function deleteStickerPackReference( // 4. If it's not installed, then grab all of its sticker paths // 5. If it's not installed, then sticker pack (which cascades to all // stickers and references) - db.prepare( + db.prepare( ` DELETE FROM sticker_references WHERE messageId = $messageId AND packId = $packId; @@ -5664,27 +5749,30 @@ function deleteStickerPackReference( packId, }); - const count = db - .prepare( - ` - SELECT count(1) FROM sticker_references - WHERE packId = $packId; + const count = + db + .prepare( ` - ) - .pluck() - .get({ packId }); + SELECT count(1) FROM sticker_references + WHERE packId = $packId; + `, + { + pluck: true, + } + ) + .get({ packId }) ?? 0; if (count > 0) { return undefined; } - const packRow: { status: StickerPackStatusType } = db - .prepare( + const packRow = db + .prepare( ` SELECT status FROM sticker_packs WHERE id = $packId; ` ) - .get({ packId }); + .get<{ status: StickerPackStatusType }>({ packId }); if (!packRow) { logger.warn('deleteStickerPackReference: did not find referenced pack'); return undefined; @@ -5696,7 +5784,7 @@ function deleteStickerPackReference( } const stickerPathRows: Array<{ path: string }> = db - .prepare( + .prepare( ` SELECT path FROM stickers WHERE packId = $packId; @@ -5705,7 +5793,7 @@ function deleteStickerPackReference( .all({ packId, }); - db.prepare( + db.prepare( ` DELETE FROM sticker_packs WHERE id = $packId; @@ -5728,7 +5816,9 @@ function getUnresolvedStickerPackReferences( WHERE packId IS ${packId} AND isUnresolved IS 1 RETURNING messageId, stickerId; `; - const rows = db.prepare(query).all(params); + const rows = db + .prepare(query) + .all<{ messageId: string; stickerId: number }>(params); return rows.map(({ messageId, stickerId }) => ({ messageId, @@ -5756,7 +5846,7 @@ function deleteStickerPack(db: WritableDB, packId: string): Array { // 2. 
Delete sticker pack (which cascades to all stickers and references) const stickerPathRows: Array<{ path: string }> = db - .prepare( + .prepare( ` SELECT path FROM stickers WHERE packId = $packId; @@ -5765,7 +5855,7 @@ function deleteStickerPack(db: WritableDB, packId: string): Array { .all({ packId, }); - db.prepare( + db.prepare( ` DELETE FROM sticker_packs WHERE id = $packId; @@ -5781,17 +5871,20 @@ function getStickerCount(db: ReadableDB): number { } function getAllStickerPacks(db: ReadableDB): Array { const rows = db - .prepare( + .prepare( ` SELECT * FROM sticker_packs ORDER BY position ASC, id ASC ` ) - .all(); + .all(); return rows.map(row => { return { ...row, + storageNeedsSync: row.storageNeedsSync === 1, + stickers: {}, + // The columns have STRING type so if they have numeric value, sqlite // will return integers. author: String(row.author), @@ -5803,7 +5896,7 @@ function addUninstalledStickerPack( db: WritableDB, pack: UninstalledStickerPackType ): void { - db.prepare( + db.prepare( ` INSERT OR REPLACE INTO uninstalled_sticker_packs ( @@ -5845,18 +5938,19 @@ function getUninstalledStickerPacks( db: ReadableDB ): Array { const rows = db - .prepare( - 'SELECT * FROM uninstalled_sticker_packs ORDER BY id ASC' - ) - .all(); + .prepare('SELECT * FROM uninstalled_sticker_packs ORDER BY id ASC') + .all(); - return rows || []; + return rows.map(row => ({ + ...row, + storageNeedsSync: row.storageNeedsSync === 1, + })); } function getInstalledStickerPacks(db: ReadableDB): Array { // If sticker pack has a storageID - it is being downloaded and about to be // installed so we better sync it back to storage service if asked. const rows = db - .prepare( + .prepare( ` SELECT * FROM sticker_packs @@ -5866,9 +5960,13 @@ function getInstalledStickerPacks(db: ReadableDB): Array { ORDER BY id ASC ` ) - .all(); + .all(); - return rows || []; + return rows.map(row => ({ + ...row, + storageNeedsSync: row.storageNeedsSync === 1, + stickers: {}, + })); } function getStickerPackInfo( db: ReadableDB, @@ -5876,19 +5974,24 @@ function getStickerPackInfo( ): StickerPackInfoType | undefined { return db.transaction(() => { const uninstalled = db - .prepare( + .prepare( ` SELECT * FROM uninstalled_sticker_packs WHERE id IS $packId ` ) - .get({ packId }); + .get({ packId }); if (uninstalled) { - return uninstalled as UninstalledStickerPackType; + return { + ...uninstalled, + storageNeedsSync: uninstalled.storageNeedsSync === 1, + key: undefined, + position: undefined, + }; } const installed = db - .prepare( + .prepare( ` SELECT id, key, position, storageID, storageVersion, storageUnknownFields @@ -5896,9 +5999,13 @@ function getStickerPackInfo( WHERE id IS $packId ` ) - .get({ packId }); + .get({ packId }); if (installed) { - return installed as InstalledStickerPackType; + return { + ...installed, + storageNeedsSync: installed.storageNeedsSync === 1, + uninstalledAt: undefined, + }; } return undefined; @@ -5963,13 +6070,13 @@ function uninstallStickerPack( } function getAllStickers(db: ReadableDB): Array { const rows = db - .prepare( + .prepare( ` SELECT * FROM stickers ORDER BY packId ASC, id ASC ` ) - .all(); + .all(); return (rows || []).map(row => rowToSticker(row)); } @@ -5979,7 +6086,7 @@ function getRecentStickers( ): Array { // Note: we avoid 'IS NOT NULL' here because it does seem to bypass our index const rows = db - .prepare( + .prepare( ` SELECT stickers.* FROM stickers JOIN sticker_packs on stickers.packId = sticker_packs.id @@ -5988,7 +6095,7 @@ function getRecentStickers( LIMIT $limit ` ) - 
.all({ + .all({ limit: limit || 24, }); @@ -6003,7 +6110,7 @@ function updateEmojiUsage( ): void { db.transaction(() => { const rows = db - .prepare( + .prepare( ` SELECT * FROM emojis WHERE shortName = $shortName; @@ -6014,7 +6121,7 @@ function updateEmojiUsage( }); if (rows) { - db.prepare( + db.prepare( ` UPDATE emojis SET lastUsage = $timeUsed @@ -6022,7 +6129,7 @@ function updateEmojiUsage( ` ).run({ shortName, timeUsed }); } else { - db.prepare( + db.prepare( ` INSERT INTO emojis(shortName, lastUsage) VALUES ($shortName, $timeUsed); @@ -6034,7 +6141,7 @@ function updateEmojiUsage( function getRecentEmojis(db: ReadableDB, limit = 32): Array { const rows = db - .prepare( + .prepare( ` SELECT * FROM emojis @@ -6042,41 +6149,52 @@ function getRecentEmojis(db: ReadableDB, limit = 32): Array { LIMIT $limit; ` ) - .all({ limit }); + .all({ limit }); return rows || []; } function getAllBadges(db: ReadableDB): Array { - const [badgeRows, badgeImageFileRows] = db.transaction(() => [ - db.prepare('SELECT * FROM badges').all(), - db.prepare('SELECT * FROM badgeImageFiles').all(), - ])(); + return db.transaction(() => { + const badgeRows = db.prepare('SELECT * FROM badges').all<{ + id: string; + category: string; + name: string; + descriptionTemplate: string; + }>(); + const badgeImageFileRows = db.prepare('SELECT * FROM badgeImageFiles').all<{ + badgeId: string; + order: number; + url: string; + localPath: string; + theme: BadgeImageTheme; + }>(); - const badgeImagesByBadge = new Map< - string, - Array - >(); - for (const badgeImageFileRow of badgeImageFileRows) { - const { badgeId, order, localPath, url, theme } = badgeImageFileRow; - const badgeImages = badgeImagesByBadge.get(badgeId) || []; - badgeImages[order] = { - ...(badgeImages[order] || {}), - [parseBadgeImageTheme(theme)]: { - localPath: dropNull(localPath), - url, - }, - }; - badgeImagesByBadge.set(badgeId, badgeImages); - } + const badgeImagesByBadge = new Map< + string, + Array + >(); + for (const badgeImageFileRow of badgeImageFileRows) { + const { badgeId, order, localPath, url, theme } = badgeImageFileRow; + const badgeImages = badgeImagesByBadge.get(badgeId) || []; + badgeImages[order] = { + ...(badgeImages[order] || {}), + [parseBadgeImageTheme(theme)]: { + localPath: dropNull(localPath), + url, + }, + }; + badgeImagesByBadge.set(badgeId, badgeImages); + } - return badgeRows.map(badgeRow => ({ - id: badgeRow.id, - category: parseBadgeCategory(badgeRow.category), - name: badgeRow.name, - descriptionTemplate: badgeRow.descriptionTemplate, - images: (badgeImagesByBadge.get(badgeRow.id) || []).filter(isNotNil), - })); + return badgeRows.map(badgeRow => ({ + id: badgeRow.id, + category: parseBadgeCategory(badgeRow.category), + name: badgeRow.name, + descriptionTemplate: badgeRow.descriptionTemplate, + images: (badgeImagesByBadge.get(badgeRow.id) || []).filter(isNotNil), + })); + })(); } // This should match the logic in the badges Redux reducer. 
@@ -6084,8 +6202,7 @@ function updateOrCreateBadges( db: WritableDB, badges: ReadonlyArray ): void { - const insertBadge = prepare( - db, + const insertBadge = db.prepare( ` INSERT OR REPLACE INTO badges ( id, @@ -6100,12 +6217,10 @@ function updateOrCreateBadges( ); ` ); - const getImageFilesForBadge = prepare( - db, + const getImageFilesForBadge = db.prepare( 'SELECT url, localPath FROM badgeImageFiles WHERE badgeId = $badgeId' ); - const insertBadgeImageFile = prepare( - db, + const insertBadgeImageFile = db.prepare( ` INSERT INTO badgeImageFiles ( badgeId, @@ -6128,7 +6243,10 @@ function updateOrCreateBadges( const { id: badgeId } = badge; const oldLocalPaths = new Map(); - for (const { url, localPath } of getImageFilesForBadge.all({ badgeId })) { + for (const { url, localPath } of getImageFilesForBadge.all<{ + url: string; + localPath: string; + }>({ badgeId })) { if (localPath) { oldLocalPaths.set(url, localPath); } @@ -6162,19 +6280,20 @@ function badgeImageFileDownloaded( url: string, localPath: string ): void { - prepare( - db, + db.prepare( 'UPDATE badgeImageFiles SET localPath = $localPath WHERE url = $url' ).run({ url, localPath }); } function getAllBadgeImageFileLocalPaths(db: ReadableDB): Set { const localPaths = db - .prepare( - 'SELECT localPath FROM badgeImageFiles WHERE localPath IS NOT NULL' + .prepare( + 'SELECT localPath FROM badgeImageFiles WHERE localPath IS NOT NULL', + { + pluck: true, + } ) - .pluck() - .all(); + .all(); return new Set(localPaths); } @@ -6288,20 +6407,18 @@ function _getAllStoryDistributions( db: ReadableDB ): Array { const storyDistributions = db - .prepare('SELECT * FROM storyDistributions;') - .all(); + .prepare('SELECT * FROM storyDistributions;') + .all(); return storyDistributions.map(hydrateStoryDistribution); } function _getAllStoryDistributionMembers( db: ReadableDB ): Array { - return db - .prepare('SELECT * FROM storyDistributionMembers;') - .all(); + return db.prepare('SELECT * FROM storyDistributionMembers;').all(); } function _deleteAllStoryDistributions(db: WritableDB): void { - db.prepare('DELETE FROM storyDistributions;').run(); + db.prepare('DELETE FROM storyDistributions;').run(); } function createNewStoryDistribution( db: WritableDB, @@ -6315,8 +6432,7 @@ function createNewStoryDistribution( db.transaction(() => { const payload = freezeStoryDistribution(distribution); - prepare( - db, + db.prepare( ` INSERT INTO storyDistributions( id, @@ -6346,8 +6462,7 @@ function createNewStoryDistribution( const { id: listId, members } = distribution; - const memberInsertStatement = prepare( - db, + const memberInsertStatement = db.prepare( ` INSERT OR REPLACE INTO storyDistributionMembers ( listId, @@ -6384,23 +6499,23 @@ function getStoryDistributionWithMembers( db: ReadableDB, id: string ): StoryDistributionWithMembersType | undefined { - const storyDistribution: StoryDistributionForDatabase | undefined = prepare( - db, - 'SELECT * FROM storyDistributions WHERE id = $id;' - ).get({ - id, - }); + const storyDistribution = db + .prepare('SELECT * FROM storyDistributions WHERE id = $id;') + .get({ + id, + }); if (!storyDistribution) { return undefined; } - const members = prepare( - db, - 'SELECT * FROM storyDistributionMembers WHERE listId = $id;' - ).all({ - id, - }); + const members = db + .prepare( + 'SELECT serviceId FROM storyDistributionMembers WHERE listId = $id;' + ) + .all<{ serviceId: ServiceIdString }>({ + id, + }); return { ...hydrateStoryDistribution(storyDistribution), @@ -6425,8 +6540,7 @@ function modifyStoryDistribution( 
); } - prepare( - db, + db.prepare( ` UPDATE storyDistributions SET @@ -6451,8 +6565,7 @@ function modifyStoryDistributionMembers( toRemove, }: { toAdd: Array; toRemove: Array } ): void { - const memberInsertStatement = prepare( - db, + const memberInsertStatement = db.prepare( ` INSERT OR REPLACE INTO storyDistributionMembers ( listId, @@ -6505,20 +6618,19 @@ function deleteStoryDistribution( db: WritableDB, id: StoryDistributionIdString ): void { - db.prepare('DELETE FROM storyDistributions WHERE id = $id;').run({ + db.prepare('DELETE FROM storyDistributions WHERE id = $id;').run({ id, }); } function _getAllStoryReads(db: ReadableDB): Array { - return db.prepare('SELECT * FROM storyReads;').all(); + return db.prepare('SELECT * FROM storyReads;').all(); } function _deleteAllStoryReads(db: WritableDB): void { - db.prepare('DELETE FROM storyReads;').run(); + db.prepare('DELETE FROM storyReads;').run(); } function addNewStoryRead(db: WritableDB, read: StoryReadType): void { - prepare( - db, + db.prepare( ` INSERT OR REPLACE INTO storyReads( authorId, @@ -6549,7 +6661,7 @@ function getLastStoryReadsForAuthor( const limit = initialLimit || 5; return db - .prepare( + .prepare( ` SELECT * FROM storyReads WHERE @@ -6570,15 +6682,19 @@ function countStoryReadsByConversation( db: ReadableDB, conversationId: string ): number { - return db - .prepare( - ` + return ( + db + .prepare( + ` SELECT count(1) FROM storyReads WHERE conversationId = $conversationId; - ` - ) - .pluck() - .get({ conversationId }); + `, + { + pluck: true, + } + ) + .get({ conversationId }) ?? 0 + ); } // All data in database @@ -6669,8 +6785,9 @@ function removeAllConfiguration(db: WritableDB): void { ); const itemIds: ReadonlyArray = db - .prepare('SELECT id FROM items') - .pluck(true) + .prepare('SELECT id FROM items', { + pluck: true, + }) .all(); const allowedSet = new Set(STORAGE_UI_KEYS); @@ -6746,7 +6863,7 @@ function getMessagesNeedingUpgrade( { maxVersion }: { maxVersion: number } ): Array { const rows: Array = db - .prepare( + .prepare( ` SELECT ${MESSAGE_COLUMNS.join(', ')} FROM messages @@ -6798,18 +6915,20 @@ function getMessageServerGuidsForSpam( // The server's maximum is 3, which is why you see `LIMIT 3` in this query. Note that we // use `pluck` here to only get the first column! 
return db - .prepare( - ` - SELECT serverGuid - FROM messages - WHERE conversationId = $conversationId - AND type = 'incoming' - AND serverGuid IS NOT NULL - ORDER BY received_at DESC, sent_at DESC - LIMIT 3; + .prepare( ` + SELECT serverGuid + FROM messages + WHERE conversationId = $conversationId + AND type = 'incoming' + AND serverGuid IS NOT NULL + ORDER BY received_at DESC, sent_at DESC + LIMIT 3; + `, + { + pluck: true, + } ) - .pluck(true) .all({ conversationId }); } @@ -7036,22 +7155,24 @@ function pageMessages( } const rowids: Array = writable - .prepare( - ` - DELETE FROM tmp_${runId}_updated_messages - RETURNING rowid - ORDER BY received_at ASC, sent_at ASC - LIMIT $chunkSize; + .prepare( ` + DELETE FROM tmp_${runId}_updated_messages + RETURNING rowid + ORDER BY received_at ASC, sent_at ASC + LIMIT $chunkSize; + `, + { + pluck: true, + } ) - .pluck() .all({ chunkSize }); const messages = batchMultiVarQuery( writable, rowids, (batch: ReadonlyArray): Array => { - const query = writable.prepare( + const query = writable.prepare( ` SELECT ${MESSAGE_COLUMNS.join(', ')} FROM messages @@ -7117,7 +7238,7 @@ function getKnownConversationAttachments(db: ReadableDB): Array { `${conversationTotal}` ); - const fetchConversations = db.prepare( + const fetchConversations = db.prepare( ` SELECT json FROM conversations WHERE id > $id @@ -7127,7 +7248,7 @@ function getKnownConversationAttachments(db: ReadableDB): Array { ); while (!complete) { - const rows = fetchConversations.all({ + const rows = fetchConversations.all<{ json: string }>({ id, chunkSize, }); @@ -7172,7 +7293,7 @@ function removeKnownStickers( while (!complete) { const rows: Array<{ rowid: number; path: string }> = db - .prepare( + .prepare( ` SELECT rowid, path FROM stickers WHERE rowid > $rowid @@ -7225,7 +7346,7 @@ function removeKnownDraftAttachments( while (!complete) { const rows: JSONRows = db - .prepare( + .prepare( ` SELECT json FROM conversations WHERE id > $id @@ -7268,7 +7389,7 @@ export function getJobsInQueue( queueType: string ): Array { return db - .prepare( + .prepare( ` SELECT id, timestamp, data FROM jobs @@ -7276,7 +7397,7 @@ export function getJobsInQueue( ORDER BY timestamp; ` ) - .all({ queueType }) + .all<{ id: string; timestamp: number; data: string }>({ queueType }) .map(row => ({ id: row.id, queueType, @@ -7286,7 +7407,7 @@ export function getJobsInQueue( } export function insertJob(db: WritableDB, job: Readonly): void { - db.prepare( + db.prepare( ` INSERT INTO jobs (id, queueType, timestamp, data) @@ -7302,40 +7423,46 @@ export function insertJob(db: WritableDB, job: Readonly): void { } function deleteJob(db: WritableDB, id: string): void { - db.prepare('DELETE FROM jobs WHERE id = $id').run({ id }); + db.prepare('DELETE FROM jobs WHERE id = $id').run({ id }); } function wasGroupCallRingPreviouslyCanceled( db: ReadableDB, ringId: bigint ): boolean { - return db - .prepare( - ` - SELECT EXISTS ( - SELECT 1 FROM groupCallRingCancellations - WHERE ringId = $ringId - AND createdAt >= $ringsOlderThanThisAreIgnored - ); - ` - ) - .pluck() - .get({ - ringId, - ringsOlderThanThisAreIgnored: Date.now() - MAX_GROUP_CALL_RING_AGE, - }); + return ( + db + .prepare( + ` + SELECT EXISTS ( + SELECT 1 FROM groupCallRingCancellations + WHERE ringId = $ringId + AND createdAt >= $ringsOlderThanThisAreIgnored + ); + `, + { + pluck: true, + bigint: true, + } + ) + .get({ + ringId, + ringsOlderThanThisAreIgnored: Date.now() - MAX_GROUP_CALL_RING_AGE, + }) === 1 + ); } function processGroupCallRingCancellation( db: 
WritableDB, ringId: bigint ): void { - db.prepare( + db.prepare( ` INSERT INTO groupCallRingCancellations (ringId, createdAt) VALUES ($ringId, $createdAt) ON CONFLICT (ringId) DO NOTHING; - ` + `, + { bigint: true } ).run({ ringId, createdAt: Date.now() }); } @@ -7344,7 +7471,7 @@ function processGroupCallRingCancellation( const MAX_GROUP_CALL_RING_AGE = 30 * durations.MINUTE; function cleanExpiredGroupCallRingCancellations(db: WritableDB): void { - db.prepare( + db.prepare( ` DELETE FROM groupCallRingCancellations WHERE createdAt < $expiredRingTime; @@ -7356,18 +7483,20 @@ function cleanExpiredGroupCallRingCancellations(db: WritableDB): void { function getMaxMessageCounter(db: ReadableDB): number | undefined { return db - .prepare( + .prepare( ` - SELECT MAX(counter) - FROM - ( - SELECT MAX(received_at) AS counter FROM messages - UNION - SELECT MAX(timestamp) AS counter FROM unprocessed - ) - ` +SELECT MAX(counter) +FROM + ( + SELECT MAX(received_at) AS counter FROM messages + UNION + SELECT MAX(timestamp) AS counter FROM unprocessed + ) +`, + { + pluck: true, + } ) - .pluck() .get(); } @@ -7389,7 +7518,7 @@ function updateAllConversationColors( value: CustomColorType; } ): void { - db.prepare( + db.prepare( ` UPDATE conversations SET json = JSON_PATCH(json, $patch); @@ -7436,7 +7565,7 @@ function saveEditedMessages( ${conversationId}, ${messageId}, ${sentAt}, - ${readStatus} + ${readStatus ?? null} ); `; @@ -7458,7 +7587,7 @@ function _getAllEditedMessages( db: ReadableDB ): Array<{ messageId: string; sentAt: number }> { return db - .prepare( + .prepare( ` SELECT * FROM edited_messages; ` @@ -7496,10 +7625,12 @@ function getUnreadEditedMessagesAndMarkRead( ORDER BY messages.received_at DESC, messages.sent_at DESC; `; - const rows = db.prepare(selectQuery).all(selectParams); + const rows = db + .prepare(selectQuery) + .all(selectParams); if (rows.length) { - const newestSentAt = rows[0].sentAt; + const newestSentAt = rows[0].sent_at; const [updateStatusQuery, updateStatusParams] = sql` UPDATE edited_messages @@ -7517,18 +7648,21 @@ function getUnreadEditedMessagesAndMarkRead( return rows.map(row => { const json = hydrateMessage(row); return { - originalReadStatus: row.readStatus, + originalReadStatus: row.readStatus ?? 
undefined, readStatus: ReadStatus.Read, seenStatus: SeenStatus.Seen, ...pick(json, [ + 'conversationId', 'expirationStartTimestamp', 'id', + 'received_at', 'sent_at', 'source', 'sourceServiceId', + 'timestamp', 'type', ]), - }; + } satisfies MessageType & { originalReadStatus: ReadStatus | undefined }; }); })(); } diff --git a/ts/sql/migrations/1000-mark-unread-call-history-messages-as-unseen.ts b/ts/sql/migrations/1000-mark-unread-call-history-messages-as-unseen.ts index f673b5922e..a5a40dc114 100644 --- a/ts/sql/migrations/1000-mark-unread-call-history-messages-as-unseen.ts +++ b/ts/sql/migrations/1000-mark-unread-call-history-messages-as-unseen.ts @@ -1,7 +1,7 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; import { ReadStatus } from '../../messages/MessageReadStatus'; diff --git a/ts/sql/migrations/1010-call-links-table.ts b/ts/sql/migrations/1010-call-links-table.ts index 61def0276b..07dc73a1f4 100644 --- a/ts/sql/migrations/1010-call-links-table.ts +++ b/ts/sql/migrations/1010-call-links-table.ts @@ -1,7 +1,7 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; import { sql } from '../util'; diff --git a/ts/sql/migrations/1020-self-merges.ts b/ts/sql/migrations/1020-self-merges.ts index 0ba57bc393..eacb8b6ddd 100644 --- a/ts/sql/migrations/1020-self-merges.ts +++ b/ts/sql/migrations/1020-self-merges.ts @@ -30,7 +30,11 @@ export function updateToSchemaVersion1020( SELECT id FROM conversations WHERE serviceId IS ${ourAci} `; - const ourConversationId = db.prepare(selectQuery).pluck().get(selectParams); + const ourConversationId = db + .prepare(selectQuery, { + pluck: true, + }) + .get(selectParams); if (ourConversationId == null) { logger.error('updateToSchemaVersion1020: no conversation'); db.pragma('user_version = 1020'); diff --git a/ts/sql/migrations/1030-unblock-event.ts b/ts/sql/migrations/1030-unblock-event.ts index c506a9d024..6e85ce5e76 100644 --- a/ts/sql/migrations/1030-unblock-event.ts +++ b/ts/sql/migrations/1030-unblock-event.ts @@ -1,7 +1,7 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; import { sql, sqlFragment } from '../util'; diff --git a/ts/sql/migrations/1040-undownloaded-backed-up-media.ts b/ts/sql/migrations/1040-undownloaded-backed-up-media.ts index 1b3b24dcdb..10972e51e6 100644 --- a/ts/sql/migrations/1040-undownloaded-backed-up-media.ts +++ b/ts/sql/migrations/1040-undownloaded-backed-up-media.ts @@ -1,7 +1,7 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; import { diff --git a/ts/sql/migrations/1050-group-send-endorsements.ts b/ts/sql/migrations/1050-group-send-endorsements.ts index a034349d9d..6f06e4dc76 100644 --- a/ts/sql/migrations/1050-group-send-endorsements.ts +++ b/ts/sql/migrations/1050-group-send-endorsements.ts @@ -1,7 +1,7 @@ // Copyright 
2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; import { sql } from '../util'; diff --git a/ts/sql/migrations/1060-addressable-messages-and-sync-tasks.ts b/ts/sql/migrations/1060-addressable-messages-and-sync-tasks.ts index bbcd61ef41..4c8ae15f64 100644 --- a/ts/sql/migrations/1060-addressable-messages-and-sync-tasks.ts +++ b/ts/sql/migrations/1060-addressable-messages-and-sync-tasks.ts @@ -1,7 +1,7 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/1070-attachment-backup.ts b/ts/sql/migrations/1070-attachment-backup.ts index 9faae2103d..d03bf01d79 100644 --- a/ts/sql/migrations/1070-attachment-backup.ts +++ b/ts/sql/migrations/1070-attachment-backup.ts @@ -1,7 +1,7 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/1080-nondisappearing-addressable.ts b/ts/sql/migrations/1080-nondisappearing-addressable.ts index f04f80446b..38957cde80 100644 --- a/ts/sql/migrations/1080-nondisappearing-addressable.ts +++ b/ts/sql/migrations/1080-nondisappearing-addressable.ts @@ -1,7 +1,7 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/1090-message-delete-indexes.ts b/ts/sql/migrations/1090-message-delete-indexes.ts index 80ed30e890..da85ce564f 100644 --- a/ts/sql/migrations/1090-message-delete-indexes.ts +++ b/ts/sql/migrations/1090-message-delete-indexes.ts @@ -1,7 +1,7 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/1100-optimize-mark-call-history-read-in-conversation.ts b/ts/sql/migrations/1100-optimize-mark-call-history-read-in-conversation.ts index 53ad83feb0..db27f9f9fc 100644 --- a/ts/sql/migrations/1100-optimize-mark-call-history-read-in-conversation.ts +++ b/ts/sql/migrations/1100-optimize-mark-call-history-read-in-conversation.ts @@ -1,7 +1,7 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; import { sql } from '../util'; diff --git a/ts/sql/migrations/1110-sticker-local-key.ts b/ts/sql/migrations/1110-sticker-local-key.ts index 46e1d6be72..f55cf481ae 100644 --- a/ts/sql/migrations/1110-sticker-local-key.ts +++ b/ts/sql/migrations/1110-sticker-local-key.ts @@ -1,7 +1,7 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { 
LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/1120-messages-foreign-keys-indexes.ts b/ts/sql/migrations/1120-messages-foreign-keys-indexes.ts index 58036802d9..a6d2d33fe7 100644 --- a/ts/sql/migrations/1120-messages-foreign-keys-indexes.ts +++ b/ts/sql/migrations/1120-messages-foreign-keys-indexes.ts @@ -1,7 +1,7 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/1130-isStory-index.ts b/ts/sql/migrations/1130-isStory-index.ts index 28743a8990..fc900899ab 100644 --- a/ts/sql/migrations/1130-isStory-index.ts +++ b/ts/sql/migrations/1130-isStory-index.ts @@ -1,7 +1,7 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/1140-call-links-deleted-column.ts b/ts/sql/migrations/1140-call-links-deleted-column.ts index 943d9464ce..79ecf37e09 100644 --- a/ts/sql/migrations/1140-call-links-deleted-column.ts +++ b/ts/sql/migrations/1140-call-links-deleted-column.ts @@ -1,6 +1,6 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; export const version = 1140; diff --git a/ts/sql/migrations/1150-expire-timer-version.ts b/ts/sql/migrations/1150-expire-timer-version.ts index 5899fde638..ec0650d4a7 100644 --- a/ts/sql/migrations/1150-expire-timer-version.ts +++ b/ts/sql/migrations/1150-expire-timer-version.ts @@ -1,6 +1,6 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; export const version = 1150; diff --git a/ts/sql/migrations/1160-optimize-calls-unread-count.ts b/ts/sql/migrations/1160-optimize-calls-unread-count.ts index 4de5947ee3..ec4e04ae6d 100644 --- a/ts/sql/migrations/1160-optimize-calls-unread-count.ts +++ b/ts/sql/migrations/1160-optimize-calls-unread-count.ts @@ -1,6 +1,6 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; import { sql, sqlConstant } from '../util'; import { CallDirection, CallStatusValue } from '../../types/CallDisposition'; diff --git a/ts/sql/migrations/1170-update-call-history-unread-index.ts b/ts/sql/migrations/1170-update-call-history-unread-index.ts index 7dc7c0d23a..f5a162d440 100644 --- a/ts/sql/migrations/1170-update-call-history-unread-index.ts +++ b/ts/sql/migrations/1170-update-call-history-unread-index.ts @@ -1,6 +1,6 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; import { sql } from '../util'; diff --git a/ts/sql/migrations/1180-add-attachment-download-source.ts 
b/ts/sql/migrations/1180-add-attachment-download-source.ts
index 42f593bcdd..e2fdac371a 100644
--- a/ts/sql/migrations/1180-add-attachment-download-source.ts
+++ b/ts/sql/migrations/1180-add-attachment-download-source.ts
@@ -1,6 +1,6 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { Database } from '@signalapp/better-sqlite3';
+import type { Database } from '@signalapp/sqlcipher';
 import type { LoggerType } from '../../types/Logging';
 import { AttachmentDownloadSource } from '../Interface';
diff --git a/ts/sql/migrations/1190-call-links-storage.ts b/ts/sql/migrations/1190-call-links-storage.ts
index 9642ba626c..f86546dcdc 100644
--- a/ts/sql/migrations/1190-call-links-storage.ts
+++ b/ts/sql/migrations/1190-call-links-storage.ts
@@ -1,6 +1,6 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { Database } from '@signalapp/better-sqlite3';
+import type { Database } from '@signalapp/sqlcipher';
 import type { LoggerType } from '../../types/Logging';
 export const version = 1190;
diff --git a/ts/sql/migrations/1200-attachment-download-source-index.ts b/ts/sql/migrations/1200-attachment-download-source-index.ts
index e60940fd34..b6f8b2bc22 100644
--- a/ts/sql/migrations/1200-attachment-download-source-index.ts
+++ b/ts/sql/migrations/1200-attachment-download-source-index.ts
@@ -1,6 +1,6 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { Database } from '@signalapp/better-sqlite3';
+import type { Database } from '@signalapp/sqlcipher';
 import type { LoggerType } from '../../types/Logging';
 export const version = 1200;
diff --git a/ts/sql/migrations/1210-call-history-started-id.ts b/ts/sql/migrations/1210-call-history-started-id.ts
index fad1a398f8..3da56cd8f0 100644
--- a/ts/sql/migrations/1210-call-history-started-id.ts
+++ b/ts/sql/migrations/1210-call-history-started-id.ts
@@ -1,6 +1,6 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { Database } from '@signalapp/better-sqlite3';
+import type { Database } from '@signalapp/sqlcipher';
 import type { LoggerType } from '../../types/Logging';
 export const version = 1210;
diff --git a/ts/sql/migrations/1220-blob-sessions.ts b/ts/sql/migrations/1220-blob-sessions.ts
index 97502e3973..37312fc10e 100644
--- a/ts/sql/migrations/1220-blob-sessions.ts
+++ b/ts/sql/migrations/1220-blob-sessions.ts
@@ -2,7 +2,7 @@
 // SPDX-License-Identifier: AGPL-3.0-only
 import assert from 'assert';
 import z from 'zod';
-import type { Database } from '@signalapp/better-sqlite3';
+import type { Database } from '@signalapp/sqlcipher';
 import type { LoggerType } from '../../types/Logging';
 import * as Errors from '../../types/errors';
 import {
@@ -144,16 +144,17 @@ export function updateToSchemaVersion1220(
   ) STRICT;
   `);
 
-  const getItem = db
-    .prepare(
-      `
-      SELECT json -> '$.value' FROM items WHERE id IS ?
-      `
-    )
-    .pluck();
+  const getItem = db.prepare(
+    `
+    SELECT json -> '$.value' FROM items WHERE id IS ?
+    `,
+    {
+      pluck: true,
+    }
+  );
 
-  const identityKeyMapJson = getItem.get('identityKeyMap');
-  const registrationIdMapJson = getItem.get('registrationIdMap');
+  const identityKeyMapJson = getItem.get(['identityKeyMap']);
+  const registrationIdMapJson = getItem.get(['registrationIdMap']);
 
   // If we don't have private keys - the sessions cannot be used anyway
   if (!identityKeyMapJson || !registrationIdMapJson) {
diff --git a/ts/sql/migrations/1230-call-links-admin-key-index.ts b/ts/sql/migrations/1230-call-links-admin-key-index.ts
index 3dc883901f..7ab71a2fae 100644
--- a/ts/sql/migrations/1230-call-links-admin-key-index.ts
+++ b/ts/sql/migrations/1230-call-links-admin-key-index.ts
@@ -1,6 +1,6 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { Database } from '@signalapp/better-sqlite3';
+import type { Database } from '@signalapp/sqlcipher';
 import type { LoggerType } from '../../types/Logging';
 export const version = 1230;
diff --git a/ts/sql/migrations/1240-defunct-call-links-table.ts b/ts/sql/migrations/1240-defunct-call-links-table.ts
index 24f0e2c692..e29778bd00 100644
--- a/ts/sql/migrations/1240-defunct-call-links-table.ts
+++ b/ts/sql/migrations/1240-defunct-call-links-table.ts
@@ -1,7 +1,7 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { Database } from '@signalapp/better-sqlite3';
+import type { Database } from '@signalapp/sqlcipher';
 import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';
diff --git a/ts/sql/migrations/1250-defunct-call-links-storage.ts b/ts/sql/migrations/1250-defunct-call-links-storage.ts
index c0e6312901..398822caa9 100644
--- a/ts/sql/migrations/1250-defunct-call-links-storage.ts
+++ b/ts/sql/migrations/1250-defunct-call-links-storage.ts
@@ -1,6 +1,6 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { Database } from '@signalapp/better-sqlite3';
+import type { Database } from '@signalapp/sqlcipher';
 import type { LoggerType } from '../../types/Logging';
 export const version = 1250;
diff --git a/ts/sql/migrations/1260-sync-tasks-rowid.ts b/ts/sql/migrations/1260-sync-tasks-rowid.ts
index 1e9958fa02..18aed51003 100644
--- a/ts/sql/migrations/1260-sync-tasks-rowid.ts
+++ b/ts/sql/migrations/1260-sync-tasks-rowid.ts
@@ -1,6 +1,6 @@
 // Copyright 2024 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { Database } from '@signalapp/better-sqlite3';
+import type { Database } from '@signalapp/sqlcipher';
 import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';
diff --git a/ts/sql/migrations/1270-normalize-messages.ts b/ts/sql/migrations/1270-normalize-messages.ts
index 495215c42a..9c2c8ff0cf 100644
--- a/ts/sql/migrations/1270-normalize-messages.ts
+++ b/ts/sql/migrations/1270-normalize-messages.ts
@@ -1,6 +1,6 @@
 // Copyright 2025 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { Database } from '@signalapp/better-sqlite3';
+import type { Database } from '@signalapp/sqlcipher';
 import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';
diff --git a/ts/sql/migrations/1280-blob-unprocessed.ts b/ts/sql/migrations/1280-blob-unprocessed.ts
index ada93322b2..95eb0d4504 100644
--- a/ts/sql/migrations/1280-blob-unprocessed.ts
+++ b/ts/sql/migrations/1280-blob-unprocessed.ts
@@ -116,20 +116,22 @@ export function updateToSchemaVersion1280(
       }
 
       try {
-        const decoded = Proto.Envelope.decode(Buffer.from(envelope, 'base64'));
+        const decoded = Proto.Envelope.decode(
+          Buffer.from(String(envelope), 'base64')
+        );
 
         if (!decoded.content) {
           throw new Error('Missing envelope content');
         }
 
         const content = decrypted
-          ? Buffer.from(decrypted, 'base64')
+          ? Buffer.from(String(decrypted), 'base64')
           : decoded.content;
 
         insertStmt.run({
           ...rest,
           id,
           type: decoded.type ?? Proto.Envelope.Type.UNKNOWN,
-          content,
+          content: content ?? null,
           isEncrypted: decrypted ? 0 : 1,
           timestamp: timestamp || Date.now(),
           attempts: attempts || 0,
@@ -138,16 +140,17 @@ export function updateToSchemaVersion1280(
           story: story ? 1 : 0,
           serverGuid: serverGuid || getGuid(),
           serverTimestamp: serverTimestamp || 0,
-          destinationServiceId: normalizeServiceId(
-            decoded.destinationServiceId || ourAci,
-            'Envelope.destinationServiceId'
-          ),
+          destinationServiceId:
+            normalizeServiceId(
+              decoded.destinationServiceId || ourAci,
+              'Envelope.destinationServiceId'
+            ) ?? null,
           updatedPni: isUntaggedPniString(decoded.updatedPni)
             ? normalizePni(
                 toTaggedPni(decoded.updatedPni),
                 'Envelope.updatedPni'
               )
-            : undefined,
+            : null,
           // Sadly not captured previously
           messageAgeSec: 0,
           reportingToken: decoded.reportSpamToken?.length
diff --git a/ts/sql/migrations/1330-sync-tasks-type-index.ts b/ts/sql/migrations/1330-sync-tasks-type-index.ts
index 1a452c1436..5b73bcf985 100644
--- a/ts/sql/migrations/1330-sync-tasks-type-index.ts
+++ b/ts/sql/migrations/1330-sync-tasks-type-index.ts
@@ -1,6 +1,6 @@
 // Copyright 2025 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { Database } from '@signalapp/better-sqlite3';
+import type { Database } from '@signalapp/sqlcipher';
 import type { LoggerType } from '../../types/Logging';
 import { sql } from '../util';
diff --git a/ts/sql/migrations/41-uuid-keys.ts b/ts/sql/migrations/41-uuid-keys.ts
index c8ecf9865b..8d0d38c51d 100644
--- a/ts/sql/migrations/41-uuid-keys.ts
+++ b/ts/sql/migrations/41-uuid-keys.ts
@@ -5,14 +5,13 @@ import type { LoggerType } from '../../types/Logging';
 import { isValidUuid } from '../../util/isValidUuid';
 import Helpers from '../../textsecure/Helpers';
 import { createOrUpdate, getById, removeById } from '../util';
-import type { EmptyQuery, Query } from '../util';
 import type { ItemKeyType, ReadableDB, WritableDB } from '../Interface';
 
 export function getOurUuid(db: ReadableDB): string | undefined {
   const UUID_ID: ItemKeyType = 'uuid_id';
 
   const row: { json: string } | undefined = db
-    .prepare<Query>('SELECT json FROM items WHERE id = $id;')
+    .prepare('SELECT json FROM items WHERE id = $id;')
     .get({ id: UUID_ID });
 
   if (!row) {
@@ -34,19 +33,20 @@ export default function updateToSchemaVersion41(
     return;
   }
 
-  const getConversationUuid = db
-    .prepare(
-      `
-      SELECT uuid
-      FROM
-        conversations
-      WHERE
-        id = $conversationId
-      `
-    )
-    .pluck();
+  const getConversationUuid = db.prepare(
+    `
+    SELECT uuid
+    FROM
+      conversations
+    WHERE
+      id = $conversationId
+    `,
+    {
+      pluck: true,
+    }
+  );
 
-  const getConversationStats = db.prepare<Query>(
+  const getConversationStats = db.prepare(
     `
     SELECT uuid, e164, active_at
     FROM
@@ -56,9 +56,15 @@ export default function updateToSchemaVersion41(
     `
   );
 
+  type StatsType = {
+    uuid: string;
+    e164: string;
+    active_at: number;
+  };
+
   const compareConvoRecency = (a: string, b: string): number => {
-    const aStats = getConversationStats.get({ conversationId: a });
-    const bStats = getConversationStats.get({ conversationId: b });
+    const aStats = getConversationStats.get<StatsType>({ conversationId: a });
+    const bStats = getConversationStats.get<StatsType>({ conversationId: b });
 
     const isAComplete = Boolean(aStats?.uuid && aStats?.e164);
     const isBComplete = Boolean(bStats?.uuid && bStats?.e164);
@@ -73,7 +79,7 @@ export default function updateToSchemaVersion41(
       return 1;
     }
 
-    return aStats.active_at - bStats.active_at;
+    return (aStats?.active_at ?? 0) - (bStats?.active_at ?? 0);
   };
 
   const clearSessionsAndKeys = (): number => {
@@ -141,7 +147,7 @@ export default function updateToSchemaVersion41(
   const prefixKeys = (ourUuid: string) => {
     for (const table of ['signedPreKeys', 'preKeys']) {
       // Update id to include suffix, add `ourUuid` and `keyId` fields.
-      db.prepare<Query>(
+      db.prepare(
         `
         UPDATE ${table}
         SET
@@ -166,14 +172,12 @@ export default function updateToSchemaVersion41(
     senderId: string;
     lastUpdatedDate: number;
   }> = db
-    .prepare(
-      'SELECT id, senderId, lastUpdatedDate FROM senderKeys'
-    )
+    .prepare('SELECT id, senderId, lastUpdatedDate FROM senderKeys')
     .all();
 
   logger.info(`Updating ${senderKeys.length} sender keys`);
 
-  const updateSenderKey = db.prepare<Query>(
+  const updateSenderKey = db.prepare(
     `
     UPDATE senderKeys
     SET
@@ -184,9 +188,7 @@ export default function updateToSchemaVersion41(
     `
   );
 
-  const deleteSenderKey = db.prepare(
-    'DELETE FROM senderKeys WHERE id = $id'
-  );
+  const deleteSenderKey = db.prepare('DELETE FROM senderKeys WHERE id = $id');
 
   const pastKeys = new Map<
     string,
@@ -201,7 +203,7 @@ export default function updateToSchemaVersion41(
   let skipped = 0;
   for (const { id, senderId, lastUpdatedDate } of senderKeys) {
     const [conversationId] = Helpers.unencodeNumber(senderId);
-    const uuid = getConversationUuid.get({ conversationId });
+    const uuid = getConversationUuid.get({ conversationId });
 
     if (!uuid) {
       deleted += 1;
@@ -252,12 +254,12 @@ export default function updateToSchemaVersion41(
   //
   //   Set ourUuid column and field in json
 
   const allSessions = db
-    .prepare('SELECT id, conversationId FROM SESSIONS')
-    .all();
+    .prepare('SELECT id, conversationId FROM SESSIONS')
+    .all<{ id: string; conversationId: string }>();
 
   logger.info(`Updating ${allSessions.length} sessions`);
 
-  const updateSession = db.prepare<Query>(
+  const updateSession = db.prepare(
     `
     UPDATE sessions
     SET
@@ -278,9 +280,7 @@ export default function updateToSchemaVersion41(
     `
   );
 
-  const deleteSession = db.prepare(
-    'DELETE FROM sessions WHERE id = $id'
-  );
+  const deleteSession = db.prepare('DELETE FROM sessions WHERE id = $id');
 
   const pastSessions = new Map<
     string,
@@ -293,7 +293,7 @@ export default function updateToSchemaVersion41(
   let deleted = 0;
   let skipped = 0;
   for (const { id, conversationId } of allSessions) {
-    const uuid = getConversationUuid.get({ conversationId });
+    const uuid = getConversationUuid.get({ conversationId });
     if (!uuid) {
       deleted += 1;
       deleteSession.run({ id });
@@ -338,13 +338,13 @@ export default function updateToSchemaVersion41(
   };
 
   const updateIdentityKeys = () => {
-    const identityKeys: ReadonlyArray<{
-      id: string;
-    }> = db.prepare('SELECT id FROM identityKeys').all();
+    const identityKeys = db
+      .prepare('SELECT id FROM identityKeys')
+      .all<{ id: string }>();
 
     logger.info(`Updating ${identityKeys.length} identity keys`);
 
-    const updateIdentityKey = db.prepare<Query>(
+    const updateIdentityKey = db.prepare(
       `
       UPDATE OR REPLACE identityKeys
       SET
@@ -361,7 +361,7 @@ export default function updateToSchemaVersion41(
     let migrated = 0;
     for (const { id } of identityKeys) {
-      const uuid = getConversationUuid.get({ conversationId: id });
+      const uuid = getConversationUuid.get({ conversationId: id });
 
       let newId: string;
       if (uuid) {
diff --git a/ts/sql/migrations/42-stale-reactions.ts b/ts/sql/migrations/42-stale-reactions.ts
index f0785a8acf..f50c1b5353 100644
--- a/ts/sql/migrations/42-stale-reactions.ts
+++ b/ts/sql/migrations/42-stale-reactions.ts
@@ -2,7 +2,6 @@
 // SPDX-License-Identifier: AGPL-3.0-only
 
 import { batchMultiVarQuery } from '../util';
-import type { ArrayQuery } from '../util';
 import type { WritableDB } from '../Interface';
 import type { LoggerType } from '../../types/Logging';
 
@@ -39,8 +38,9 @@ export default function updateToSchemaVersion42(
     // Note: we use `pluck` here to fetch only the first column of
     // returned row.
     const messageIdList: Array<string> = db
-      .prepare('SELECT id FROM messages ORDER BY id ASC;')
-      .pluck()
+      .prepare('SELECT id FROM messages ORDER BY id ASC;', {
+        pluck: true,
+      })
       .all();
     const allReactions: Array<{
       rowid: number;
@@ -57,7 +57,7 @@ export default function updateToSchemaVersion42(
     });
 
     function deleteReactions(rowids: ReadonlyArray<number>) {
-      db.prepare<ArrayQuery>(
+      db.prepare(
        `
        DELETE FROM reactions
        WHERE rowid IN ( ${rowids.map(() => '?').join(', ')} );
diff --git a/ts/sql/migrations/43-gv2-uuid.ts b/ts/sql/migrations/43-gv2-uuid.ts
index c4f7b24dcc..bb8a476274 100644
--- a/ts/sql/migrations/43-gv2-uuid.ts
+++ b/ts/sql/migrations/43-gv2-uuid.ts
@@ -14,7 +14,6 @@ import {
   jsonToObject,
   objectToJSON,
 } from '../util';
-import type { EmptyQuery, Query } from '../util';
 import type { WritableDB } from '../Interface';
 
 type MessageType = Readonly<{
@@ -61,17 +60,18 @@ export default function updateToSchemaVersion43(
     pendingAdminApprovalV2?: Array;
   };
 
-  const getConversationUuid = db
-    .prepare(
-      `
-      SELECT uuid
-      FROM
-        conversations
-      WHERE
-        id = $conversationId
-      `
-    )
-    .pluck();
+  const getConversationUuid = db.prepare(
+    `
+    SELECT uuid
+    FROM
+      conversations
+    WHERE
+      id = $conversationId
+    `,
+    {
+      pluck: true,
+    }
+  );
 
   const updateConversationStmt = db.prepare(
     `
@@ -112,7 +112,7 @@ export default function updateToSchemaVersion43(
     const newValue = oldValue
       .map(member => {
-        const uuid: ServiceIdString = getConversationUuid.get({
+        const uuid = getConversationUuid.get({
           conversationId: member.conversationId,
         });
         if (!uuid) {
@@ -278,7 +278,7 @@ export default function updateToSchemaVersion43(
     }
     changedDetails = true;
-    const newValue: ServiceIdString | null = getConversationUuid.get({
+    const newValue = getConversationUuid.get({
       conversationId: oldValue,
     });
     if (key === 'inviter' && !newValue) {
@@ -318,7 +318,7 @@ export default function updateToSchemaVersion43(
   }
 
   if (sourceUuid) {
-    const newValue: ServiceIdString | null = getConversationUuid.get({
+    const newValue = getConversationUuid.get({
       conversationId: sourceUuid,
     });
 
@@ -333,7 +333,7 @@ export default function updateToSchemaVersion43(
   if (invitedGV2Members) {
     const newMembers = invitedGV2Members
       .map(({ addedByUserId, conversationId }, i) => {
-        const uuid: ServiceIdString | null = getConversationUuid.get({
+        const uuid = getConversationUuid.get({
          conversationId,
        });
        const oldMember =
@@ -357,7 +357,7 @@ export default function updateToSchemaVersion43(
           return newMember;
         }
 
-        const newAddedBy: ServiceIdString | null = getConversationUuid.get({
+        const newAddedBy = getConversationUuid.get({
           conversationId: addedByUserId,
         });
         if (!newAddedBy) {
@@ -392,15 +392,16 @@ export default function updateToSchemaVersion43(
   db.transaction(() => {
     const allConversations = db
-      .prepare<EmptyQuery>(
+      .prepare(
         `
-        SELECT json, profileLastFetchedAt
+        SELECT json
         FROM conversations
         ORDER BY id ASC;
-        `
+        `,
+        { pluck: true }
       )
-      .all()
-      .map(({ json }) => jsonToObject(json));
+      .all()
+      .map(json => jsonToObject(json));
 
     logger.info(
       'updateToSchemaVersion43: About to iterate through ' +
diff --git a/ts/sql/migrations/44-badges.ts b/ts/sql/migrations/44-badges.ts
index 6759e905d3..64a60a2cec 100644
--- a/ts/sql/migrations/44-badges.ts
+++ b/ts/sql/migrations/44-badges.ts
@@ -1,7 +1,7 @@
 // Copyright 2021 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { Database } from '@signalapp/better-sqlite3';
+import type { Database } from '@signalapp/sqlcipher';
 import type { LoggerType } from '../../types/Logging';
diff --git a/ts/sql/migrations/45-stories.ts b/ts/sql/migrations/45-stories.ts
index 6c39ca8537..c864536b34 100644
--- a/ts/sql/migrations/45-stories.ts
+++ b/ts/sql/migrations/45-stories.ts
@@ -1,7 +1,7 @@
 // Copyright 2021 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { Database } from '@signalapp/better-sqlite3';
+import type { Database } from '@signalapp/sqlcipher';
 import type { LoggerType } from '../../types/Logging';
diff --git a/ts/sql/migrations/46-optimize-stories.ts b/ts/sql/migrations/46-optimize-stories.ts
index e93352834d..12be945096 100644
--- a/ts/sql/migrations/46-optimize-stories.ts
+++ b/ts/sql/migrations/46-optimize-stories.ts
@@ -1,7 +1,7 @@
 // Copyright 2021 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { Database } from '@signalapp/better-sqlite3';
+import type { Database } from '@signalapp/sqlcipher';
 import type { LoggerType } from '../../types/Logging';
diff --git a/ts/sql/migrations/47-further-optimize.ts b/ts/sql/migrations/47-further-optimize.ts
index d53307ff9c..752fe0774a 100644
--- a/ts/sql/migrations/47-further-optimize.ts
+++ b/ts/sql/migrations/47-further-optimize.ts
@@ -4,7 +4,6 @@
 import type { LoggerType } from '../../types/Logging';
 import { getOurUuid } from './41-uuid-keys';
 import type { WritableDB } from '../Interface';
-import type { Query } from '../util';
 
 export default function updateToSchemaVersion47(
   currentVersion: number,
@@ -123,7 +122,7 @@ export default function updateToSchemaVersion47(
     if (!ourUuid) {
       logger.info('updateToSchemaVersion47: our UUID not found');
     } else {
-      db.prepare<Query>(
+      db.prepare(
         `
         UPDATE messages SET
           isChangeCreatedByUs = json_extract(json, '$.groupV2Change.from') IS $ourUuid;
diff --git a/ts/sql/migrations/48-fix-user-initiated-index.ts b/ts/sql/migrations/48-fix-user-initiated-index.ts
index ff824420a1..b9cd911f81 100644
--- a/ts/sql/migrations/48-fix-user-initiated-index.ts
+++ b/ts/sql/migrations/48-fix-user-initiated-index.ts
@@ -1,7 +1,7 @@
 // Copyright 2021 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { Database } from '@signalapp/better-sqlite3';
+import type { Database } from '@signalapp/sqlcipher';
 import type { LoggerType } from '../../types/Logging';
diff --git a/ts/sql/migrations/49-fix-preview-index.ts b/ts/sql/migrations/49-fix-preview-index.ts
index f399feaddf..d33aca5b3a 100644
--- a/ts/sql/migrations/49-fix-preview-index.ts
+++ b/ts/sql/migrations/49-fix-preview-index.ts
@@ -1,7 +1,7 @@
 // Copyright 2021 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
-import type { Database } from '@signalapp/better-sqlite3';
+import type { Database } from '@signalapp/sqlcipher';
 import type { LoggerType } from '../../types/Logging';
diff --git a/ts/sql/migrations/50-fix-messages-unread-index.ts b/ts/sql/migrations/50-fix-messages-unread-index.ts
index 03bed8cd5f..5f23e921fc 100644
---
a/ts/sql/migrations/50-fix-messages-unread-index.ts +++ b/ts/sql/migrations/50-fix-messages-unread-index.ts @@ -1,7 +1,7 @@ // Copyright 2021 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/51-centralize-conversation-jobs.ts b/ts/sql/migrations/51-centralize-conversation-jobs.ts index f9d730d33b..3cb1976db3 100644 --- a/ts/sql/migrations/51-centralize-conversation-jobs.ts +++ b/ts/sql/migrations/51-centralize-conversation-jobs.ts @@ -46,7 +46,7 @@ export default function updateToSchemaVersion51( return; } - const message = getMessageById.get(messageId); + const message = getMessageById.get([messageId]); if (!message) { logger.warn( `updateToSchemaVersion51: Unable to find message for reaction job ${id}` diff --git a/ts/sql/migrations/52-optimize-stories.ts b/ts/sql/migrations/52-optimize-stories.ts index dd534e11cf..a5987249fb 100644 --- a/ts/sql/migrations/52-optimize-stories.ts +++ b/ts/sql/migrations/52-optimize-stories.ts @@ -1,7 +1,7 @@ // Copyright 2022 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/53-gv2-banned-members.ts b/ts/sql/migrations/53-gv2-banned-members.ts index 8398494a3d..801fe38a84 100644 --- a/ts/sql/migrations/53-gv2-banned-members.ts +++ b/ts/sql/migrations/53-gv2-banned-members.ts @@ -1,11 +1,10 @@ // Copyright 2022 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; import { jsonToObject } from '../util'; -import type { EmptyQuery } from '../util'; export default function updateToSchemaVersion53( currentVersion: number, @@ -67,16 +66,17 @@ export default function updateToSchemaVersion53( db.transaction(() => { const allConversations = db - .prepare( + .prepare( ` - SELECT json, profileLastFetchedAt + SELECT json FROM conversations WHERE type = 'group' ORDER BY id ASC; - ` + `, + { pluck: true } ) - .all() - .map(({ json }) => jsonToObject(json)); + .all() + .map(json => jsonToObject(json)); logger.info( 'updateToSchemaVersion53: About to iterate through ' + diff --git a/ts/sql/migrations/54-unprocessed-received-at-counter.ts b/ts/sql/migrations/54-unprocessed-received-at-counter.ts index 2c90c27ddd..2678deb633 100644 --- a/ts/sql/migrations/54-unprocessed-received-at-counter.ts +++ b/ts/sql/migrations/54-unprocessed-received-at-counter.ts @@ -1,7 +1,7 @@ // Copyright 2022 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/56-add-unseen-to-message.ts b/ts/sql/migrations/56-add-unseen-to-message.ts index cbb86599a4..4e79451ef4 100644 --- a/ts/sql/migrations/56-add-unseen-to-message.ts +++ b/ts/sql/migrations/56-add-unseen-to-message.ts @@ -1,7 +1,7 @@ // Copyright 2021 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import 
{ ReadStatus } from '../../messages/MessageReadStatus'; import { SeenStatus } from '../../MessageSeenStatus'; diff --git a/ts/sql/migrations/57-rm-message-history-unsynced.ts b/ts/sql/migrations/57-rm-message-history-unsynced.ts index e7ed36c795..fd0c51c670 100644 --- a/ts/sql/migrations/57-rm-message-history-unsynced.ts +++ b/ts/sql/migrations/57-rm-message-history-unsynced.ts @@ -1,7 +1,7 @@ // Copyright 2022 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/58-update-unread.ts b/ts/sql/migrations/58-update-unread.ts index b8c6b8a98c..3c96dfa9eb 100644 --- a/ts/sql/migrations/58-update-unread.ts +++ b/ts/sql/migrations/58-update-unread.ts @@ -1,7 +1,7 @@ // Copyright 2021 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import { ReadStatus } from '../../messages/MessageReadStatus'; import { SeenStatus } from '../../MessageSeenStatus'; diff --git a/ts/sql/migrations/59-unprocessed-received-at-counter-index.ts b/ts/sql/migrations/59-unprocessed-received-at-counter-index.ts index beeb07de7b..61fd8ed98f 100644 --- a/ts/sql/migrations/59-unprocessed-received-at-counter-index.ts +++ b/ts/sql/migrations/59-unprocessed-received-at-counter-index.ts @@ -1,7 +1,7 @@ // Copyright 2022 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/60-update-expiring-index.ts b/ts/sql/migrations/60-update-expiring-index.ts index 58fcb1838b..42ff0fce57 100644 --- a/ts/sql/migrations/60-update-expiring-index.ts +++ b/ts/sql/migrations/60-update-expiring-index.ts @@ -1,7 +1,7 @@ // Copyright 2021 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/61-distribution-list-storage.ts b/ts/sql/migrations/61-distribution-list-storage.ts index d4e85ecfa7..badab21781 100644 --- a/ts/sql/migrations/61-distribution-list-storage.ts +++ b/ts/sql/migrations/61-distribution-list-storage.ts @@ -1,7 +1,7 @@ // Copyright 2022 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/62-add-urgent-to-send-log.ts b/ts/sql/migrations/62-add-urgent-to-send-log.ts index c4da52818a..b0a4b8b061 100644 --- a/ts/sql/migrations/62-add-urgent-to-send-log.ts +++ b/ts/sql/migrations/62-add-urgent-to-send-log.ts @@ -1,7 +1,7 @@ // Copyright 2021 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/63-add-urgent-to-unprocessed.ts b/ts/sql/migrations/63-add-urgent-to-unprocessed.ts index 2149c953da..5dadfb8616 100644 --- a/ts/sql/migrations/63-add-urgent-to-unprocessed.ts +++ 
b/ts/sql/migrations/63-add-urgent-to-unprocessed.ts @@ -1,7 +1,7 @@ // Copyright 2021 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/64-uuid-column-for-pre-keys.ts b/ts/sql/migrations/64-uuid-column-for-pre-keys.ts index 1c7debeb9b..e0fae3462b 100644 --- a/ts/sql/migrations/64-uuid-column-for-pre-keys.ts +++ b/ts/sql/migrations/64-uuid-column-for-pre-keys.ts @@ -1,7 +1,7 @@ // Copyright 2022 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/65-add-storage-id-to-stickers.ts b/ts/sql/migrations/65-add-storage-id-to-stickers.ts index 12197fdf55..c35ba4bb95 100644 --- a/ts/sql/migrations/65-add-storage-id-to-stickers.ts +++ b/ts/sql/migrations/65-add-storage-id-to-stickers.ts @@ -1,7 +1,7 @@ // Copyright 2021 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/66-add-pni-signature-to-sent-protos.ts b/ts/sql/migrations/66-add-pni-signature-to-sent-protos.ts index 11c7018685..a669fb619d 100644 --- a/ts/sql/migrations/66-add-pni-signature-to-sent-protos.ts +++ b/ts/sql/migrations/66-add-pni-signature-to-sent-protos.ts @@ -1,7 +1,7 @@ // Copyright 2022 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/67-add-story-to-unprocessed.ts b/ts/sql/migrations/67-add-story-to-unprocessed.ts index cea1070f17..3341d056ba 100644 --- a/ts/sql/migrations/67-add-story-to-unprocessed.ts +++ b/ts/sql/migrations/67-add-story-to-unprocessed.ts @@ -1,7 +1,7 @@ // Copyright 2021 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/68-drop-deprecated-columns.ts b/ts/sql/migrations/68-drop-deprecated-columns.ts index 7357d9be43..9e0c37c202 100644 --- a/ts/sql/migrations/68-drop-deprecated-columns.ts +++ b/ts/sql/migrations/68-drop-deprecated-columns.ts @@ -1,7 +1,7 @@ // Copyright 2022 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/69-group-call-ring-cancellations.ts b/ts/sql/migrations/69-group-call-ring-cancellations.ts index d216dbcdc1..33f4af041d 100644 --- a/ts/sql/migrations/69-group-call-ring-cancellations.ts +++ b/ts/sql/migrations/69-group-call-ring-cancellations.ts @@ -1,7 +1,7 @@ // Copyright 2022 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git 
a/ts/sql/migrations/70-story-reply-index.ts b/ts/sql/migrations/70-story-reply-index.ts index aae22ff84f..0c63170d7a 100644 --- a/ts/sql/migrations/70-story-reply-index.ts +++ b/ts/sql/migrations/70-story-reply-index.ts @@ -1,7 +1,7 @@ // Copyright 2021 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/71-merge-notifications.ts b/ts/sql/migrations/71-merge-notifications.ts index b4d76d3a14..70c92023d9 100644 --- a/ts/sql/migrations/71-merge-notifications.ts +++ b/ts/sql/migrations/71-merge-notifications.ts @@ -1,7 +1,7 @@ // Copyright 2021 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/72-optimize-call-id-message-lookup.ts b/ts/sql/migrations/72-optimize-call-id-message-lookup.ts index 4be96343f2..4e82af8675 100644 --- a/ts/sql/migrations/72-optimize-call-id-message-lookup.ts +++ b/ts/sql/migrations/72-optimize-call-id-message-lookup.ts @@ -1,7 +1,7 @@ // Copyright 2021 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; export default function updateToSchemaVersion72( diff --git a/ts/sql/migrations/73-remove-phone-number-discovery.ts b/ts/sql/migrations/73-remove-phone-number-discovery.ts index ee3766ad1b..7508800ead 100644 --- a/ts/sql/migrations/73-remove-phone-number-discovery.ts +++ b/ts/sql/migrations/73-remove-phone-number-discovery.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/74-optimize-convo-open.ts b/ts/sql/migrations/74-optimize-convo-open.ts index cf80c29b33..83a284d23f 100644 --- a/ts/sql/migrations/74-optimize-convo-open.ts +++ b/ts/sql/migrations/74-optimize-convo-open.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/75-noop.ts b/ts/sql/migrations/75-noop.ts index 41bb20dc76..dd745220c8 100644 --- a/ts/sql/migrations/75-noop.ts +++ b/ts/sql/migrations/75-noop.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/76-optimize-convo-open-2.ts b/ts/sql/migrations/76-optimize-convo-open-2.ts index d9530a6171..192224e8f1 100644 --- a/ts/sql/migrations/76-optimize-convo-open-2.ts +++ b/ts/sql/migrations/76-optimize-convo-open-2.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from 
'@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/77-signal-tokenizer.ts b/ts/sql/migrations/77-signal-tokenizer.ts index 1470dd866f..cdc04f69d8 100644 --- a/ts/sql/migrations/77-signal-tokenizer.ts +++ b/ts/sql/migrations/77-signal-tokenizer.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; @@ -18,7 +18,7 @@ export default function updateToSchemaVersion77( db.exec( ` -- Create FTS table with custom tokenizer from - -- @signalapp/better-sqlite3. + -- @signalapp/sqlcipher. DROP TABLE messages_fts; diff --git a/ts/sql/migrations/78-merge-receipt-jobs.ts b/ts/sql/migrations/78-merge-receipt-jobs.ts index d59fc3d22f..06abd1c418 100644 --- a/ts/sql/migrations/78-merge-receipt-jobs.ts +++ b/ts/sql/migrations/78-merge-receipt-jobs.ts @@ -66,7 +66,7 @@ export default function updateToSchemaVersion78( return; } - const message = getMessageById.get(messageId); + const message = getMessageById.get([messageId]); if (!message) { logger.warn( `updateToSchemaVersion78: Unable to find message for ${queue.queueType} job ${id}` diff --git a/ts/sql/migrations/79-paging-lightbox.ts b/ts/sql/migrations/79-paging-lightbox.ts index d404014fff..d5c4b3c95c 100644 --- a/ts/sql/migrations/79-paging-lightbox.ts +++ b/ts/sql/migrations/79-paging-lightbox.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/80-edited-messages.ts b/ts/sql/migrations/80-edited-messages.ts index 95de47e874..3eb322e43a 100644 --- a/ts/sql/migrations/80-edited-messages.ts +++ b/ts/sql/migrations/80-edited-messages.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/81-contact-removed-notification.ts b/ts/sql/migrations/81-contact-removed-notification.ts index 91fe81908b..c0f9600060 100644 --- a/ts/sql/migrations/81-contact-removed-notification.ts +++ b/ts/sql/migrations/81-contact-removed-notification.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/82-edited-messages-read-index.ts b/ts/sql/migrations/82-edited-messages-read-index.ts index 0747c19f4a..f95ba0bd63 100644 --- a/ts/sql/migrations/82-edited-messages-read-index.ts +++ b/ts/sql/migrations/82-edited-messages-read-index.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/83-mentions.ts b/ts/sql/migrations/83-mentions.ts index 084143b857..7209c286d7 100644 --- a/ts/sql/migrations/83-mentions.ts +++ 
b/ts/sql/migrations/83-mentions.ts @@ -1,7 +1,7 @@ // Copyright 2021 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; export default function updateToSchemaVersion83( diff --git a/ts/sql/migrations/84-all-mentions.ts b/ts/sql/migrations/84-all-mentions.ts index 607261eda6..8e2067aed8 100644 --- a/ts/sql/migrations/84-all-mentions.ts +++ b/ts/sql/migrations/84-all-mentions.ts @@ -1,7 +1,7 @@ // Copyright 2021 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; export default function updateToSchemaVersion84( diff --git a/ts/sql/migrations/85-add-kyber-keys.ts b/ts/sql/migrations/85-add-kyber-keys.ts index 740388655c..d49fd555fb 100644 --- a/ts/sql/migrations/85-add-kyber-keys.ts +++ b/ts/sql/migrations/85-add-kyber-keys.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/86-story-replies-index.ts b/ts/sql/migrations/86-story-replies-index.ts index 0fda5328f9..2ca9723584 100644 --- a/ts/sql/migrations/86-story-replies-index.ts +++ b/ts/sql/migrations/86-story-replies-index.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/87-cleanup.ts b/ts/sql/migrations/87-cleanup.ts index 73e05ac15a..52f2ad138f 100644 --- a/ts/sql/migrations/87-cleanup.ts +++ b/ts/sql/migrations/87-cleanup.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; import { cleanKeys } from './920-clean-more-keys'; diff --git a/ts/sql/migrations/88-service-ids.ts b/ts/sql/migrations/88-service-ids.ts index 852e293094..0848057c05 100644 --- a/ts/sql/migrations/88-service-ids.ts +++ b/ts/sql/migrations/88-service-ids.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import { omit } from 'lodash'; import type { LoggerType } from '../../types/Logging'; @@ -484,27 +484,31 @@ function migrateItems(db: Database, logger: LoggerType): OurServiceIds { const uuidIdJson = db .prepare( ` - SELECT json - FROM items - WHERE id IS 'uuid_id' - ` + SELECT json + FROM items + WHERE id IS 'uuid_id' + `, + { + pluck: true, + } ) - .pluck() - .get(); + .get(); const pniJson = db .prepare( ` - SELECT json - FROM items - WHERE id IS 'pni' - ` + SELECT json + FROM items + WHERE id IS 'pni' + `, + { + pluck: true, + } ) - .pluck() - .get(); + .get(); let legacyAci: string | undefined; try { - [legacyAci] = JSON.parse(uuidIdJson).value.split('.', 2); + [legacyAci] = JSON.parse(uuidIdJson ?? 
'').value.split('.', 2); } catch (error) { if (uuidIdJson) { logger.warn( @@ -518,7 +522,7 @@ function migrateItems(db: Database, logger: LoggerType): OurServiceIds { let legacyPni: string | undefined; try { - legacyPni = JSON.parse(pniJson).value; + legacyPni = JSON.parse(pniJson ?? '').value; } catch (error) { if (pniJson) { logger.warn('updateToSchemaVersion88: failed to parse pni item', error); @@ -852,7 +856,10 @@ function migratePreKeys( ourServiceIds: OurServiceIds, logger: LoggerType ): void { - const preKeys = db.prepare(`SELECT id, json FROM ${table}`).all(); + const preKeys = db.prepare(`SELECT id, json FROM ${table}`).all<{ + id: string; + json: string; + }>(); const updateStmt = db.prepare(` UPDATE ${table} @@ -1001,7 +1008,11 @@ function migrateJobs( identifierToServiceId: Map, logger: LoggerType ): void { - const jobs = db.prepare('SELECT id, queueType, data FROM jobs').all(); + const jobs = db.prepare('SELECT id, queueType, data FROM jobs').all<{ + id: string; + queueType: string; + data: string; + }>(); const updateStmt = db.prepare('UPDATE jobs SET data = $data WHERE id IS $id'); let updatedCount = 0; diff --git a/ts/sql/migrations/90-delete-story-reply-screenshot.ts b/ts/sql/migrations/90-delete-story-reply-screenshot.ts index c910aa36ad..b791923784 100644 --- a/ts/sql/migrations/90-delete-story-reply-screenshot.ts +++ b/ts/sql/migrations/90-delete-story-reply-screenshot.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; import { sql } from '../util'; diff --git a/ts/sql/migrations/91-clean-keys.ts b/ts/sql/migrations/91-clean-keys.ts index 6f3229f742..67534c2d62 100644 --- a/ts/sql/migrations/91-clean-keys.ts +++ b/ts/sql/migrations/91-clean-keys.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database, RunResult } from '@signalapp/better-sqlite3'; +import type { Database, RunResult } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; import { sql } from '../util'; @@ -60,10 +60,12 @@ export default function updateToSchemaVersion91( // Do overall count - if it's less than 1000, move on - const totalKeys = db - .prepare('SELECT count(*) FROM preKeys;') - .pluck(true) - .get(); + const totalKeys = + db + .prepare('SELECT count(*) FROM preKeys;', { + pluck: true, + }) + .get() ?? 0; logger.info(`updateToSchemaVersion91: Found ${totalKeys} keys`); if (totalKeys < 1000) { db.pragma('user_version = 91'); @@ -74,11 +76,12 @@ export default function updateToSchemaVersion91( let pni: PniString; const pniJson = db - .prepare("SELECT json FROM items WHERE id IS 'pni'") - .pluck() - .get(); + .prepare("SELECT json FROM items WHERE id IS 'pni'", { + pluck: true, + }) + .get(); try { - const pniData = JSON.parse(pniJson); + const pniData = JSON.parse(pniJson ?? 
''); pni = normalizePni(pniData.value, 'updateToSchemaVersion91'); } catch (error) { db.pragma('user_version = 91'); @@ -97,7 +100,11 @@ export default function updateToSchemaVersion91( const [beforeQuery, beforeParams] = sql`SELECT count(*) from preKeys WHERE ourServiceId = ${pni}`; - const beforeKeys = db.prepare(beforeQuery).pluck(true).get(beforeParams); + const beforeKeys = db + .prepare(beforeQuery, { + pluck: true, + }) + .get(beforeParams); logger.info(`updateToSchemaVersion91: Found ${beforeKeys} preKeys for PNI`); // Create index to help us with all these queries @@ -124,7 +131,11 @@ export default function updateToSchemaVersion91( LIMIT 1 OFFSET 499 `; - const oldBoundary = db.prepare(oldQuery).pluck(true).get(oldParams); + const oldBoundary = db + .prepare(oldQuery, { + pluck: true, + }) + .get(oldParams); logger.info( `updateToSchemaVersion91: Found 500th-oldest timestamp: ${oldBoundary}` ); @@ -141,7 +152,11 @@ export default function updateToSchemaVersion91( LIMIT 1 OFFSET 499 `; - const newBoundary = db.prepare(newQuery).pluck(true).get(newParams); + const newBoundary = db + .prepare(newQuery, { + pluck: true, + }) + .get(newParams); logger.info( `updateToSchemaVersion91: Found 500th-newest timestamp: ${newBoundary}` ); @@ -155,8 +170,8 @@ export default function updateToSchemaVersion91( SELECT rowid FROM preKeys WHERE createdAt IS NOT NULL AND - createdAt > ${oldBoundary} AND - createdAt < ${newBoundary} AND + createdAt > ${oldBoundary ?? null} AND + createdAt < ${newBoundary ?? null} AND ourServiceId = ${pni} LIMIT 10000 ); @@ -175,7 +190,11 @@ export default function updateToSchemaVersion91( FROM preKeys WHERE ourServiceId = ${pni}; `; - const afterCount = db.prepare(afterQuery).pluck(true).get(afterParams); + const afterCount = db + .prepare(afterQuery, { + pluck: true, + }) + .get(afterParams); logger.info( `updateToSchemaVersion91: Found ${afterCount} preKeys for PNI after delete` ); diff --git a/ts/sql/migrations/920-clean-more-keys.ts b/ts/sql/migrations/920-clean-more-keys.ts index 9dad04d59e..9adb0d1916 100644 --- a/ts/sql/migrations/920-clean-more-keys.ts +++ b/ts/sql/migrations/920-clean-more-keys.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database, RunResult } from '@signalapp/better-sqlite3'; +import type { Database, RunResult } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; import type { QueryFragment } from '../util'; @@ -65,11 +65,12 @@ export function cleanKeys( // Grab our PNI let pni: PniString; const pniJson = db - .prepare("SELECT json FROM items WHERE id IS 'pni'") - .pluck() - .get(); + .prepare("SELECT json FROM items WHERE id IS 'pni'", { + pluck: true, + }) + .get(); try { - const pniData = JSON.parse(pniJson); + const pniData = JSON.parse(pniJson ?? ''); pni = normalizePni(pniData.value, logId); } catch (error) { if (pniJson) { @@ -84,10 +85,12 @@ export function cleanKeys( } // Do overall count - if it's less than 1000, move on - const totalKeys = db - .prepare(sql`SELECT count(*) FROM ${tableName};`[0]) - .pluck(true) - .get(); + const totalKeys = + db + .prepare(sql`SELECT count(*) FROM ${tableName};`[0], { + pluck: true, + }) + .get() ?? 
0; logger.info(`${logId}: Found ${totalKeys} total keys`); if (totalKeys < 1000) { return; @@ -96,7 +99,11 @@ export function cleanKeys( // Grab PNI-specific count const [beforeQuery, beforeParams] = sql`SELECT count(*) from ${tableName} WHERE ${idField} = ${pni}`; - const beforeKeys = db.prepare(beforeQuery).pluck(true).get(beforeParams); + const beforeKeys = db + .prepare(beforeQuery, { + pluck: true, + }) + .get(beforeParams); logger.info(`${logId}: Found ${beforeKeys} keys for PNI`); // Create index to help us with all these queries @@ -123,7 +130,11 @@ export function cleanKeys( LIMIT 1 OFFSET 499 `; - const oldBoundary = db.prepare(oldQuery).pluck(true).get(oldParams); + const oldBoundary = db + .prepare(oldQuery, { + pluck: true, + }) + .get(oldParams); logger.info(`${logId}: Found 500th-oldest timestamp: ${oldBoundary}`); // Fetch 500th-newest timestamp for PNI @@ -137,7 +148,11 @@ export function cleanKeys( LIMIT 1 OFFSET 499 `; - const newBoundary = db.prepare(newQuery).pluck(true).get(newParams); + const newBoundary = db + .prepare(newQuery, { + pluck: true, + }) + .get(newParams); logger.info(`${logId}: Found 500th-newest timestamp: ${newBoundary}`); // Delete everything in between for PNI @@ -146,8 +161,8 @@ export function cleanKeys( DELETE FROM ${tableName} WHERE createdAt IS NOT NULL AND - createdAt > ${oldBoundary} AND - createdAt < ${newBoundary} AND + createdAt > ${oldBoundary ?? null} AND + createdAt < ${newBoundary ?? null} AND ${idField} = ${pni} LIMIT 10000; `; @@ -164,7 +179,11 @@ export function cleanKeys( FROM ${tableName} WHERE ${idField} = ${pni}; `; - const afterCount = db.prepare(afterQuery).pluck(true).get(afterParams); + const afterCount = db + .prepare(afterQuery, { + pluck: true, + }) + .get(afterParams); logger.info(`${logId}: Found ${afterCount} keys for PNI after delete`); db.exec( diff --git a/ts/sql/migrations/930-fts5-secure-delete.ts b/ts/sql/migrations/930-fts5-secure-delete.ts index 90fccb9d33..3b277e0a7c 100644 --- a/ts/sql/migrations/930-fts5-secure-delete.ts +++ b/ts/sql/migrations/930-fts5-secure-delete.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/940-fts5-revert.ts b/ts/sql/migrations/940-fts5-revert.ts index ce1e7d0d8e..60f17a4a64 100644 --- a/ts/sql/migrations/940-fts5-revert.ts +++ b/ts/sql/migrations/940-fts5-revert.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/950-fts5-secure-delete.ts b/ts/sql/migrations/950-fts5-secure-delete.ts index 7f3569e80b..6351a1fcc8 100644 --- a/ts/sql/migrations/950-fts5-secure-delete.ts +++ b/ts/sql/migrations/950-fts5-secure-delete.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/960-untag-pni.ts b/ts/sql/migrations/960-untag-pni.ts index 7f65fb4cd0..04a39cc2e9 100644 --- a/ts/sql/migrations/960-untag-pni.ts +++ b/ts/sql/migrations/960-untag-pni.ts @@ -1,7 
+1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; import type { @@ -74,27 +74,31 @@ function migratePni( const uuidIdJson = db .prepare( ` - SELECT json - FROM items - WHERE id IS 'uuid_id' - ` + SELECT json + FROM items + WHERE id IS 'uuid_id' + `, + { + pluck: true, + } ) - .pluck() - .get(); + .get(); const pniJson = db .prepare( ` - SELECT json - FROM items - WHERE id IS 'pni' - ` + SELECT json + FROM items + WHERE id IS 'pni' + `, + { + pluck: true, + } ) - .pluck() - .get(); + .get(); let aci: string | undefined; try { - [aci] = JSON.parse(uuidIdJson).value.split('.', 2); + [aci] = JSON.parse(uuidIdJson ?? '').value.split('.', 2); } catch (error) { if (uuidIdJson) { logger.warn( @@ -111,7 +115,7 @@ function migratePni( let legacyPni: string | undefined; try { - legacyPni = JSON.parse(pniJson).value; + legacyPni = JSON.parse(pniJson ?? '').value; } catch (error) { if (pniJson) { logger.warn('updateToSchemaVersion960: failed to parse pni item', error); @@ -188,7 +192,7 @@ function migratePreKeys( ): void { const preKeys = db .prepare(`SELECT id, json FROM ${table} WHERE ourServiceId IS $legacyPni`) - .all({ legacyPni }); + .all<{ id: string; json: string }>({ legacyPni }); const updateStmt = db.prepare(` UPDATE ${table} diff --git a/ts/sql/migrations/970-fts5-optimize.ts b/ts/sql/migrations/970-fts5-optimize.ts index 199aa7c31e..d6d39794e9 100644 --- a/ts/sql/migrations/970-fts5-optimize.ts +++ b/ts/sql/migrations/970-fts5-optimize.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; export const version = 970; diff --git a/ts/sql/migrations/980-reaction-timestamp.ts b/ts/sql/migrations/980-reaction-timestamp.ts index 07878ae7f1..0c4455324d 100644 --- a/ts/sql/migrations/980-reaction-timestamp.ts +++ b/ts/sql/migrations/980-reaction-timestamp.ts @@ -1,7 +1,7 @@ // Copyright 2023 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/990-phone-number-sharing.ts b/ts/sql/migrations/990-phone-number-sharing.ts index 59d4ec1fdd..3ec05b4384 100644 --- a/ts/sql/migrations/990-phone-number-sharing.ts +++ b/ts/sql/migrations/990-phone-number-sharing.ts @@ -1,7 +1,7 @@ // Copyright 2024 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import type { LoggerType } from '../../types/Logging'; diff --git a/ts/sql/migrations/index.ts b/ts/sql/migrations/index.ts index e48273536e..10a5e2e2f7 100644 --- a/ts/sql/migrations/index.ts +++ b/ts/sql/migrations/index.ts @@ -1,7 +1,7 @@ // Copyright 2021 Signal Messenger, LLC // SPDX-License-Identifier: AGPL-3.0-only -import type { Database } from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; import { keyBy } from 'lodash'; import { v4 as generateUuid } from 'uuid'; @@ -14,7 +14,6 @@ import { objectToJSON, jsonToObject, } from '../util'; -import type { Query, 
EmptyQuery } from '../util'; import type { WritableDB } from '../Interface'; import updateToSchemaVersion41 from './41-uuid-keys'; @@ -957,10 +956,10 @@ function updateToSchemaVersion20( // significantly, so we drop them and recreate them later. // Drop triggers const triggers = db - .prepare( + .prepare( "SELECT * FROM sqlite_master WHERE type = 'trigger' AND tbl_name = 'messages'" ) - .all(); + .all<{ name: string; sql: string }>(); for (const trigger of triggers) { db.exec(`DROP TRIGGER ${trigger.name}`); @@ -985,28 +984,29 @@ function updateToSchemaVersion20( // Drop invalid groups and any associated messages const maybeInvalidGroups = db - .prepare( + .prepare( "SELECT * FROM conversations WHERE type = 'group' AND members IS NULL;" ) - .all(); + .all<{ id: string; json: string }>(); for (const group of maybeInvalidGroups) { const json: { id: string; members: Array } = JSON.parse( group.json ); if (!json.members || !json.members.length) { - db.prepare('DELETE FROM conversations WHERE id = $id;').run({ + db.prepare('DELETE FROM conversations WHERE id = $id;').run({ + id: json.id, + }); + db.prepare('DELETE FROM messages WHERE conversationId = $id;').run({ id: json.id, }); - db.prepare( - 'DELETE FROM messages WHERE conversationId = $id;' - ).run({ id: json.id }); } } // Generate new IDs and alter data - const allConversations = db - .prepare('SELECT * FROM conversations;') - .all(); + const allConversations = db.prepare('SELECT * FROM conversations;').all<{ + id: string; + type: string; + }>(); const allConversationsByOldId = keyBy(allConversations, 'id'); for (const row of allConversations) { @@ -1023,7 +1023,7 @@ function updateToSchemaVersion20( } const patch = JSON.stringify(patchObj); - db.prepare( + db.prepare( ` UPDATE conversations SET id = $newId, json = JSON_PATCH(json, $patch) @@ -1035,7 +1035,7 @@ function updateToSchemaVersion20( patch, }); const messagePatch = JSON.stringify({ conversationId: newId }); - db.prepare( + db.prepare( ` UPDATE messages SET conversationId = $newId, json = JSON_PATCH(json, $patch) @@ -1049,7 +1049,7 @@ function updateToSchemaVersion20( members: string; json: string; }> = db - .prepare( + .prepare( ` SELECT id, members, json FROM conversations WHERE type = 'group'; ` @@ -1088,7 +1088,7 @@ function updateToSchemaVersion20( profileSharing: false, }; - db.prepare( + db.prepare( ` UPDATE conversations SET @@ -1113,7 +1113,7 @@ function updateToSchemaVersion20( members: newMembers, }; const newMembersValue = newMembers.join(' '); - db.prepare( + db.prepare( ` UPDATE conversations SET members = $newMembersValue, json = $newJsonValue @@ -1127,7 +1127,10 @@ function updateToSchemaVersion20( }); // Update sessions to stable IDs - const allSessions = db.prepare('SELECT * FROM sessions;').all(); + const allSessions = db.prepare('SELECT * FROM sessions;').all<{ + id: string; + json: string; + }>(); for (const session of allSessions) { // Not using patch here so we can explicitly delete a property rather than // implicitly delete via null @@ -1138,7 +1141,7 @@ function updateToSchemaVersion20( newJson.id = `${newJson.conversationId}.${newJson.deviceId}`; } delete newJson.number; - db.prepare( + db.prepare( ` UPDATE sessions SET id = $newId, json = $newJson, conversationId = $newConversationId @@ -1153,13 +1156,14 @@ function updateToSchemaVersion20( } // Update identity keys to stable IDs - const allIdentityKeys = db - .prepare('SELECT * FROM identityKeys;') - .all(); + const allIdentityKeys = db.prepare('SELECT * FROM identityKeys;').all<{ + json: string; + 
id: number; + }>(); for (const identityKey of allIdentityKeys) { const newJson = JSON.parse(identityKey.json); newJson.id = allConversationsByOldId[newJson.id]; - db.prepare( + db.prepare( ` UPDATE identityKeys SET id = $newId, json = $newJson @@ -2105,10 +2109,12 @@ export function enableFTS5SecureDelete(db: Database, logger: LoggerType): void { db .prepare( ` - SELECT v FROM messages_fts_config WHERE k is 'secure-delete'; - ` + SELECT v FROM messages_fts_config WHERE k is 'secure-delete'; + `, + { + pluck: true, + } ) - .pluck() .get() === 1; if (!isEnabled) { diff --git a/ts/sql/server/callLinks.ts b/ts/sql/server/callLinks.ts index 139bc9e7db..b89789eafa 100644 --- a/ts/sql/server/callLinks.ts +++ b/ts/sql/server/callLinks.ts @@ -21,7 +21,6 @@ import { defunctCallLinkFromRecord, } from '../../util/callLinksRingrtc'; import type { ReadableDB, WritableDB } from '../Interface'; -import { prepare } from '../Server'; import { sql } from '../util'; import { strictAssert } from '../../util/assert'; import { CallStatusValue, DirectCallStatus } from '../../types/CallDisposition'; @@ -33,7 +32,13 @@ export function callLinkExists(db: ReadableDB, roomId: string): boolean { FROM callLinks WHERE roomId = ${roomId}; `; - return db.prepare(query).pluck(true).get(params) === 1; + return ( + db + .prepare(query, { + pluck: true, + }) + .get(params) === 1 + ); } export function getCallLinkByRoomId( @@ -53,11 +58,9 @@ export function getCallLinkRecordByRoomId( db: ReadableDB, roomId: string ): CallLinkRecord | undefined { - const row = prepare(db, 'SELECT * FROM callLinks WHERE roomId = $roomId').get( - { - roomId, - } - ); + const row = db.prepare('SELECT * FROM callLinks WHERE roomId = $roomId').get({ + roomId, + }); if (!row) { return undefined; @@ -83,8 +86,7 @@ function _insertCallLink(db: WritableDB, callLink: CallLinkType): void { assertRoomIdMatchesRootKey(roomId, rootKey); const data = callLinkToRecord(callLink); - prepare( - db, + db.prepare( ` INSERT INTO callLinks ( roomId, @@ -174,8 +176,7 @@ export function updateCallLinkAdminKeyByRoomId( adminKey: string ): void { const adminKeyBytes = toAdminKeyBytes(adminKey); - prepare( - db, + db.prepare( ` UPDATE callLinks SET adminKey = $adminKeyBytes @@ -360,7 +361,11 @@ export function getAllMarkedDeletedCallLinkRoomIds( const [query] = sql` SELECT roomId FROM callLinks WHERE deleted = 1; `; - return db.prepare(query).pluck().all(); + return db + .prepare(query, { + pluck: true, + }) + .all(); } // TODO: Run this after uploading storage records, maybe periodically on startup @@ -387,7 +392,13 @@ export function defunctCallLinkExists(db: ReadableDB, roomId: string): boolean { FROM defunctCallLinks WHERE roomId = ${roomId}; `; - return db.prepare(query).pluck(true).get(params) === 1; + return ( + db + .prepare(query, { + pluck: true, + }) + .get(params) === 1 + ); } export function getAllDefunctCallLinksWithAdminKey( @@ -414,8 +425,7 @@ export function insertDefunctCallLink( assertRoomIdMatchesRootKey(roomId, rootKey); const data = defunctCallLinkToRecord(defunctCallLink); - prepare( - db, + db.prepare( ` INSERT INTO defunctCallLinks ( roomId, diff --git a/ts/sql/server/groupSendEndorsements.ts b/ts/sql/server/groupSendEndorsements.ts index 272371e23b..ee9b0bd690 100644 --- a/ts/sql/server/groupSendEndorsements.ts +++ b/ts/sql/server/groupSendEndorsements.ts @@ -11,7 +11,6 @@ import { groupSendMemberEndorsementSchema, groupSendEndorsementsDataSchema, } from '../../types/GroupSendEndorsements'; -import { prepare } from '../Server'; import type { 
ReadableDB, WritableDB } from '../Interface'; import { sql } from '../util'; import type { AciString } from '../../types/ServiceId'; @@ -43,8 +42,8 @@ function _deleteAllEndorsementsForGroup(db: WritableDB, groupId: string): void { DELETE FROM groupSendMemberEndorsement WHERE groupId IS ${groupId}; `; - prepare>(db, deleteCombined).run(deleteCombinedParams); - prepare>(db, deleteMembers).run(deleteMembersParams); + db.prepare(deleteCombined).run(deleteCombinedParams); + db.prepare(deleteMembers).run(deleteMembersParams); } function _replaceCombinedEndorsement( @@ -57,9 +56,7 @@ function _replaceCombinedEndorsement( (groupId, expiration, endorsement) VALUES (${groupId}, ${expiration}, ${endorsement}); `; - const result = prepare>(db, insertCombined).run( - insertCombinedParams - ); + const result = db.prepare(insertCombined).run(insertCombinedParams); strictAssert( result.changes === 1, 'Must update groupSendCombinedEndorsement' @@ -77,9 +74,7 @@ function _replaceMemberEndorsements( (groupId, memberAci, expiration, endorsement) VALUES (${groupId}, ${memberAci}, ${expiration}, ${endorsement}); `; - const result = prepare>(db, replaceMember).run( - replaceMemberParams - ); + const result = db.prepare(replaceMember).run(replaceMemberParams); strictAssert( result.changes === 1, 'Must update groupSendMemberEndorsement' @@ -104,8 +99,10 @@ export function getGroupSendCombinedEndorsementExpiration( SELECT expiration FROM groupSendCombinedEndorsement WHERE groupId IS ${groupId}; `; - const value = prepare>(db, selectGroup) - .pluck() + const value = db + .prepare(selectGroup, { + pluck: true, + }) .get(selectGroupParams); if (value == null) { return null; @@ -128,19 +125,17 @@ export function getGroupSendEndorsementsData( WHERE groupId IS ${groupId} `; - const combinedEndorsement: unknown = prepare>( - db, - selectCombinedEndorsement - ).get(selectCombinedEndorsementParams); + const combinedEndorsement: unknown = db + .prepare(selectCombinedEndorsement) + .get(selectCombinedEndorsementParams); if (combinedEndorsement == null) { return null; } - const memberEndorsements: Array = prepare>( - db, - selectMemberEndorsements - ).all(selectMemberEndorsementsParams); + const memberEndorsements: Array = db + .prepare(selectMemberEndorsements) + .all(selectMemberEndorsementsParams); return parseLoose(groupSendEndorsementsDataSchema, { combinedEndorsement, @@ -159,9 +154,9 @@ export function getGroupSendMemberEndorsement( WHERE groupId IS ${groupId} AND memberAci IS ${memberAci} `; - const row = prepare>(db, selectMemberEndorsements).get( - selectMemberEndorsementsParams - ); + const row = db + .prepare(selectMemberEndorsements) + .get(selectMemberEndorsementsParams); if (row == null) { return null; } diff --git a/ts/sql/util.ts b/ts/sql/util.ts index ac0462f2e5..2568a5f08d 100644 --- a/ts/sql/util.ts +++ b/ts/sql/util.ts @@ -6,11 +6,6 @@ import { isNumber, last } from 'lodash'; import type { ReadableDB, WritableDB } from './Interface'; import type { LoggerType } from '../types/Logging'; -export type EmptyQuery = []; -export type ArrayQuery = Array>; -export type Query = { - [key: string]: null | number | bigint | string | Uint8Array; -}; export type JSONRow = Readonly<{ json: string }>; export type JSONRows = Array; @@ -39,12 +34,7 @@ export function jsonToObject(json: string): T { return JSON.parse(json); } -export type QueryTemplateParam = - | Uint8Array - | string - | number - | null - | undefined; +export type QueryTemplateParam = Uint8Array | string | number | null; export type QueryFragmentValue = 
QueryFragment | QueryTemplateParam; export class QueryFragment { @@ -184,7 +174,11 @@ export function explainQueryPlan( template: QueryTemplate ): QueryTemplate { const [query, params] = template; - const plan = db.prepare(`EXPLAIN QUERY PLAN ${query}`).all(params); + const plan = db.prepare(`EXPLAIN QUERY PLAN ${query}`).all<{ + id: string | number; + parent: string | number; + detail: string; + }>(params); logger.info('EXPLAIN QUERY PLAN'); for (const line of query.split('\n')) { logger.info(line); @@ -200,15 +194,15 @@ export function explainQueryPlan( // export function getSQLiteVersion(db: ReadableDB): string { - const { sqlite_version: version } = db - .prepare('select sqlite_version() AS sqlite_version') - .get(); - - return version; + return ( + db + .prepare('select sqlite_version() AS sqlite_version', { pluck: true }) + .get() ?? '' + ); } export function getSchemaVersion(db: ReadableDB): number { - return db.pragma('schema_version', { simple: true }); + return db.pragma('schema_version', { simple: true }) as number; } export function setUserVersion(db: WritableDB, version: number): void { @@ -219,11 +213,11 @@ export function setUserVersion(db: WritableDB, version: number): void { } export function getUserVersion(db: ReadableDB): number { - return db.pragma('user_version', { simple: true }); + return db.pragma('user_version', { simple: true }) as number; } export function getSQLCipherVersion(db: ReadableDB): string | undefined { - return db.pragma('cipher_version', { simple: true }); + return db.pragma('cipher_version', { simple: true }) as string | undefined; } // @@ -276,7 +270,7 @@ export function createOrUpdate( throw new Error('createOrUpdate: Provided data did not have a truthy id'); } - db.prepare( + db.prepare( ` INSERT OR REPLACE INTO ${table} ( id, @@ -310,14 +304,14 @@ export function getById( id: Key ): Result | undefined { const row = db - .prepare( + .prepare( ` - SELECT * + SELECT json FROM ${table} WHERE id = $id; ` ) - .get({ + .get<{ json: string }>({ id, }); @@ -362,22 +356,21 @@ export function removeById( } export function removeAllFromTable(db: WritableDB, table: TableType): number { - return db.prepare(`DELETE FROM ${table};`).run().changes; + return db.prepare(`DELETE FROM ${table};`).run().changes; } export function getAllFromTable(db: ReadableDB, table: TableType): Array { - const rows: JSONRows = db - .prepare(`SELECT json FROM ${table};`) - .all(); + const rows: JSONRows = db.prepare(`SELECT json FROM ${table};`).all(); return rows.map(row => jsonToObject(row.json)); } export function getCountFromTable(db: ReadableDB, table: TableType): number { - const result: null | number = db - .prepare(`SELECT count(*) from ${table};`) - .pluck(true) - .get(); + const result = db + .prepare(`SELECT count(*) from ${table};`, { + pluck: true, + }) + .get(); if (isNumber(result)) { return result; } @@ -392,7 +385,7 @@ export class TableIterator { ) {} *[Symbol.iterator](): Iterator { - const fetchObject = this.db.prepare( + const fetchObject = this.db.prepare( ` SELECT json FROM ${this.table} WHERE id > $id diff --git a/ts/test-electron/models/messages_test.ts b/ts/test-electron/models/messages_test.ts index b04301fffc..066d72d708 100644 --- a/ts/test-electron/models/messages_test.ts +++ b/ts/test-electron/models/messages_test.ts @@ -61,6 +61,7 @@ describe('Message', () => { return window.MessageCache.register( new MessageModel({ id, + conversationId: 'convo', ...attrs, sent_at: Date.now(), received_at: Date.now(), diff --git 
a/ts/test-electron/services/AttachmentDownloadManager_test.ts b/ts/test-electron/services/AttachmentDownloadManager_test.ts index ed2f2deb50..09fdcba0ec 100644 --- a/ts/test-electron/services/AttachmentDownloadManager_test.ts +++ b/ts/test-electron/services/AttachmentDownloadManager_test.ts @@ -315,8 +315,8 @@ describe('AttachmentDownloadManager/JobManager', () => { assert.strictEqual(runJob.callCount, 2); assertRunJobCalledWith([jobs[1], jobs[0]]); - const retriedJob = await DataReader.getAttachmentDownloadJob(jobs[1]); - const finishedJob = await DataReader.getAttachmentDownloadJob(jobs[0]); + const retriedJob = await DataReader._getAttachmentDownloadJob(jobs[1]); + const finishedJob = await DataReader._getAttachmentDownloadJob(jobs[0]); assert.isUndefined(finishedJob); assert.strictEqual(retriedJob?.attempts, 1); @@ -349,7 +349,7 @@ describe('AttachmentDownloadManager/JobManager', () => { ]); // Ensure it's been removed after completed - assert.isUndefined(await DataReader.getAttachmentDownloadJob(jobs[1])); + assert.isUndefined(await DataReader._getAttachmentDownloadJob(jobs[1])); }); it('will reset attempts if addJob is called again', async () => { @@ -402,7 +402,7 @@ describe('AttachmentDownloadManager/JobManager', () => { assert.strictEqual(runJob.callCount, 8); // Ensure it's been removed - assert.isUndefined(await DataReader.getAttachmentDownloadJob(jobs[0])); + assert.isUndefined(await DataReader._getAttachmentDownloadJob(jobs[0])); }); it('only selects backup_import jobs if the mediaDownload is not paused', async () => { diff --git a/ts/test-electron/sql/utils_test.ts b/ts/test-electron/sql/utils_test.ts index e6e77b8aaa..0a99dd3faf 100644 --- a/ts/test-electron/sql/utils_test.ts +++ b/ts/test-electron/sql/utils_test.ts @@ -2,8 +2,8 @@ // SPDX-License-Identifier: AGPL-3.0-only import { assert } from 'chai'; -import type { Database } from '@signalapp/better-sqlite3'; -import SQL from '@signalapp/better-sqlite3'; +import type { Database } from '@signalapp/sqlcipher'; +import SQL from '@signalapp/sqlcipher'; import { sql, sqlFragment, sqlJoin } from '../../sql/util'; describe('sql/utils/sql', () => { diff --git a/ts/test-node/sql/helpers.ts b/ts/test-node/sql/helpers.ts index aaa8b0a8f2..4822748c77 100644 --- a/ts/test-node/sql/helpers.ts +++ b/ts/test-node/sql/helpers.ts @@ -2,9 +2,10 @@ // SPDX-License-Identifier: AGPL-3.0-only import { noop } from 'lodash'; -import SQL from '@signalapp/better-sqlite3'; +import SQL from '@signalapp/sqlcipher'; import type { ReadableDB, WritableDB } from '../../sql/Interface'; +import type { QueryTemplate } from '../../sql/util'; import { SCHEMA_VERSIONS } from '../../sql/migrations'; import { consoleLogger } from '../../util/consoleLogger'; @@ -13,7 +14,7 @@ export function createDB(): WritableDB { } export function updateToVersion(db: WritableDB, version: number): void { - const startVersion = db.pragma('user_version', { simple: true }); + const startVersion = db.pragma('user_version', { simple: true }) as number; const silentLogger = { ...consoleLogger, @@ -68,7 +69,7 @@ export function getTableData(db: ReadableDB, table: string): TableRows { return db .prepare(`SELECT * FROM ${table}`) .all() - .map((row: Record) => { + .map(row => { const result: Record< string, string | number | null | Record @@ -77,8 +78,8 @@ export function getTableData(db: ReadableDB, table: string): TableRows { if (value == null) { continue; } - if (Buffer.isBuffer(value)) { - result[key] = value.toString('hex'); + if (value instanceof Uint8Array) { + result[key] = 
Buffer.from(value).toString('hex'); continue; } try { @@ -93,3 +94,14 @@ export function getTableData(db: ReadableDB, table: string): TableRows { return result; }); } + +export function explain(db: ReadableDB, template: QueryTemplate): string { + const [query, params] = template; + const details = db + .prepare(`EXPLAIN QUERY PLAN ${query}`) + .all<{ detail: string }>(params) + .map(({ detail }) => detail) + .join('\n'); + + return details; +} diff --git a/ts/test-node/sql/migration_1000_test.ts b/ts/test-node/sql/migration_1000_test.ts index e4137d55c0..bf69480768 100644 --- a/ts/test-node/sql/migration_1000_test.ts +++ b/ts/test-node/sql/migration_1000_test.ts @@ -52,8 +52,8 @@ describe('SQL/updateToSchemaVersion1000', () => { ${message.id}, ${message.conversationId}, ${message.type}, - ${message.readStatus}, - ${message.seenStatus}, + ${message.readStatus ?? null}, + ${message.seenStatus ?? null}, ${json} ) `; @@ -95,7 +95,7 @@ describe('SQL/updateToSchemaVersion1000', () => { `; return db .prepare(query) - .all() + .all<{ json: string; readStatus: ReadStatus; seenStatus: SeenStatus }>() .map(row => { return { message: jsonToObject(row.json), diff --git a/ts/test-node/sql/migration_1040_test.ts b/ts/test-node/sql/migration_1040_test.ts index d813710988..d0e9f44c5d 100644 --- a/ts/test-node/sql/migration_1040_test.ts +++ b/ts/test-node/sql/migration_1040_test.ts @@ -5,20 +5,22 @@ import { assert } from 'chai'; import type { ReadableDB, WritableDB } from '../../sql/Interface'; import { jsonToObject, objectToJSON, sql, sqlJoin } from '../../sql/util'; -import { createDB, updateToVersion } from './helpers'; +import { createDB, updateToVersion, explain } from './helpers'; import type { LegacyAttachmentDownloadJobType } from '../../sql/migrations/1040-undownloaded-backed-up-media'; import type { AttachmentType } from '../../types/Attachment'; import type { AttachmentDownloadJobType } from '../../types/AttachmentDownload'; import { IMAGE_JPEG } from '../../types/MIME'; -function getAttachmentDownloadJobs(db: ReadableDB) { +function getAttachmentDownloadJobs( + db: ReadableDB +): Array> { const [query] = sql` SELECT * FROM attachment_downloads ORDER BY receivedAt DESC; `; return db .prepare(query) - .all() + .all<{ attachmentJson: string }>() .map(job => ({ ...omit(job, 'attachmentJson'), attachment: jsonToObject(job.attachmentJson), @@ -64,7 +66,7 @@ function insertNewJob( ${job.messageId}, ${job.attachmentType}, ${objectToJSON(job.attachment)}, - ${job.attachment.digest}, + ${job.attachment.digest ?? 
null}, ${job.attachment.contentType}, ${job.attachment.size}, ${job.receivedAt}, @@ -238,7 +240,7 @@ describe('SQL/updateToSchemaVersion1040', () => { }); { - const [query, params] = sql` + const template = sql` SELECT * FROM attachment_downloads WHERE active = 0 @@ -248,6 +250,7 @@ describe('SQL/updateToSchemaVersion1040', () => { LIMIT 5 `; + const [query, params] = template; const result = db.prepare(query).all(params); assert.strictEqual(result.length, 2); assert.deepStrictEqual( @@ -255,11 +258,7 @@ describe('SQL/updateToSchemaVersion1040', () => { ['message4', 'message1'] ); - const details = db - .prepare(`EXPLAIN QUERY PLAN ${query}`) - .all(params) - .map(step => step.detail) - .join(', '); + const details = explain(db, template); assert.include( details, 'USING INDEX attachment_downloads_active_receivedAt' @@ -269,7 +268,7 @@ describe('SQL/updateToSchemaVersion1040', () => { } { const messageIds = ['message1', 'message2', 'message4']; - const [query, params] = sql` + const template = sql` SELECT * FROM attachment_downloads INDEXED BY attachment_downloads_active_messageId WHERE @@ -282,17 +281,15 @@ describe('SQL/updateToSchemaVersion1040', () => { LIMIT 5 `; + const [query, params] = template; + const result = db.prepare(query).all(params); assert.strictEqual(result.length, 2); assert.deepStrictEqual( result.map(res => res.messageId), ['message1', 'message4'] ); - const details = db - .prepare(`EXPLAIN QUERY PLAN ${query}`) - .all(params) - .map(step => step.detail) - .join(', '); + const details = explain(db, template); // This query _will_ use a temp b-tree for ordering, but the number of rows // should be quite low. @@ -466,16 +463,16 @@ function insertLegacyJob( job: Partial ): void { db.prepare('INSERT OR REPLACE INTO messages (id) VALUES ($id)').run({ - id: job.messageId, + id: job.messageId ?? null, }); const [query, params] = sql` INSERT INTO attachment_downloads (id, timestamp, pending, json) VALUES ( - ${job.id}, - ${job.timestamp}, - ${job.pending}, + ${job.id ?? null}, + ${job.timestamp ?? null}, + ${job.pending ?? 
null}, ${objectToJSON(job)} ); `; diff --git a/ts/test-node/sql/migration_1060_test.ts b/ts/test-node/sql/migration_1060_test.ts index 9bf5310f71..297f259740 100644 --- a/ts/test-node/sql/migration_1060_test.ts +++ b/ts/test-node/sql/migration_1060_test.ts @@ -11,7 +11,7 @@ import { } from '../../sql/Server'; import type { WritableDB, ReadableDB, MessageType } from '../../sql/Interface'; import { sql, jsonToObject } from '../../sql/util'; -import { insertData, updateToVersion, createDB } from './helpers'; +import { insertData, updateToVersion, createDB, explain } from './helpers'; import { MAX_SYNC_TASK_ATTEMPTS } from '../../util/syncTasks.types'; import { WEEK } from '../../util/durations'; @@ -36,7 +36,7 @@ export function getMostRecentAddressableMessages( LIMIT ${limit}; `; - const rows = db.prepare(query).all(parameters); + const rows = db.prepare(query).all<{ json: string }>(parameters); return rows.map(row => jsonToObject(row.json)); } @@ -169,10 +169,9 @@ describe('SQL/updateToSchemaVersion1060', () => { }); it('ensures that index is used for getMostRecentAddressableMessages, with storyId', () => { - const { detail } = db - .prepare( - ` - EXPLAIN QUERY PLAN + const detail = explain( + db, + sql` SELECT json FROM messages INDEXED BY messages_by_date_addressable WHERE @@ -181,8 +180,7 @@ describe('SQL/updateToSchemaVersion1060', () => { ORDER BY received_at DESC, sent_at DESC LIMIT 5; ` - ) - .get(); + ); assert.notInclude(detail, 'B-TREE'); assert.notInclude(detail, 'SCAN'); diff --git a/ts/test-node/sql/migration_1080_test.ts b/ts/test-node/sql/migration_1080_test.ts index 8632a00424..450b8ef1e7 100644 --- a/ts/test-node/sql/migration_1080_test.ts +++ b/ts/test-node/sql/migration_1080_test.ts @@ -6,7 +6,7 @@ import { v4 as generateGuid } from 'uuid'; import type { WritableDB, ReadableDB, MessageType } from '../../sql/Interface'; import { sql, jsonToObject } from '../../sql/util'; -import { createDB, insertData, updateToVersion } from './helpers'; +import { createDB, insertData, updateToVersion, explain } from './helpers'; import type { MessageAttributesType } from '../../model-types'; import { DurationInSeconds } from '../../util/durations/duration-in-seconds'; @@ -43,7 +43,7 @@ export function getMostRecentAddressableNondisappearingMessages( LIMIT ${limit}; `; - const rows = db.prepare(query).all(parameters); + const rows = db.prepare(query).all<{ json: string }>(parameters); return rows.map(row => jsonToObject(row.json)); } @@ -159,10 +159,9 @@ describe('SQL/updateToSchemaVersion1080', () => { }); it('ensures that index is used for getMostRecentAddressableNondisappearingMessagesSync, with storyId', () => { - const { detail } = db - .prepare( - ` - EXPLAIN QUERY PLAN + const detail = explain( + db, + sql` SELECT json FROM messages INDEXED BY messages_by_date_addressable_nondisappearing WHERE @@ -172,8 +171,7 @@ describe('SQL/updateToSchemaVersion1080', () => { ORDER BY received_at DESC, sent_at DESC LIMIT 5; ` - ) - .get(); + ); assert.notInclude(detail, 'B-TREE'); assert.notInclude(detail, 'SCAN'); diff --git a/ts/test-node/sql/migration_1090_test.ts b/ts/test-node/sql/migration_1090_test.ts index 356af6e3a5..86864daa6b 100644 --- a/ts/test-node/sql/migration_1090_test.ts +++ b/ts/test-node/sql/migration_1090_test.ts @@ -3,7 +3,8 @@ import { assert } from 'chai'; import type { WritableDB } from '../../sql/Interface'; -import { createDB, updateToVersion } from './helpers'; +import { sql } from '../../sql/util'; +import { createDB, updateToVersion, explain } from './helpers'; 
describe('SQL/updateToSchemaVersion1090', () => { let db: WritableDB; @@ -18,16 +19,12 @@ describe('SQL/updateToSchemaVersion1090', () => { describe('Additional messages_on_delete indexes', () => { it('uses index for selecting reactions by messageId', () => { - const details = db - .prepare( - `EXPLAIN QUERY PLAN - SELECT rowid FROM reactions + const details = explain( + db, + sql`SELECT rowid FROM reactions WHERE messageId = '123'; ` - ) - .all() - .map(step => step.detail) - .join(', '); + ); assert.strictEqual( details, @@ -36,15 +33,10 @@ describe('SQL/updateToSchemaVersion1090', () => { }); it('uses index for selecting storyReads by storyId', () => { - const details = db - .prepare( - `EXPLAIN QUERY PLAN - DELETE FROM storyReads WHERE storyId = '123'; - ` - ) - .all() - .map(step => step.detail) - .join(', '); + const details = explain( + db, + sql`DELETE FROM storyReads WHERE storyId = '123';` + ); assert.strictEqual( details, diff --git a/ts/test-node/sql/migration_1120_test.ts b/ts/test-node/sql/migration_1120_test.ts index 4d8e77485f..a2d1afed6c 100644 --- a/ts/test-node/sql/migration_1120_test.ts +++ b/ts/test-node/sql/migration_1120_test.ts @@ -3,7 +3,8 @@ import { assert } from 'chai'; import type { WritableDB } from '../../sql/Interface'; -import { createDB, updateToVersion } from './helpers'; +import { sql } from '../../sql/util'; +import { createDB, updateToVersion, explain } from './helpers'; describe('SQL/updateToSchemaVersion1120', () => { let db: WritableDB; @@ -17,15 +18,10 @@ describe('SQL/updateToSchemaVersion1120', () => { }); it('uses index for deleting edited messages', () => { - const details = db - .prepare( - `EXPLAIN QUERY PLAN - DELETE FROM edited_messages WHERE messageId = 'messageId'; - ` - ) - .all() - .map(step => step.detail) - .join(', '); + const details = explain( + db, + sql`DELETE FROM edited_messages WHERE messageId = 'messageId';` + ); assert.strictEqual( details, @@ -34,15 +30,10 @@ describe('SQL/updateToSchemaVersion1120', () => { }); it('uses index for deleting mentions', () => { - const details = db - .prepare( - `EXPLAIN QUERY PLAN - DELETE FROM mentions WHERE messageId = 'messageId'; - ` - ) - .all() - .map(step => step.detail) - .join(', '); + const details = explain( + db, + sql`DELETE FROM mentions WHERE messageId = 'messageId';` + ); assert.strictEqual( details, diff --git a/ts/test-node/sql/migration_1130_test.ts b/ts/test-node/sql/migration_1130_test.ts index 5516c9a867..6e95f1748a 100644 --- a/ts/test-node/sql/migration_1130_test.ts +++ b/ts/test-node/sql/migration_1130_test.ts @@ -3,7 +3,8 @@ import { assert } from 'chai'; import type { WritableDB } from '../../sql/Interface'; -import { createDB, updateToVersion } from './helpers'; +import { sql } from '../../sql/util'; +import { createDB, updateToVersion, explain } from './helpers'; describe('SQL/updateToSchemaVersion1130', () => { let db: WritableDB; @@ -17,10 +18,9 @@ describe('SQL/updateToSchemaVersion1130', () => { }); it('uses new index for getAllStories query and no params', () => { - const details = db - .prepare( - ` - EXPLAIN QUERY PLAN + const details = explain( + db, + sql` SELECT json, id FROM messages WHERE @@ -29,19 +29,15 @@ describe('SQL/updateToSchemaVersion1130', () => { (NULL IS NULL OR sourceServiceId IS NULL) ORDER BY received_at ASC, sent_at ASC; ` - ) - .all() - .map(step => step.detail) - .join(', '); + ); assert.strictEqual(details, 'SCAN messages USING INDEX messages_isStory'); }); it('uses new index for getAllStories query and with conversationId', () => { 
- const details = db - .prepare( - ` - EXPLAIN QUERY PLAN + const details = explain( + db, + sql` SELECT json, id FROM messages WHERE @@ -50,19 +46,15 @@ describe('SQL/updateToSchemaVersion1130', () => { (NULL IS NULL OR sourceServiceId IS NULL) ORDER BY received_at ASC, sent_at ASC; ` - ) - .all() - .map(step => step.detail) - .join(', '); + ); assert.strictEqual(details, 'SCAN messages USING INDEX messages_isStory'); }); it('uses new index for getAllStories query and with sourceServiceId', () => { - const details = db - .prepare( - ` - EXPLAIN QUERY PLAN + const details = explain( + db, + sql` SELECT json, id FROM messages WHERE @@ -71,19 +63,15 @@ describe('SQL/updateToSchemaVersion1130', () => { ('something' IS NULL OR sourceServiceId IS 'something') ORDER BY received_at ASC, sent_at ASC; ` - ) - .all() - .map(step => step.detail) - .join(', '); + ); assert.strictEqual(details, 'SCAN messages USING INDEX messages_isStory'); }); it('uses new index for getAllStories query and both params', () => { - const details = db - .prepare( - ` - EXPLAIN QUERY PLAN + const details = explain( + db, + sql` SELECT json, id FROM messages WHERE @@ -92,27 +80,20 @@ describe('SQL/updateToSchemaVersion1130', () => { ('something' IS NULL OR sourceServiceId IS 'something') ORDER BY received_at ASC, sent_at ASC; ` - ) - .all() - .map(step => step.detail) - .join(', '); + ); assert.strictEqual(details, 'SCAN messages USING INDEX messages_isStory'); }); it('uses previous index for getAllStories get replies query', () => { - const details = db - .prepare( - ` - EXPLAIN QUERY PLAN + const details = explain( + db, + sql` SELECT DISTINCT storyId FROM messages WHERE storyId IS NOT NULL ` - ) - .all() - .map(step => step.detail) - .join(', '); + ); assert.strictEqual( details, @@ -121,10 +102,9 @@ describe('SQL/updateToSchemaVersion1130', () => { }); it('uses previous index for getAllStories get replies from self query', () => { - const details = db - .prepare( - ` - EXPLAIN QUERY PLAN + const details = explain( + db, + sql` SELECT DISTINCT storyId FROM messages WHERE ( @@ -132,10 +112,7 @@ describe('SQL/updateToSchemaVersion1130', () => { type IS 'outgoing' ) ` - ) - .all() - .map(step => step.detail) - .join(', '); + ); assert.strictEqual( details, diff --git a/ts/test-node/sql/migration_1180_test.ts b/ts/test-node/sql/migration_1180_test.ts index cb4bc5608e..426f22e913 100644 --- a/ts/test-node/sql/migration_1180_test.ts +++ b/ts/test-node/sql/migration_1180_test.ts @@ -4,7 +4,7 @@ import { assert } from 'chai'; import { omit } from 'lodash'; import type { WritableDB } from '../../sql/Interface'; -import { createDB, updateToVersion } from './helpers'; +import { createDB, updateToVersion, explain } from './helpers'; import type { AttachmentDownloadJobType } from '../../types/AttachmentDownload'; import { jsonToObject, objectToJSON, sql } from '../../sql/util'; import { IMAGE_BMP } from '../../types/MIME'; @@ -59,14 +59,14 @@ function insertOldJob( db.prepare(query).run(params); } -function getAttachmentDownloadJobs(db: WritableDB) { +function getAttachmentDownloadJobs(db: WritableDB): unknown { const [query] = sql` SELECT * FROM attachment_downloads ORDER BY receivedAt DESC; `; return db .prepare(query) - .all() + .all<{ active: number; attachmentJson: string }>() .map(job => ({ ...omit(job, 'attachmentJson'), active: job.active === 1, @@ -108,17 +108,13 @@ describe('SQL/updateToSchemaVersion1180', () => { }); it('uses convering index for summing all pending backup jobs', async () => { updateToVersion(db, 1180); - 
const details = db - .prepare( + const details = explain( + db, + sql` + SELECT SUM(ciphertextSize) FROM attachment_downloads + WHERE source = 'backup_import'; ` - EXPLAIN QUERY PLAN - SELECT SUM(ciphertextSize) FROM attachment_downloads - WHERE source = 'backup_import'; - ` - ) - .all() - .map(step => step.detail) - .join(', '); + ); assert.strictEqual( details, @@ -127,17 +123,13 @@ describe('SQL/updateToSchemaVersion1180', () => { }); it('uses index for deleting all backup jobs', async () => { updateToVersion(db, 1180); - const details = db - .prepare( + const details = explain( + db, + sql` + DELETE FROM attachment_downloads + WHERE source = 'backup_import'; ` - EXPLAIN QUERY PLAN - DELETE FROM attachment_downloads - WHERE source = 'backup_import'; - ` - ) - .all() - .map(step => step.detail) - .join(', '); + ); assert.strictEqual( details, diff --git a/ts/test-node/sql/migration_1200_test.ts b/ts/test-node/sql/migration_1200_test.ts index 6397d0d125..fc5235f45b 100644 --- a/ts/test-node/sql/migration_1200_test.ts +++ b/ts/test-node/sql/migration_1200_test.ts @@ -5,7 +5,7 @@ import { assert } from 'chai'; import { AttachmentDownloadSource, type WritableDB } from '../../sql/Interface'; import { objectToJSON, sql } from '../../sql/util'; -import { createDB, updateToVersion } from './helpers'; +import { createDB, updateToVersion, explain } from './helpers'; import type { AttachmentDownloadJobType } from '../../types/AttachmentDownload'; import { IMAGE_JPEG } from '../../types/MIME'; @@ -71,7 +71,7 @@ function insertJob( ${job.messageId}, ${job.attachmentType}, ${objectToJSON(job.attachment)}, - ${job.attachment.digest}, + ${job.attachment.digest ?? null}, ${job.attachment.contentType}, ${job.attachment.size}, ${job.receivedAt}, @@ -113,7 +113,7 @@ describe('SQL/updateToSchemaVersion1200', () => { it('uses correct index for standard query', () => { const now = Date.now(); - const [query, params] = sql` + const template = sql` SELECT * FROM attachment_downloads WHERE active = 0 @@ -122,11 +122,7 @@ describe('SQL/updateToSchemaVersion1200', () => { ORDER BY receivedAt DESC LIMIT 3 `; - const details = db - .prepare(`EXPLAIN QUERY PLAN ${query}`) - .all(params) - .map(step => step.detail) - .join(', '); + const details = explain(db, template); assert.equal( details, 'SEARCH attachment_downloads USING INDEX attachment_downloads_active_receivedAt (active=?)' @@ -136,7 +132,7 @@ describe('SQL/updateToSchemaVersion1200', () => { it('uses correct index for standard query with sources', () => { const now = Date.now(); // query with sources (e.g. when backup-import is paused) - const [query, params] = sql` + const template = sql` SELECT * FROM attachment_downloads WHERE active IS 0 @@ -147,11 +143,7 @@ describe('SQL/updateToSchemaVersion1200', () => { ORDER BY receivedAt DESC LIMIT 3 `; - const details = db - .prepare(`EXPLAIN QUERY PLAN ${query}`) - .all(params) - .map(step => step.detail) - .join(', '); + const details = explain(db, template); assert.equal( details, 'SEARCH attachment_downloads USING INDEX attachment_downloads_active_source_receivedAt (active=? AND source=?)' @@ -160,7 +152,7 @@ describe('SQL/updateToSchemaVersion1200', () => { it('uses provided index for prioritized query with sources', () => { // prioritize visible messages with sources (e.g. 
when backup-import is paused) - const [query, params] = sql` + const template = sql` SELECT * FROM attachment_downloads INDEXED BY attachment_downloads_active_messageId WHERE @@ -174,36 +166,30 @@ describe('SQL/updateToSchemaVersion1200', () => { ORDER BY receivedAt ASC LIMIT 3 `; + const [query, params] = template; const result = db.prepare(query).all(params); assert.strictEqual(result.length, 1); assert.deepStrictEqual(result[0].messageId, 'message12'); - const details = db - .prepare(`EXPLAIN QUERY PLAN ${query}`) - .all(params) - .map(step => step.detail) - .join(', '); + const details = explain(db, template); assert.equal( details, - 'SEARCH attachment_downloads USING INDEX attachment_downloads_active_messageId (active=? AND messageId=?), USE TEMP B-TREE FOR ORDER BY' + 'SEARCH attachment_downloads USING INDEX attachment_downloads_active_messageId (active=? AND messageId=?)\nUSE TEMP B-TREE FOR ORDER BY' ); }); it('uses existing index to remove all backup jobs ', () => { // prioritize visible messages with sources (e.g. when backup-import is paused) - const [query, params] = sql` + const template = sql` DELETE FROM attachment_downloads WHERE source = 'backup_import'; `; - const details = db - .prepare(`EXPLAIN QUERY PLAN ${query}`) - .all(params) - .map(step => step.detail) - .join(', '); + const details = explain(db, template); assert.equal( details, 'SEARCH attachment_downloads USING COVERING INDEX attachment_downloads_source_ciphertextSize (source=?)' ); + const [query, params] = template; db.prepare(query).run(params); assert.equal( db.prepare('SELECT * FROM attachment_downloads').all().length, diff --git a/ts/test-node/sql/migration_1330_test.ts b/ts/test-node/sql/migration_1330_test.ts index f106b591c0..133a6fe190 100644 --- a/ts/test-node/sql/migration_1330_test.ts +++ b/ts/test-node/sql/migration_1330_test.ts @@ -9,8 +9,9 @@ import { saveSyncTasks, incrementAllSyncTaskAttempts, } from '../../sql/Server'; +import { sql } from '../../sql/util'; import type { WritableDB } from '../../sql/Interface'; -import { updateToVersion, createDB } from './helpers'; +import { updateToVersion, createDB, explain } from './helpers'; import type { SyncTaskType } from '../../util/syncTasks'; @@ -27,17 +28,15 @@ describe('SQL/updateToSchemaVersion1330', () => { describe('Sync Tasks task index', () => { it('uses the task index for queries', () => { - const { detail } = db - .prepare( - ` - EXPLAIN QUERY PLAN + const detail = explain( + db, + sql` SELECT rowid, * FROM syncTasks WHERE rowid > 0 AND type IN ('delete-converation', 'delete-local-conversation') ORDER BY rowid ASC LIMIT 10000 ` - ) - .get(); + ); assert.include(detail, 'USING INDEX syncTasks_type'); }); }); diff --git a/ts/test-node/sql/migration_87_test.ts b/ts/test-node/sql/migration_87_test.ts index 54efc71bf2..1907deb4b0 100644 --- a/ts/test-node/sql/migration_87_test.ts +++ b/ts/test-node/sql/migration_87_test.ts @@ -82,17 +82,37 @@ describe('SQL/updateToSchemaVersion87(cleanup)', () => { } function getCountOfKyberKeys(): number { - return db.prepare('SELECT count(*) FROM kyberPreKeys;').pluck(true).get(); + return ( + db + .prepare('SELECT count(*) FROM kyberPreKeys;', { + pluck: true, + }) + .get() ?? 0 + ); } function getCountOfPreKeys(): number { - return db.prepare('SELECT count(*) FROM preKeys;').pluck(true).get(); + return ( + db + .prepare('SELECT count(*) FROM preKeys;', { + pluck: true, + }) + .get() ?? 
0 + ); } function getCountOfSignedKeys(): number { - return db.prepare('SELECT count(*) FROM signedPreKeys;').pluck(true).get(); + return ( + db + .prepare('SELECT count(*) FROM signedPreKeys;', { + pluck: true, + }) + .get() ?? 0 + ); } function getPragma(): number { - return db.prepare('PRAGMA user_version;').pluck(true).get(); + return db.pragma('user_version', { + simple: true, + }) as number; } function generateKyberKey( diff --git a/ts/test-node/sql/migration_89_test.ts b/ts/test-node/sql/migration_89_test.ts index 36f5b6eb3c..71613e6f43 100644 --- a/ts/test-node/sql/migration_89_test.ts +++ b/ts/test-node/sql/migration_89_test.ts @@ -435,7 +435,7 @@ describe('SQL/updateToSchemaVersion89', () => { `; return db .prepare(query) - .all() + .all<{ json: string }>() .map(row => { return jsonToObject(row.json); }); diff --git a/ts/test-node/sql/migration_90_test.ts b/ts/test-node/sql/migration_90_test.ts index 227a09ef18..5acfd5d125 100644 --- a/ts/test-node/sql/migration_90_test.ts +++ b/ts/test-node/sql/migration_90_test.ts @@ -4,7 +4,14 @@ import { assert } from 'chai'; import type { WritableDB } from '../../sql/Interface'; -import { createDB, updateToVersion, insertData, getTableData } from './helpers'; +import { sql } from '../../sql/util'; +import { + createDB, + updateToVersion, + insertData, + getTableData, + explain, +} from './helpers'; describe('SQL/updateToSchemaVersion90', () => { let db: WritableDB; @@ -85,20 +92,16 @@ describe('SQL/updateToSchemaVersion90', () => { it('should use storyId index', () => { updateToVersion(db, 90); - const details = db - .prepare( - ` - EXPLAIN QUERY PLAN + const details = explain( + db, + sql` UPDATE messages SET json = json_remove(json, '$.storyReplyContext.attachment.screenshotData') WHERE isStory = 0 AND storyId > '0' AND json->'storyReplyContext.attachment.screenshotData' IS NOT NULL; ` - ) - .all() - .map(({ detail }) => detail) - .join('\n'); + ); assert.include(details, 'USING INDEX messages_by_storyId'); assert.notInclude(details, 'SCAN'); diff --git a/ts/test-node/sql/migration_91_test.ts b/ts/test-node/sql/migration_91_test.ts index b8714cbdb6..c61215d019 100644 --- a/ts/test-node/sql/migration_91_test.ts +++ b/ts/test-node/sql/migration_91_test.ts @@ -50,11 +50,19 @@ describe('SQL/updateToSchemaVersion91', () => { } function getCountOfKeys(): number { - return db.prepare('SELECT count(*) FROM preKeys;').pluck(true).get(); + return ( + db + .prepare('SELECT count(*) FROM preKeys;', { + pluck: true, + }) + .get() ?? 0 + ); } function getPragma(): number { - return db.prepare('PRAGMA user_version;').pluck(true).get(); + return db.pragma('user_version', { + simple: true, + }) as number; } function generateKey( diff --git a/ts/test-node/sql/migration_920_test.ts b/ts/test-node/sql/migration_920_test.ts index b9e4fe3f75..d35d68a3d4 100644 --- a/ts/test-node/sql/migration_920_test.ts +++ b/ts/test-node/sql/migration_920_test.ts @@ -60,14 +60,28 @@ describe('SQL/updateToSchemaVersion92', () => { } function getCountOfKyberKeys(): number { - return db.prepare('SELECT count(*) FROM kyberPreKeys;').pluck(true).get(); + return ( + db + .prepare('SELECT count(*) FROM kyberPreKeys;', { + pluck: true, + }) + .get() ?? 0 + ); } function getCountOfSignedKeys(): number { - return db.prepare('SELECT count(*) FROM signedPreKeys;').pluck(true).get(); + return ( + db + .prepare('SELECT count(*) FROM signedPreKeys;', { + pluck: true, + }) + .get() ?? 
0 + ); } function getPragma(): number { - return db.prepare('PRAGMA user_version;').pluck(true).get(); + return db.pragma('user_version', { + simple: true, + }) as number; } function generateKyberKey( diff --git a/ts/test-node/sql/migrations_test.ts b/ts/test-node/sql/migrations_test.ts index 4e0e1b7ceb..070df7c0aa 100644 --- a/ts/test-node/sql/migrations_test.ts +++ b/ts/test-node/sql/migrations_test.ts @@ -12,7 +12,7 @@ import { objectToJSON, sql, sqlJoin } from '../../sql/util'; import { BodyRange } from '../../types/BodyRange'; import type { AciString } from '../../types/ServiceId'; import { generateAci } from '../../types/ServiceId'; -import { createDB, updateToVersion } from './helpers'; +import { createDB, updateToVersion, explain } from './helpers'; const OUR_UUID = generateGuid(); @@ -100,15 +100,24 @@ describe('SQL migrations test', () => { ` ); - const senderKeyCount = db - .prepare('SELECT COUNT(*) FROM senderKeys') - .pluck(); - const sessionCount = db.prepare('SELECT COUNT(*) FROM sessions').pluck(); - const signedPreKeyCount = db - .prepare('SELECT COUNT(*) FROM signedPreKeys') - .pluck(); - const preKeyCount = db.prepare('SELECT COUNT(*) FROM preKeys').pluck(); - const itemCount = db.prepare('SELECT COUNT(*) FROM items').pluck(); + const senderKeyCount = db.prepare('SELECT COUNT(*) FROM senderKeys', { + pluck: true, + }); + const sessionCount = db.prepare('SELECT COUNT(*) FROM sessions', { + pluck: true, + }); + const signedPreKeyCount = db.prepare( + 'SELECT COUNT(*) FROM signedPreKeys', + { + pluck: true, + } + ); + const preKeyCount = db.prepare('SELECT COUNT(*) FROM preKeys', { + pluck: true, + }); + const itemCount = db.prepare('SELECT COUNT(*) FROM items', { + pluck: true, + }); assert.strictEqual(senderKeyCount.get(), 1); assert.strictEqual(sessionCount.get(), 1); @@ -223,7 +232,11 @@ describe('SQL migrations test', () => { updateToVersion(db, 41); assert.strictEqual( - db.prepare('SELECT COUNT(*) FROM senderKeys').pluck().get(), + db + .prepare('SELECT COUNT(*) FROM senderKeys', { + pluck: true, + }) + .get(), 0 ); }); @@ -321,7 +334,11 @@ describe('SQL migrations test', () => { updateToVersion(db, 41); assert.strictEqual( - db.prepare('SELECT COUNT(*) FROM sessions').pluck().get(), + db + .prepare('SELECT COUNT(*) FROM sessions', { + pluck: true, + }) + .get(), 0 ); }); @@ -342,7 +359,11 @@ describe('SQL migrations test', () => { updateToVersion(db, 41); assert.strictEqual( - db.prepare('SELECT COUNT(*) FROM sessions').pluck().get(), + db + .prepare('SELECT COUNT(*) FROM sessions', { + pluck: true, + }) + .get(), 0 ); }); @@ -537,10 +558,12 @@ describe('SQL migrations test', () => { ` ); - const reactionCount = db - .prepare('SELECT COUNT(*) FROM reactions;') - .pluck(); - const messageCount = db.prepare('SELECT COUNT(*) FROM messages;').pluck(); + const reactionCount = db.prepare('SELECT COUNT(*) FROM reactions;', { + pluck: true, + }); + const messageCount = db.prepare('SELECT COUNT(*) FROM messages;', { + pluck: true, + }); assert.strictEqual(reactionCount.get(), 4); assert.strictEqual(messageCount.get(), 2); @@ -551,8 +574,9 @@ describe('SQL migrations test', () => { assert.strictEqual(messageCount.get(), 2); const reactionMessageIds = db - .prepare('SELECT messageId FROM reactions;') - .pluck() + .prepare('SELECT messageId FROM reactions;', { + pluck: true, + }) .all(); assert.sameDeepMembers(reactionMessageIds, [MESSAGE_ID_1, MESSAGE_ID_2]); @@ -576,10 +600,12 @@ describe('SQL migrations test', () => { ` ); - const reactionCount = db - .prepare('SELECT 
COUNT(*) FROM reactions;') - .pluck(); - const messageCount = db.prepare('SELECT COUNT(*) FROM messages;').pluck(); + const reactionCount = db.prepare('SELECT COUNT(*) FROM reactions;', { + pluck: true, + }); + const messageCount = db.prepare('SELECT COUNT(*) FROM messages;', { + pluck: true, + }); assert.strictEqual(reactionCount.get(), 3); assert.strictEqual(messageCount.get(), 3); @@ -599,8 +625,9 @@ describe('SQL migrations test', () => { assert.strictEqual(messageCount.get(), 2); const reactionMessageIds = db - .prepare('SELECT messageId FROM reactions;') - .pluck() + .prepare('SELECT messageId FROM reactions;', { + pluck: true, + }) .all(); assert.sameDeepMembers(reactionMessageIds, [MESSAGE_ID_2, MESSAGE_ID_3]); @@ -700,9 +727,11 @@ describe('SQL migrations test', () => { updateToVersion(db, 43); - const { members, json: convoJSON } = db + const row = db .prepare("SELECT members, json FROM conversations WHERE id = 'c'") - .get(); + .get<{ members: string; json: string }>(); + + const { members, json: convoJSON } = row || { members: '', json: '' }; assert.strictEqual(members, `${UUID_A} ${UUID_B}`); assert.deepStrictEqual(JSON.parse(convoJSON), { @@ -723,9 +752,10 @@ describe('SQL migrations test', () => { ], }); - const { json: messageMJSON } = db - .prepare("SELECT json FROM messages WHERE id = 'm'") - .get(); + const messageMJSON = + db + .prepare("SELECT json FROM messages WHERE id = 'm'") + .get<{ json: string }>()?.json ?? ''; assert.deepStrictEqual(JSON.parse(messageMJSON), { id: 'm', @@ -751,9 +781,10 @@ describe('SQL migrations test', () => { ], }); - const { json: messageNJSON } = db - .prepare("SELECT json FROM messages WHERE id = 'n'") - .get(); + const messageNJSON = + db + .prepare("SELECT json FROM messages WHERE id = 'n'") + .get<{ json: string }>()?.json ?? ''; assert.deepStrictEqual(JSON.parse(messageNJSON), { id: 'n', @@ -781,9 +812,10 @@ describe('SQL migrations test', () => { updateToVersion(db, 43); - const { json: messageMJSON } = db - .prepare("SELECT json FROM messages WHERE id = 'm'") - .get(); + const messageMJSON = + db + .prepare("SELECT json FROM messages WHERE id = 'm'") + .get<{ json: string }>()?.json ?? 
''; assert.deepStrictEqual(JSON.parse(messageMJSON), { id: 'm', @@ -822,10 +854,12 @@ describe('SQL migrations test', () => { ('${AUTHOR_ID}', '${CONVERSATION_ID}', '${STORY_ID_2}', ${Date.now()}); ` ); - const storyReadCount = db - .prepare('SELECT COUNT(*) FROM storyReads;') - .pluck(); - const messageCount = db.prepare('SELECT COUNT(*) FROM messages;').pluck(); + const storyReadCount = db.prepare('SELECT COUNT(*) FROM storyReads;', { + pluck: true, + }); + const messageCount = db.prepare('SELECT COUNT(*) FROM messages;', { + pluck: true, + }); assert.strictEqual(storyReadCount.get(), 2); assert.strictEqual(messageCount.get(), 5); @@ -841,8 +875,9 @@ describe('SQL migrations test', () => { assert.strictEqual(messageCount.get(), 2); const storyReadIds = db - .prepare('SELECT storyId FROM storyReads;') - .pluck() + .prepare('SELECT storyId FROM storyReads;', { + pluck: true, + }) .all(); assert.sameDeepMembers(storyReadIds, [STORY_ID_2]); }); @@ -875,12 +910,15 @@ describe('SQL migrations test', () => { ` ); - const listCount = db - .prepare('SELECT COUNT(*) FROM storyDistributions;') - .pluck(); - const memberCount = db - .prepare('SELECT COUNT(*) FROM storyDistributionMembers;') - .pluck(); + const listCount = db.prepare('SELECT COUNT(*) FROM storyDistributions;', { + pluck: true, + }); + const memberCount = db.prepare( + 'SELECT COUNT(*) FROM storyDistributionMembers;', + { + pluck: true, + } + ); assert.strictEqual(listCount.get(), 2); assert.strictEqual(memberCount.get(), 6); @@ -891,8 +929,9 @@ describe('SQL migrations test', () => { assert.strictEqual(memberCount.get(), 2); const members = db - .prepare('SELECT uuid FROM storyDistributionMembers;') - .pluck() + .prepare('SELECT uuid FROM storyDistributionMembers;', { + pluck: true, + }) .all(); assert.sameDeepMembers(members, [UUID_1, UUID_2]); @@ -948,15 +987,21 @@ describe('SQL migrations test', () => { updateToVersion(db, 47); assert.strictEqual( - db.prepare('SELECT COUNT(*) FROM messages;').pluck().get(), + db + .prepare('SELECT COUNT(*) FROM messages;', { + pluck: true, + }) + .get(), 2 ); assert.strictEqual( db .prepare( - 'SELECT COUNT(*) FROM messages WHERE isChangeCreatedByUs IS 0;' + 'SELECT COUNT(*) FROM messages WHERE isChangeCreatedByUs IS 0;', + { + pluck: true, + } ) - .pluck() .get(), 1, 'zero' @@ -964,9 +1009,11 @@ describe('SQL migrations test', () => { assert.strictEqual( db .prepare( - 'SELECT COUNT(*) FROM messages WHERE isChangeCreatedByUs IS 1;' + 'SELECT COUNT(*) FROM messages WHERE isChangeCreatedByUs IS 1;', + { + pluck: true, + } ) - .pluck() .get(), 1, 'one' @@ -994,20 +1041,26 @@ describe('SQL migrations test', () => { ); assert.strictEqual( - db.prepare('SELECT COUNT(*) FROM messages;').pluck().get(), + db + .prepare('SELECT COUNT(*) FROM messages;', { + pluck: true, + }) + .get(), 3 ); assert.strictEqual( db - .prepare('SELECT COUNT(*) FROM messages WHERE isStory IS 0;') - .pluck() + .prepare('SELECT COUNT(*) FROM messages WHERE isStory IS 0;', { + pluck: true, + }) .get(), 2 ); assert.strictEqual( db - .prepare('SELECT COUNT(*) FROM messages WHERE isStory IS 1;') - .pluck() + .prepare('SELECT COUNT(*) FROM messages WHERE isStory IS 1;', { + pluck: true, + }) .get(), 1 ); @@ -1035,33 +1088,43 @@ describe('SQL migrations test', () => { ); assert.strictEqual( - db.prepare('SELECT COUNT(*) FROM messages;').pluck().get(), + db + .prepare('SELECT COUNT(*) FROM messages;', { + pluck: true, + }) + .get(), 4 ); assert.strictEqual( db .prepare( - 'SELECT COUNT(*) FROM messages WHERE shouldAffectPreview IS 1;' 
+ 'SELECT COUNT(*) FROM messages WHERE shouldAffectPreview IS 1;', + { + pluck: true, + } ) - .pluck() .get(), 3 ); assert.strictEqual( db .prepare( - 'SELECT COUNT(*) FROM messages WHERE shouldAffectActivity IS 1;' + 'SELECT COUNT(*) FROM messages WHERE shouldAffectActivity IS 1;', + { + pluck: true, + } ) - .pluck() .get(), 2 ); assert.strictEqual( db .prepare( - 'SELECT COUNT(*) FROM messages WHERE isUserInitiatedMessage IS 1;' + 'SELECT COUNT(*) FROM messages WHERE isUserInitiatedMessage IS 1;', + { + pluck: true, + } ) - .pluck() .get(), 1 ); @@ -1100,24 +1163,32 @@ describe('SQL migrations test', () => { ); assert.strictEqual( - db.prepare('SELECT COUNT(*) FROM messages;').pluck().get(), + db + .prepare('SELECT COUNT(*) FROM messages;', { + pluck: true, + }) + .get(), 3 ); assert.strictEqual( db .prepare( - 'SELECT COUNT(*) FROM messages WHERE isTimerChangeFromSync IS 1;' + 'SELECT COUNT(*) FROM messages WHERE isTimerChangeFromSync IS 1;', + { + pluck: true, + } ) - .pluck() .get(), 1 ); assert.strictEqual( db .prepare( - 'SELECT COUNT(*) FROM messages WHERE isTimerChangeFromSync IS 0;' + 'SELECT COUNT(*) FROM messages WHERE isTimerChangeFromSync IS 0;', + { + pluck: true, + } ) - .pluck() .get(), 2 ); @@ -1199,24 +1270,32 @@ describe('SQL migrations test', () => { ); assert.strictEqual( - db.prepare('SELECT COUNT(*) FROM messages;').pluck().get(), + db + .prepare('SELECT COUNT(*) FROM messages;', { + pluck: true, + }) + .get(), 5 ); assert.strictEqual( db .prepare( - 'SELECT COUNT(*) FROM messages WHERE isGroupLeaveEvent IS 1;' + 'SELECT COUNT(*) FROM messages WHERE isGroupLeaveEvent IS 1;', + { + pluck: true, + } ) - .pluck() .get(), 1 ); assert.strictEqual( db .prepare( - 'SELECT COUNT(*) FROM messages WHERE isGroupLeaveEvent IS 0;' + 'SELECT COUNT(*) FROM messages WHERE isGroupLeaveEvent IS 0;', + { + pluck: true, + } ) - .pluck() .get(), 4 ); @@ -1225,10 +1304,9 @@ describe('SQL migrations test', () => { it('ensures that index is used for getOlderMessagesByConversation', () => { updateToVersion(db, 47); - const { detail } = db - .prepare( - ` - EXPLAIN QUERY PLAN + const detail = explain( + db, + sql` SELECT json FROM messages WHERE conversationId = 'd8b05bb1-36b3-4478-841b-600af62321eb' AND (NULL IS NULL OR id IS NOT NULL) AND @@ -1241,8 +1319,7 @@ describe('SQL migrations test', () => { ORDER BY received_at DESC, sent_at DESC LIMIT 10; ` - ) - .get(); + ); assert.notInclude(detail, 'B-TREE'); assert.notInclude(detail, 'SCAN'); @@ -1257,10 +1334,9 @@ describe('SQL migrations test', () => { it('creates usable index for hasUserInitiatedMessages', () => { updateToVersion(db, 48); - const details = db - .prepare( - ` - EXPLAIN QUERY PLAN + const details = explain( + db, + sql` SELECT COUNT(*) as count FROM ( SELECT 1 FROM messages @@ -1270,10 +1346,7 @@ describe('SQL migrations test', () => { LIMIT 1 ); ` - ) - .all() - .map(({ detail }) => detail) - .join('\n'); + ); assert.include( details, @@ -1286,10 +1359,9 @@ describe('SQL migrations test', () => { it('creates usable index for messages preview', () => { updateToVersion(db, 49); - const details = db - .prepare( - ` - EXPLAIN QUERY PLAN + const details = explain( + db, + sql` SELECT json FROM messages WHERE conversationId = 'convo' AND @@ -1303,10 +1375,7 @@ describe('SQL migrations test', () => { ORDER BY received_at DESC, sent_at DESC LIMIT 1; ` - ) - .all() - .map(({ detail }) => detail) - .join('\n'); + ); assert.include(details, 'USING INDEX messages_preview'); assert.notInclude(details, 'TEMP B-TREE'); @@ -1318,10 +1387,9 
@@ describe('SQL migrations test', () => {
     it('creates usable index for messages_unread', () => {
       updateToVersion(db, 50);

-      const details = db
-        .prepare(
-          `
-          EXPLAIN QUERY PLAN
+      const details = explain(
+        db,
+        sql`
           SELECT * FROM messages WHERE
             conversationId = 'conversation' AND
             readStatus = 'something' AND
@@ -1330,10 +1398,7 @@ describe('SQL migrations test', () => {
           ORDER BY received_at ASC, sent_at ASC
           LIMIT 1;
         `
-        )
-        .all()
-        .map(({ detail }) => detail)
-        .join('\n');
+      );

       assert.include(details, 'USING INDEX messages_unread');
       assert.notInclude(details, 'TEMP B-TREE');
@@ -1368,16 +1433,27 @@ describe('SQL migrations test', () => {
         `
       );

-      const totalJobs = db.prepare('SELECT COUNT(*) FROM jobs;').pluck();
-      const normalSendJobs = db
-        .prepare("SELECT COUNT(*) FROM jobs WHERE queueType = 'normal send';")
-        .pluck();
-      const conversationJobs = db
-        .prepare("SELECT COUNT(*) FROM jobs WHERE queueType = 'conversation';")
-        .pluck();
-      const reactionJobs = db
-        .prepare("SELECT COUNT(*) FROM jobs WHERE queueType = 'reactions';")
-        .pluck();
+      const totalJobs = db.prepare('SELECT COUNT(*) FROM jobs;', {
+        pluck: true,
+      });
+      const normalSendJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'normal send';",
+        {
+          pluck: true,
+        }
+      );
+      const conversationJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'conversation';",
+        {
+          pluck: true,
+        }
+      );
+      const reactionJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'reactions';",
+        {
+          pluck: true,
+        }
+      );

       assert.strictEqual(totalJobs.get(), 4, 'before total');
       assert.strictEqual(normalSendJobs.get(), 1, 'before normal');
@@ -1465,13 +1541,21 @@ describe('SQL migrations test', () => {
         `
       );

-      const totalJobs = db.prepare('SELECT COUNT(*) FROM jobs;').pluck();
-      const reactionJobs = db
-        .prepare("SELECT COUNT(*) FROM jobs WHERE queueType = 'reactions';")
-        .pluck();
-      const conversationJobs = db
-        .prepare("SELECT COUNT(*) FROM jobs WHERE queueType = 'conversation';")
-        .pluck();
+      const totalJobs = db.prepare('SELECT COUNT(*) FROM jobs;', {
+        pluck: true,
+      });
+      const reactionJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'reactions';",
+        {
+          pluck: true,
+        }
+      );
+      const conversationJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'conversation';",
+        {
+          pluck: true,
+        }
+      );

       assert.strictEqual(totalJobs.get(), 6, 'total jobs before');
       assert.strictEqual(reactionJobs.get(), 6, 'reaction jobs before');
@@ -1542,13 +1626,21 @@ describe('SQL migrations test', () => {
         queueType: 'normal send',
       });

-      const totalJobs = db.prepare('SELECT COUNT(*) FROM jobs;').pluck();
-      const normalSend = db
-        .prepare("SELECT COUNT(*) FROM jobs WHERE queueType = 'normal send';")
-        .pluck();
-      const conversationJobs = db
-        .prepare("SELECT COUNT(*) FROM jobs WHERE queueType = 'conversation';")
-        .pluck();
+      const totalJobs = db.prepare('SELECT COUNT(*) FROM jobs;', {
+        pluck: true,
+      });
+      const normalSend = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'normal send';",
+        {
+          pluck: true,
+        }
+      );
+      const conversationJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'conversation';",
+        {
+          pluck: true,
+        }
+      );

       assert.strictEqual(totalJobs.get(), 3, 'total jobs before');
       assert.strictEqual(normalSend.get(), 3, 'normal send jobs before');
@@ -1595,7 +1687,6 @@ describe('SQL migrations test', () => {
       return [
         {
           template: sql`
-            EXPLAIN QUERY PLAN
             SELECT * FROM messages WHERE
               conversationId = 'conversation' AND
               readStatus = 'something' AND
@@ -1608,7 +1699,6 @@ describe('SQL migrations test', () => {
         },
         {
           template: sql`
-            EXPLAIN QUERY PLAN
            SELECT json FROM messages WHERE
              conversationId = 'd8b05bb1-36b3-4478-841b-600af62321eb' AND
              (NULL IS NULL OR id IS NOT NULL) AND
@@ -1631,12 +1721,7 @@ describe('SQL migrations test', () => {

      for (const storyId of ['123', undefined]) {
        for (const { template, index } of getQueries(storyId, true)) {
-          const [query, params] = template;
-          const details = db
-            .prepare(query)
-            .all(params)
-            .map(({ detail }) => detail)
-            .join('\n');
+          const details = explain(db, template);

          const postfixedIndex = index + (storyId ? '' : '_no_story_id');

@@ -1722,10 +1807,15 @@ describe('SQL migrations test', () => {
        `
      );

-      const totalJobs = db.prepare('SELECT COUNT(*) FROM jobs;').pluck();
-      const reportSpamJobs = db
-        .prepare("SELECT COUNT(*) FROM jobs WHERE queueType = 'report spam';")
-        .pluck();
+      const totalJobs = db.prepare('SELECT COUNT(*) FROM jobs;', {
+        pluck: true,
+      });
+      const reportSpamJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'report spam';",
+        {
+          pluck: true,
+        }
+      );

      assert.strictEqual(totalJobs.get(), 2, 'before total');
      assert.strictEqual(reportSpamJobs.get(), 1, 'before report spam');
@@ -1788,16 +1878,22 @@ describe('SQL migrations test', () => {
      );

      assert.strictEqual(
-        db.prepare('SELECT COUNT(*) FROM messages;').pluck().get(),
+        db
+          .prepare('SELECT COUNT(*) FROM messages;', {
+            pluck: true,
+          })
+          .get(),
        11,
        'starting total'
      );
      assert.strictEqual(
        db
          .prepare(
-            `SELECT COUNT(*) FROM messages WHERE readStatus = ${ReadStatus.Unread};`
+            `SELECT COUNT(*) FROM messages WHERE readStatus = ${ReadStatus.Unread};`,
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        11,
        'starting unread count'
@@ -1806,16 +1902,22 @@ describe('SQL migrations test', () => {
      updateToVersion(db, 56);

      assert.strictEqual(
-        db.prepare('SELECT COUNT(*) FROM messages;').pluck().get(),
+        db
+          .prepare('SELECT COUNT(*) FROM messages;', {
+            pluck: true,
+          })
+          .get(),
        11,
        'ending total'
      );
      assert.strictEqual(
        db
          .prepare(
-            `SELECT COUNT(*) FROM messages WHERE readStatus = ${ReadStatus.Unread};`
+            `SELECT COUNT(*) FROM messages WHERE readStatus = ${ReadStatus.Unread};`,
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        10,
        'ending unread count'
@@ -1823,9 +1925,11 @@ describe('SQL migrations test', () => {
      assert.strictEqual(
        db
          .prepare(
-            `SELECT COUNT(*) FROM messages WHERE seenStatus = ${SeenStatus.Unseen};`
+            `SELECT COUNT(*) FROM messages WHERE seenStatus = ${SeenStatus.Unseen};`,
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        10,
        'ending unseen count'
@@ -1834,9 +1938,11 @@ describe('SQL migrations test', () => {
      assert.strictEqual(
        db
          .prepare(
-            "SELECT readStatus FROM messages WHERE type = 'other' LIMIT 1;"
+            "SELECT readStatus FROM messages WHERE type = 'other' LIMIT 1;",
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        ReadStatus.Read,
        "checking read status for lone 'other' message"
@@ -1846,10 +1952,9 @@ describe('SQL migrations test', () => {
    it('creates usable index for getOldestUnseenMessageForConversation', () => {
      updateToVersion(db, 56);

-      const first = db
-        .prepare(
-          `
-          EXPLAIN QUERY PLAN
+      const first = explain(
+        db,
+        sql`
          SELECT * FROM messages WHERE
            conversationId = 'id-conversation-4' AND
            seenStatus = ${SeenStatus.Unseen} AND
@@ -1858,19 +1963,15 @@ describe('SQL migrations test', () => {
          ORDER BY received_at ASC, sent_at ASC
          LIMIT 1;
        `
-        )
-        .all()
-        .map(({ detail }) => detail)
-        .join('\n');
+      );

      assert.include(first, 'USING INDEX messages_unseen_no_story', 'first');
      assert.notInclude(first, 'TEMP B-TREE', 'first');
      assert.notInclude(first, 'SCAN', 'first');

-      const second = db
-        .prepare(
-          `
-          EXPLAIN QUERY PLAN
+      const second = explain(
+        db,
+        sql`
          SELECT * FROM messages WHERE
            conversationId = 'id-conversation-4' AND
            seenStatus = ${SeenStatus.Unseen} AND
@@ -1879,10 +1980,7 @@ describe('SQL migrations test', () => {
          ORDER BY received_at ASC, sent_at ASC
          LIMIT 1;
        `
-        )
-        .all()
-        .map(({ detail }) => detail)
-        .join('\n');
+      );

      assert.include(
        second,
@@ -1896,10 +1994,9 @@ describe('SQL migrations test', () => {
    it('creates usable index for getUnreadByConversationAndMarkRead', () => {
      updateToVersion(db, 56);

-      const first = db
-        .prepare(
-          `
-          EXPLAIN QUERY PLAN
+      const first = explain(
+        db,
+        sql`
          UPDATE messages
          SET
            readStatus = ${ReadStatus.Read},
@@ -1912,19 +2009,15 @@ describe('SQL migrations test', () => {
            NULL IS NULL AND
            received_at <= 2343233;
        `
-        )
-        .all()
-        .map(({ detail }) => detail)
-        .join('\n');
+      );

      assert.include(first, 'USING INDEX messages_unseen_no_story', 'first');
      assert.notInclude(first, 'TEMP B-TREE', 'first');
      assert.notInclude(first, 'SCAN', 'first');

-      const second = db
-        .prepare(
-          `
-          EXPLAIN QUERY PLAN
+      const second = explain(
+        db,
+        sql`
          UPDATE messages
          SET
            readStatus = ${ReadStatus.Read},
@@ -1937,10 +2030,7 @@ describe('SQL migrations test', () => {
            storyId IS 'id-story-4' AND
            received_at <= 2343233;
        `
-        )
-        .all()
-        .map(({ detail }) => detail)
-        .join('\n');
+      );

      assert.include(
        second,
@@ -1954,10 +2044,9 @@ describe('SQL migrations test', () => {
    it('creates usable index for getTotalUnseenForConversationSync', () => {
      updateToVersion(db, 56);

-      const first = db
-        .prepare(
-          `
-          EXPLAIN QUERY PLAN
+      const first = explain(
+        db,
+        sql`
          SELECT count(id)
          FROM messages
          WHERE
@@ -1966,20 +2055,16 @@ describe('SQL migrations test', () => {
            isStory IS 0 AND
            NULL IS NULL;
        `
-        )
-        .all()
-        .map(({ detail }) => detail)
-        .join('\n');
+      );

      // Weird, but we don't included received_at so it doesn't really matter
      assert.include(first, 'USING INDEX messages_unseen_with_story', 'first');
      assert.notInclude(first, 'TEMP B-TREE', 'first');
      assert.notInclude(first, 'SCAN', 'first');

-      const second = db
-        .prepare(
-          `
-          EXPLAIN QUERY PLAN
+      const second = explain(
+        db,
+        sql`
          SELECT count(id)
          FROM messages
          WHERE
@@ -1988,10 +2073,7 @@ describe('SQL migrations test', () => {
            isStory IS 0 AND
            storyId IS 'id-story-4';
        `
-        )
-        .all()
-        .map(({ detail }) => detail)
-        .join('\n');
+      );

      assert.include(
        second,
@@ -2040,16 +2122,22 @@ describe('SQL migrations test', () => {
      );

      assert.strictEqual(
-        db.prepare('SELECT COUNT(*) FROM messages;').pluck().get(),
+        db
+          .prepare('SELECT COUNT(*) FROM messages;', {
+            pluck: true,
+          })
+          .get(),
        11,
        'starting total'
      );
      assert.strictEqual(
        db
          .prepare(
-            `SELECT COUNT(*) FROM messages WHERE readStatus = ${ReadStatus.Unread};`
+            `SELECT COUNT(*) FROM messages WHERE readStatus = ${ReadStatus.Unread};`,
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        11,
        'starting unread count'
@@ -2058,16 +2146,22 @@ describe('SQL migrations test', () => {
      updateToVersion(db, 56);

      assert.strictEqual(
-        db.prepare('SELECT COUNT(*) FROM messages;').pluck().get(),
+        db
+          .prepare('SELECT COUNT(*) FROM messages;', {
+            pluck: true,
+          })
+          .get(),
        11,
        'ending total'
      );
      assert.strictEqual(
        db
          .prepare(
-            `SELECT COUNT(*) FROM messages WHERE readStatus = ${ReadStatus.Unread};`
+            `SELECT COUNT(*) FROM messages WHERE readStatus = ${ReadStatus.Unread};`,
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        10,
        'ending unread count'
@@ -2075,9 +2169,11 @@ describe('SQL migrations test', () => {
      assert.strictEqual(
        db
          .prepare(
-            `SELECT COUNT(*) FROM messages WHERE seenStatus = ${SeenStatus.Unseen};`
+            `SELECT COUNT(*) FROM messages WHERE seenStatus = ${SeenStatus.Unseen};`,
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        10,
        'ending unseen count'
@@ -2086,9 +2182,11 @@ describe('SQL migrations test', () => {
      assert.strictEqual(
        db
          .prepare(
-            "SELECT readStatus FROM messages WHERE type = 'other' LIMIT 1;"
+            "SELECT readStatus FROM messages WHERE type = 'other' LIMIT 1;",
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        ReadStatus.Read,
        "checking read status for 'other' message"
@@ -2115,16 +2213,22 @@ describe('SQL migrations test', () => {
      );

      assert.strictEqual(
-        db.prepare('SELECT COUNT(*) FROM messages;').pluck().get(),
+        db
+          .prepare('SELECT COUNT(*) FROM messages;', {
+            pluck: true,
+          })
+          .get(),
        3,
        'starting total'
      );
      assert.strictEqual(
        db
          .prepare(
-            `SELECT COUNT(*) FROM messages WHERE readStatus = ${ReadStatus.Unread};`
+            `SELECT COUNT(*) FROM messages WHERE readStatus = ${ReadStatus.Unread};`,
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        3,
        'starting unread count'
@@ -2133,16 +2237,22 @@ describe('SQL migrations test', () => {
      updateToVersion(db, 58);

      assert.strictEqual(
-        db.prepare('SELECT COUNT(*) FROM messages;').pluck().get(),
+        db
+          .prepare('SELECT COUNT(*) FROM messages;', {
+            pluck: true,
+          })
+          .get(),
        3,
        'ending total'
      );
      assert.strictEqual(
        db
          .prepare(
-            `SELECT COUNT(*) FROM messages WHERE readStatus = ${ReadStatus.Unread};`
+            `SELECT COUNT(*) FROM messages WHERE readStatus = ${ReadStatus.Unread};`,
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        1,
        'ending unread count'
@@ -2151,9 +2261,11 @@ describe('SQL migrations test', () => {
      assert.strictEqual(
        db
          .prepare(
-            "SELECT readStatus FROM messages WHERE type = 'keychange' LIMIT 1;"
+            "SELECT readStatus FROM messages WHERE type = 'keychange' LIMIT 1;",
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        ReadStatus.Read,
        "checking read status for 'keychange' message"
@@ -2161,9 +2273,11 @@ describe('SQL migrations test', () => {
      assert.strictEqual(
        db
          .prepare(
-            "SELECT seenStatus FROM messages WHERE type = 'keychange' LIMIT 1;"
+            "SELECT seenStatus FROM messages WHERE type = 'keychange' LIMIT 1;",
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        SeenStatus.Unseen,
        "checking seen status for 'keychange' message"
@@ -2201,7 +2315,11 @@ describe('SQL migrations test', () => {
      );

      assert.strictEqual(
-        db.prepare('SELECT COUNT(*) FROM messages;').pluck().get(),
+        db
+          .prepare('SELECT COUNT(*) FROM messages;', {
+            pluck: true,
+          })
+          .get(),
        4,
        'starting total'
      );
@@ -2211,9 +2329,11 @@ describe('SQL migrations test', () => {
      assert.strictEqual(
        db
          .prepare(
-            `SELECT json FROM messages WHERE id = '${MESSAGE_ID_1}' LIMIT 1;`
+            `SELECT json FROM messages WHERE id = '${MESSAGE_ID_1}' LIMIT 1;`,
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        JSON.stringify({
          body: 'message1',
@@ -2225,9 +2345,11 @@ describe('SQL migrations test', () => {
      assert.strictEqual(
        db
          .prepare(
-            `SELECT json FROM messages WHERE id = '${MESSAGE_ID_2}' LIMIT 1;`
+            `SELECT json FROM messages WHERE id = '${MESSAGE_ID_2}' LIMIT 1;`,
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        JSON.stringify({ body: 'message2', readStatus: ReadStatus.Read }),
        'checking JSON for message2'
@@ -2235,9 +2357,11 @@ describe('SQL migrations test', () => {
      assert.strictEqual(
        db
          .prepare(
-            `SELECT json FROM messages WHERE id = '${MESSAGE_ID_3}' LIMIT 1;`
+            `SELECT json FROM messages WHERE id = '${MESSAGE_ID_3}' LIMIT 1;`,
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        JSON.stringify({
          body: 'message3',
@@ -2249,9 +2373,11 @@ describe('SQL migrations test', () => {
      assert.strictEqual(
        db
          .prepare(
-            `SELECT json FROM messages WHERE id = '${MESSAGE_ID_4}' LIMIT 1;`
+            `SELECT json FROM messages WHERE id = '${MESSAGE_ID_4}' LIMIT 1;`,
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        JSON.stringify({
          body: 'message4',
@@ -2267,10 +2393,9 @@ describe('SQL migrations test', () => {
    it('updates index to make query efficient', () => {
      updateToVersion(db, 60);

-      const items = db
-        .prepare(
-          `
-          EXPLAIN QUERY PLAN
+      const detail = explain(
+        db,
+        sql`
          UPDATE messages
          INDEXED BY expiring_message_by_conversation_and_received_at
          SET
@@ -2288,9 +2413,7 @@ describe('SQL migrations test', () => {
            expireTimer > 0 AND
            received_at <= 234234;
        `
-        )
-        .all();
-      const detail = items.map(item => item.detail).join('\n');
+      );

      assert.notInclude(detail, 'B-TREE');
      assert.notInclude(detail, 'SCAN');
@@ -2318,19 +2441,26 @@ describe('SQL migrations test', () => {
      );

      assert.strictEqual(
-        db.prepare('SELECT COUNT(*) FROM sendLogPayloads;').pluck().get(),
+        db
+          .prepare('SELECT COUNT(*) FROM sendLogPayloads;', {
+            pluck: true,
+          })
+          .get(),
        1,
        'starting total'
      );

-      const payload = db
-        .prepare('SELECT * FROM sendLogPayloads LIMIT 1;')
-        .get();
+      const payload = db.prepare('SELECT * FROM sendLogPayloads LIMIT 1;').get<{
+        contentHint: number;
+        timestamp: number;
+        proto: Uint8Array;
+        urgent: number;
+      }>();

-      assert.strictEqual(payload.contentHint, 1);
-      assert.strictEqual(payload.timestamp, timestamp);
-      assert.strictEqual(payload.proto.length, 8);
-      assert.strictEqual(payload.urgent, 1);
+      assert.strictEqual(payload?.contentHint, 1);
+      assert.strictEqual(payload?.timestamp, timestamp);
+      assert.strictEqual(payload?.proto.length, 8);
+      assert.strictEqual(payload?.urgent, 1);
    });
  });

@@ -2372,15 +2502,16 @@ describe('SQL migrations test', () => {
    });

    it('removes the legacy groupCallRings table', () => {
-      const tableCount = db
-        .prepare(
-          `
-          SELECT COUNT(*) FROM sqlite_schema
-          WHERE type = 'table'
-          AND name = 'groupCallRings'
-          `
-        )
-        .pluck();
+      const tableCount = db.prepare(
+        `
+        SELECT COUNT(*) FROM sqlite_schema
+        WHERE type = 'table'
+        AND name = 'groupCallRings'
+        `,
+        {
+          pluck: true,
+        }
+      );

      assert.strictEqual(tableCount.get(), 0);
    });
@@ -2429,7 +2560,11 @@ describe('SQL migrations test', () => {
      );

      assert.strictEqual(
-        db.prepare('SELECT COUNT(*) FROM messages;').pluck().get(),
+        db
+          .prepare('SELECT COUNT(*) FROM messages;', {
+            pluck: true,
+          })
+          .get(),
        8,
        'total'
      );
@@ -2438,9 +2573,11 @@ describe('SQL migrations test', () => {
      assert.strictEqual(
        db
          .prepare(
-            'SELECT COUNT(*) FROM messages WHERE shouldAffectPreview IS 1;'
+            'SELECT COUNT(*) FROM messages WHERE shouldAffectPreview IS 1;',
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        4,
        'shouldAffectPreview'
@@ -2448,9 +2585,11 @@ describe('SQL migrations test', () => {
      assert.strictEqual(
        db
          .prepare(
-            'SELECT COUNT(*) FROM messages WHERE shouldAffectActivity IS 1;'
+            'SELECT COUNT(*) FROM messages WHERE shouldAffectActivity IS 1;',
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        4,
        'shouldAffectActivity'
@@ -2460,9 +2599,11 @@ describe('SQL migrations test', () => {
      assert.strictEqual(
        db
          .prepare(
-            'SELECT COUNT(*) FROM messages WHERE isUserInitiatedMessage IS 1;'
+            'SELECT COUNT(*) FROM messages WHERE isUserInitiatedMessage IS 1;',
+            {
+              pluck: true,
+            }
          )
-          .pluck()
          .get(),
        3,
        'isUserInitiatedMessage'
@@ -2525,23 +2666,33 @@ describe('SQL migrations test', () => {
        data: {},
      });

-      const totalJobs = db.prepare('SELECT COUNT(*) FROM jobs;').pluck();
-      const conversationJobs = db
-        .prepare("SELECT COUNT(*) FROM jobs WHERE queueType = 'conversation';")
-        .pluck();
-      const deliveryJobs = db
-        .prepare(
-          "SELECT COUNT(*) FROM jobs WHERE queueType = 'delivery receipts';"
-        )
-        .pluck();
-      const readJobs = db
-        .prepare("SELECT COUNT(*) FROM jobs WHERE queueType = 'read receipts';")
-        .pluck();
-      const viewedJobs = db
-        .prepare(
-          "SELECT COUNT(*) FROM jobs WHERE queueType = 'viewed receipts';"
-        )
-        .pluck();
+      const totalJobs = db.prepare('SELECT COUNT(*) FROM jobs;', {
+        pluck: true,
+      });
+      const conversationJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'conversation';",
+        {
+          pluck: true,
+        }
+      );
+      const deliveryJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'delivery receipts';",
+        {
+          pluck: true,
+        }
+      );
+      const readJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'read receipts';",
+        {
+          pluck: true,
+        }
+      );
+      const viewedJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'viewed receipts';",
+        {
+          pluck: true,
+        }
      );

      assert.strictEqual(totalJobs.get(), 5, 'before total');
      assert.strictEqual(conversationJobs.get(), 1, 'before conversation');
@@ -2669,15 +2820,21 @@ describe('SQL migrations test', () => {
        `
      );

-      const totalJobs = db.prepare('SELECT COUNT(*) FROM jobs;').pluck();
-      const conversationJobs = db
-        .prepare("SELECT COUNT(*) FROM jobs WHERE queueType = 'conversation';")
-        .pluck();
-      const deliveryJobs = db
-        .prepare(
-          "SELECT COUNT(*) FROM jobs WHERE queueType = 'delivery receipts';"
-        )
-        .pluck();
+      const totalJobs = db.prepare('SELECT COUNT(*) FROM jobs;', {
+        pluck: true,
+      });
+      const conversationJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'conversation';",
+        {
+          pluck: true,
+        }
+      );
+      const deliveryJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'delivery receipts';",
+        {
+          pluck: true,
+        }
+      );

      assert.strictEqual(totalJobs.get(), 7, 'total jobs before');
      assert.strictEqual(conversationJobs.get(), 0, 'conversation jobs before');
@@ -2840,13 +2997,21 @@ describe('SQL migrations test', () => {
        `
      );

-      const totalJobs = db.prepare('SELECT COUNT(*) FROM jobs;').pluck();
-      const conversationJobs = db
-        .prepare("SELECT COUNT(*) FROM jobs WHERE queueType = 'conversation';")
-        .pluck();
-      const readJobs = db
-        .prepare("SELECT COUNT(*) FROM jobs WHERE queueType = 'read receipts';")
-        .pluck();
+      const totalJobs = db.prepare('SELECT COUNT(*) FROM jobs;', {
+        pluck: true,
+      });
+      const conversationJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'conversation';",
+        {
+          pluck: true,
+        }
+      );
+      const readJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'read receipts';",
+        {
+          pluck: true,
+        }
+      );

      assert.strictEqual(totalJobs.get(), 7, 'total jobs before');
      assert.strictEqual(conversationJobs.get(), 0, 'conversation jobs before');
@@ -2999,15 +3164,21 @@ describe('SQL migrations test', () => {
        `
      );

-      const totalJobs = db.prepare('SELECT COUNT(*) FROM jobs;').pluck();
-      const conversationJobs = db
-        .prepare("SELECT COUNT(*) FROM jobs WHERE queueType = 'conversation';")
-        .pluck();
-      const viewedJobs = db
-        .prepare(
-          "SELECT COUNT(*) FROM jobs WHERE queueType = 'viewed receipts';"
-        )
-        .pluck();
+      const totalJobs = db.prepare('SELECT COUNT(*) FROM jobs;', {
+        pluck: true,
+      });
+      const conversationJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'conversation';",
+        {
+          pluck: true,
+        }
+      );
+      const viewedJobs = db.prepare(
+        "SELECT COUNT(*) FROM jobs WHERE queueType = 'viewed receipts';",
+        {
+          pluck: true,
+        }
+      );

      assert.strictEqual(totalJobs.get(), 7, 'total jobs before');
      assert.strictEqual(conversationJobs.get(), 0, 'conversation jobs before');
@@ -3064,10 +3235,9 @@ describe('SQL migrations test', () => {
    beforeEach(() => updateToVersion(db, 83));

    it('ensures that index is used for getTotalUnreadMentionsOfMeForConversation, no storyId', () => {
-      const { detail } = db
-        .prepare(
-          `
-          EXPLAIN QUERY PLAN
+      const detail = explain(
+        db,
+        sql`
          SELECT count(1)
          FROM messages
          WHERE
@@ -3077,8 +3247,7 @@ describe('SQL migrations test', () => {
            isStory IS 0 AND
            NULL IS NULL
        `
-        )
-        .get();
+      );

      assert.notInclude(detail, 'B-TREE');
      assert.notInclude(detail, 'SCAN');
@@ -3089,10 +3258,9 @@ describe('SQL migrations test', () => {
    });

    it('ensures that index is used for getTotalUnreadMentionsOfMeForConversation, with storyId', () => {
-      const { detail } = db
-        .prepare(
-          `
-          EXPLAIN QUERY PLAN
+      const detail = explain(
+        db,
+        sql`
          SELECT count(1)
          FROM messages
          WHERE
@@ -3102,8 +3270,7 @@ describe('SQL migrations test', () => {
            isStory IS 0 AND
            storyId IS 'storyId'
        `
-        )
-        .get();
+      );

      assert.notInclude(detail, 'B-TREE');
      assert.notInclude(detail, 'SCAN');
@@ -3114,10 +3281,9 @@ describe('SQL migrations test', () => {
    });

    it('ensures that index is used for getOldestUnreadMentionOfMeForConversation, no storyId', () => {
-      const { detail } = db
-        .prepare(
-          `
-          EXPLAIN QUERY PLAN
+      const detail = explain(
+        db,
+        sql`
          SELECT received_at, sent_at, id FROM messages WHERE
            conversationId = 'conversationId' AND
            readStatus = ${ReadStatus.Unread} AND
@@ -3127,8 +3293,7 @@ describe('SQL migrations test', () => {
          ORDER BY received_at ASC, sent_at ASC
          LIMIT 1;
        `
-        )
-        .get();
+      );

      assert.notInclude(detail, 'B-TREE');
      assert.notInclude(detail, 'SCAN');
@@ -3139,10 +3304,9 @@ describe('SQL migrations test', () => {
    });

    it('ensures that index is used for getOldestUnreadMentionOfMeForConversation, with storyId', () => {
-      const { detail } = db
-        .prepare(
-          `
-          EXPLAIN QUERY PLAN
+      const detail = explain(
+        db,
+        sql`
          SELECT received_at, sent_at, id FROM messages WHERE
            conversationId = 'conversationId' AND
            readStatus = ${ReadStatus.Unread} AND
@@ -3152,8 +3316,7 @@ describe('SQL migrations test', () => {
          ORDER BY received_at ASC, sent_at ASC
          LIMIT 1;
        `
-        )
-        .get();
+      );

      assert.notInclude(detail, 'B-TREE');
      assert.notInclude(detail, 'SCAN');
@@ -3174,12 +3337,12 @@ describe('SQL migrations test', () => {
      id?: string;
      mentions?: Array;
      boldRanges?: Array>;
-    }) {
-      const json: Partial<{
+    }): { id: string; body: string; bodyRanges?: Array } {
+      const json: {
        id: string;
        body: string;
-        bodyRanges: Array;
-      }> = {
+        bodyRanges?: Array;
+      } = {
        id: id ?? generateGuid(),
        body: `Message body: ${id}`,
      };
@@ -3224,7 +3387,11 @@ describe('SQL migrations test', () => {
      );

      assert.equal(
-        db.prepare('SELECT COUNT(*) FROM messages;').pluck().get(),
+        db
+          .prepare('SELECT COUNT(*) FROM messages;', {
+            pluck: true,
+          })
+          .get(),
        messages.length
      );

@@ -3288,7 +3455,11 @@ describe('SQL migrations test', () => {
    it('Updates mention table when new messages are added', () => {
      updateToVersion(db, schemaVersion);
      assert.equal(
-        db.prepare('SELECT COUNT(*) FROM mentions;').pluck().get(),
+        db
+          .prepare('SELECT COUNT(*) FROM mentions;', {
+            pluck: true,
+          })
+          .get(),
        0
      );

@@ -3336,7 +3507,11 @@ describe('SQL migrations test', () => {
    it('Removes mentions when messages are deleted', () => {
      updateToVersion(db, schemaVersion);
      assert.equal(
-        db.prepare('SELECT COUNT(*) FROM mentions;').pluck().get(),
+        db
+          .prepare('SELECT COUNT(*) FROM mentions;', {
+            pluck: true,
+          })
+          .get(),
        0
      );

@@ -3366,7 +3541,11 @@ describe('SQL migrations test', () => {
    it('Updates mentions when messages are updated', () => {
      updateToVersion(db, schemaVersion);
      assert.equal(
-        db.prepare('SELECT COUNT(*) FROM mentions;').pluck().get(),
+        db
+          .prepare('SELECT COUNT(*) FROM mentions;', {
+            pluck: true,
+          })
+          .get(),
        0
      );

@@ -3428,8 +3607,9 @@ describe('SQL migrations test', () => {
    });
    it('uses the mentionUuid index for searching mentions', () => {
      updateToVersion(db, schemaVersion);
-      const [query, params] = sql`
-        EXPLAIN QUERY PLAN
+      const detail = explain(
+        db,
+        sql`
        SELECT
          messages.rowid,
          mentionUuid
@@ -3444,8 +3624,8 @@ describe('SQL migrations test', () => {
          AND messages.storyId IS NULL
        LIMIT 100;
-      `;
-      const { detail } = db.prepare(query).get(params);
+      `
+      );

      assert.notInclude(detail, 'B-TREE');
      assert.notInclude(detail, 'SCAN');
@@ -3472,11 +3652,13 @@ describe('SQL migrations test', () => {
        `
      ).run();

-      const payload = db.prepare('SELECT * FROM kyberPreKeys LIMIT 1;').get();
+      const payload = db
+        .prepare('SELECT * FROM kyberPreKeys LIMIT 1;')
+        .get<{ id: string; json: string; ourUuid: string }>();

-      assert.strictEqual(payload.id, id);
-      assert.strictEqual(payload.json, json);
-      assert.strictEqual(payload.ourUuid, ourUuid);
+      assert.strictEqual(payload?.id, id);
+      assert.strictEqual(payload?.json, json);
+      assert.strictEqual(payload?.ourUuid, ourUuid);
    });

    it('adds a createdAt to all existing prekeys', () => {
@@ -3498,11 +3680,13 @@ describe('SQL migrations test', () => {

      updateToVersion(db, 85);

-      const payload = db.prepare('SELECT * FROM preKeys LIMIT 1;').get();
+      const payload = db
+        .prepare('SELECT * FROM preKeys LIMIT 1;')
+        .get<{ id: string; json: string }>();

-      assert.strictEqual(payload.id, id);
+      assert.strictEqual(payload?.id, id);

-      const object = JSON.parse(payload.json);
+      const object = JSON.parse(payload?.json ?? '');
      assert.strictEqual(object.ourUuid, ourUuid);
      assert.isAtLeast(object.createdAt, startingTime);
    });
@@ -3511,8 +3695,9 @@ describe('SQL migrations test', () => {
  describe('updateToSchemaVersion86', () => {
    it('supports the right index for first query used in getRecentStoryRepliesSync', () => {
      updateToVersion(db, 86);
-      const [query, params] = sql`
-        EXPLAIN QUERY PLAN
+      const detail = explain(
+        db,
+        sql`
        SELECT json FROM messages WHERE
          ('messageId' IS NULL OR id IS NOT 'messageId') AND
          isStory IS 0 AND
@@ -3520,8 +3705,8 @@ describe('SQL migrations test', () => {
          received_at = 100000 AND sent_at < 100000
        ORDER BY received_at DESC, sent_at DESC
        LIMIT 100
-      `;
-      const { detail } = db.prepare(query).get(params);
+      `
+      );

      assert.notInclude(detail, 'B-TREE');
      assert.notInclude(detail, 'SCAN');
@@ -3533,8 +3718,9 @@ describe('SQL migrations test', () => {

    it('supports the right index for second query used in getRecentStoryRepliesSync', () => {
      updateToVersion(db, 86);
-      const [query, params] = sql`
-        EXPLAIN QUERY PLAN
+      const detail = explain(
+        db,
+        sql`
        SELECT json FROM messages WHERE
          ('messageId' IS NULL OR id IS NOT 'messageId') AND
          isStory IS 0 AND
@@ -3542,8 +3728,8 @@ describe('SQL migrations test', () => {
          received_at < 100000
        ORDER BY received_at DESC, sent_at DESC
        LIMIT 100
-      `;
-      const { detail } = db.prepare(query).get(params);
+      `
+      );

      assert.notInclude(detail, 'B-TREE');
      assert.notInclude(detail, 'SCAN');
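The hunks above replace inline `EXPLAIN QUERY PLAN` plumbing with an `explain()` helper that is introduced elsewhere in this patch and is not shown in this excerpt. As a minimal sketch of what that helper plausibly does, reconstructed from the inline code these hunks delete (the parameter types and the exact `prepare`/`all` signatures below are assumptions, not the patch's actual definitions):

    // Hypothetical sketch only; the real helper ships elsewhere in this patch.
    // It prefixes the templated query with EXPLAIN QUERY PLAN, runs it, and
    // joins the `detail` column of the resulting rows into one string.
    type QueryTemplate = [string, ReadonlyArray<unknown>];

    function explain(
      db: {
        prepare: (query: string) => {
          all: (params: ReadonlyArray<unknown>) => Array<{ detail: string }>;
        };
      },
      template: QueryTemplate
    ): string {
      const [query, params] = template;
      return db
        .prepare(`EXPLAIN QUERY PLAN ${query}`)
        .all(params)
        .map(({ detail }) => detail)
        .join('\n');
    }

The tests then assert against the joined plan text, for example `assert.include(details, 'USING INDEX messages_unread')`.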