From bf43cbddbccc51a1147eb37cd7a64762bde4f506 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?coadde=20=5BM=C3=A1rcio=20Alexandre=20Silva=20Delgado=5D?=
Date: Wed, 26 Aug 2015 03:41:50 -0300
Subject: add copy from the lukeshu/xbs repo

---
 extra/lukeshu-xbs/COPYING | 674 +++++++++++++
 extra/lukeshu-xbs/TODO | 10 +
 extra/lukeshu-xbs/config | 48 +
 extra/lukeshu-xbs/config.local.import-community | 5 +
 extra/lukeshu-xbs/config.local.import-packages | 5 +
 extra/lukeshu-xbs/config.local.parabola | 14 +
 extra/lukeshu-xbs/config.local.svn-community | 11 +
 extra/lukeshu-xbs/config.local.svn-packages | 11 +
 .../cron-jobs/check_archlinux/check_packages.py | 508 ++++++++++
 .../cron-jobs/check_archlinux/parse_pkgbuilds.sh | 153 +++
 extra/lukeshu-xbs/cron-jobs/devlist-mailer | 27 +
 extra/lukeshu-xbs/cron-jobs/ftpdir-cleanup | 96 ++
 extra/lukeshu-xbs/cron-jobs/integrity-check | 32 +
 extra/lukeshu-xbs/cron-jobs/make_repo_torrents | 70 ++
 extra/lukeshu-xbs/cron-jobs/sourceballs | 150 +++
 extra/lukeshu-xbs/cron-jobs/sourceballs.force | 4 +
 extra/lukeshu-xbs/cron-jobs/sourceballs.skip | 43 +
 extra/lukeshu-xbs/db-check-package-libraries.py | 193 ++++
 extra/lukeshu-xbs/db-functions | 456 +++++++++
 extra/lukeshu-xbs/db-import | 288 ++++++
 extra/lukeshu-xbs/db-import.conf | 23 +
 extra/lukeshu-xbs/db-init | 6 +
 extra/lukeshu-xbs/db-list-nonfree.py | 28 +
 extra/lukeshu-xbs/db-list-unsigned-packages | 38 +
 extra/lukeshu-xbs/db-list-unsigned-packages.py | 96 ++
 extra/lukeshu-xbs/db-move | 105 ++
 extra/lukeshu-xbs/db-pick-mirror | 24 +
 extra/lukeshu-xbs/db-remove | 49 +
 extra/lukeshu-xbs/db-repo-add | 41 +
 extra/lukeshu-xbs/db-repo-remove | 37 +
 extra/lukeshu-xbs/db-update | 126 +++
 extra/lukeshu-xbs/make_individual_torrent | 52 +
 extra/lukeshu-xbs/test/__init__.py | 0
 extra/lukeshu-xbs/test/blacklist_sample | 2 +
 extra/lukeshu-xbs/test/core.db.tar.gz | Bin 0 -> 1345 bytes
 extra/lukeshu-xbs/test/depends | 4 +
 extra/lukeshu-xbs/test/desc | 39 +
 extra/lukeshu-xbs/test/lib/common.inc | 321 ++++++
 extra/lukeshu-xbs/test/lib/shunit2 | 1048 ++++++++++++++++++++
 extra/lukeshu-xbs/test/packages/pkg-any-a/PKGBUILD | 12 +
 extra/lukeshu-xbs/test/packages/pkg-any-b/PKGBUILD | 12 +
 .../test/packages/pkg-simple-a/Makefile | 1 +
 .../test/packages/pkg-simple-a/PKGBUILD | 22 +
 .../lukeshu-xbs/test/packages/pkg-simple-a/test.c | 1 +
 .../test/packages/pkg-simple-b/Makefile | 1 +
 .../test/packages/pkg-simple-b/PKGBUILD | 22 +
 .../lukeshu-xbs/test/packages/pkg-simple-b/test.c | 1 +
 .../test/packages/pkg-simple-epoch/Makefile | 1 +
 .../test/packages/pkg-simple-epoch/PKGBUILD | 23 +
 .../test/packages/pkg-simple-epoch/test.c | 1 +
 .../lukeshu-xbs/test/packages/pkg-split-a/Makefile | 1 +
 .../lukeshu-xbs/test/packages/pkg-split-a/PKGBUILD | 28 +
 extra/lukeshu-xbs/test/packages/pkg-split-a/test.c | 1 +
 .../lukeshu-xbs/test/packages/pkg-split-b/Makefile | 1 +
 .../lukeshu-xbs/test/packages/pkg-split-b/PKGBUILD | 29 +
 extra/lukeshu-xbs/test/packages/pkg-split-b/test.c | 1 +
 extra/lukeshu-xbs/test/rsync_output_sample | 14 +
 extra/lukeshu-xbs/test/runTest | 15 +
 extra/lukeshu-xbs/test/src/Makefile | 5 +
 extra/lukeshu-xbs/test/src/test.c | 7 +
 extra/lukeshu-xbs/test/test.d/create-filelists.sh | 105 ++
 extra/lukeshu-xbs/test/test.d/db-move.sh | 122 +++
 extra/lukeshu-xbs/test/test.d/db-remove.sh | 77 ++
 extra/lukeshu-xbs/test/test.d/db-repo-add.sh | 54 +
 extra/lukeshu-xbs/test/test.d/db-repo-remove.sh | 58 ++
 extra/lukeshu-xbs/test/test.d/db-update.sh | 175 ++++
 extra/lukeshu-xbs/test/test.d/ftpdir-cleanup.sh | 121 +++
 extra/lukeshu-xbs/test/test.d/packages.sh | 11 +
 extra/lukeshu-xbs/test/test.d/signed-packages.sh | 36 +
 extra/lukeshu-xbs/test/test.d/sourceballs.sh | 84 ++
 extra/lukeshu-xbs/test/test.d/testing2x.sh | 27 +
 extra/lukeshu-xbs/test/test_filter.py | 196 ++++
 72 files changed, 6102 insertions(+)
 create mode 100644 extra/lukeshu-xbs/COPYING
 create mode 100644 extra/lukeshu-xbs/TODO
 create mode 100644 extra/lukeshu-xbs/config
 create mode 100644 extra/lukeshu-xbs/config.local.import-community
 create mode 100644 extra/lukeshu-xbs/config.local.import-packages
 create mode 100644 extra/lukeshu-xbs/config.local.parabola
 create mode 100644 extra/lukeshu-xbs/config.local.svn-community
 create mode 100644 extra/lukeshu-xbs/config.local.svn-packages
 create mode 100755 extra/lukeshu-xbs/cron-jobs/check_archlinux/check_packages.py
 create mode 100755 extra/lukeshu-xbs/cron-jobs/check_archlinux/parse_pkgbuilds.sh
 create mode 100755 extra/lukeshu-xbs/cron-jobs/devlist-mailer
 create mode 100755 extra/lukeshu-xbs/cron-jobs/ftpdir-cleanup
 create mode 100755 extra/lukeshu-xbs/cron-jobs/integrity-check
 create mode 100755 extra/lukeshu-xbs/cron-jobs/make_repo_torrents
 create mode 100755 extra/lukeshu-xbs/cron-jobs/sourceballs
 create mode 100644 extra/lukeshu-xbs/cron-jobs/sourceballs.force
 create mode 100644 extra/lukeshu-xbs/cron-jobs/sourceballs.skip
 create mode 100755 extra/lukeshu-xbs/db-check-package-libraries.py
 create mode 100644 extra/lukeshu-xbs/db-functions
 create mode 100755 extra/lukeshu-xbs/db-import
 create mode 100644 extra/lukeshu-xbs/db-import.conf
 create mode 100755 extra/lukeshu-xbs/db-init
 create mode 100755 extra/lukeshu-xbs/db-list-nonfree.py
 create mode 100755 extra/lukeshu-xbs/db-list-unsigned-packages
 create mode 100755 extra/lukeshu-xbs/db-list-unsigned-packages.py
 create mode 100755 extra/lukeshu-xbs/db-move
 create mode 100755 extra/lukeshu-xbs/db-pick-mirror
 create mode 100755 extra/lukeshu-xbs/db-remove
 create mode 100755 extra/lukeshu-xbs/db-repo-add
 create mode 100755 extra/lukeshu-xbs/db-repo-remove
 create mode 100755 extra/lukeshu-xbs/db-update
 create mode 100755 extra/lukeshu-xbs/make_individual_torrent
 create mode 100644 extra/lukeshu-xbs/test/__init__.py
 create mode 100644 extra/lukeshu-xbs/test/blacklist_sample
 create mode 100644 extra/lukeshu-xbs/test/core.db.tar.gz
 create mode 100644 extra/lukeshu-xbs/test/depends
 create mode 100644 extra/lukeshu-xbs/test/desc
 create mode 100644 extra/lukeshu-xbs/test/lib/common.inc
 create mode 100755 extra/lukeshu-xbs/test/lib/shunit2
 create mode 100644 extra/lukeshu-xbs/test/packages/pkg-any-a/PKGBUILD
 create mode 100644 extra/lukeshu-xbs/test/packages/pkg-any-b/PKGBUILD
 create mode 120000 extra/lukeshu-xbs/test/packages/pkg-simple-a/Makefile
 create mode 100644 extra/lukeshu-xbs/test/packages/pkg-simple-a/PKGBUILD
 create mode 120000 extra/lukeshu-xbs/test/packages/pkg-simple-a/test.c
 create mode 120000 extra/lukeshu-xbs/test/packages/pkg-simple-b/Makefile
 create mode 100644 extra/lukeshu-xbs/test/packages/pkg-simple-b/PKGBUILD
 create mode 120000 extra/lukeshu-xbs/test/packages/pkg-simple-b/test.c
 create mode 120000 extra/lukeshu-xbs/test/packages/pkg-simple-epoch/Makefile
 create mode 100644 extra/lukeshu-xbs/test/packages/pkg-simple-epoch/PKGBUILD
 create mode 120000 extra/lukeshu-xbs/test/packages/pkg-simple-epoch/test.c
 create mode 120000 extra/lukeshu-xbs/test/packages/pkg-split-a/Makefile
 create mode 100644 extra/lukeshu-xbs/test/packages/pkg-split-a/PKGBUILD
 create mode 120000 extra/lukeshu-xbs/test/packages/pkg-split-a/test.c
 create mode 120000 extra/lukeshu-xbs/test/packages/pkg-split-b/Makefile
 create mode 100644 extra/lukeshu-xbs/test/packages/pkg-split-b/PKGBUILD
 create mode 120000 extra/lukeshu-xbs/test/packages/pkg-split-b/test.c
 create mode 100644 extra/lukeshu-xbs/test/rsync_output_sample
 create mode 100755 extra/lukeshu-xbs/test/runTest
 create mode 100644 extra/lukeshu-xbs/test/src/Makefile
 create mode 100644 extra/lukeshu-xbs/test/src/test.c
 create mode 100755 extra/lukeshu-xbs/test/test.d/create-filelists.sh
 create mode 100755 extra/lukeshu-xbs/test/test.d/db-move.sh
 create mode 100755 extra/lukeshu-xbs/test/test.d/db-remove.sh
 create mode 100755 extra/lukeshu-xbs/test/test.d/db-repo-add.sh
 create mode 100755 extra/lukeshu-xbs/test/test.d/db-repo-remove.sh
 create mode 100755 extra/lukeshu-xbs/test/test.d/db-update.sh
 create mode 100755 extra/lukeshu-xbs/test/test.d/ftpdir-cleanup.sh
 create mode 100755 extra/lukeshu-xbs/test/test.d/packages.sh
 create mode 100755 extra/lukeshu-xbs/test/test.d/signed-packages.sh
 create mode 100755 extra/lukeshu-xbs/test/test.d/sourceballs.sh
 create mode 100755 extra/lukeshu-xbs/test/test.d/testing2x.sh
 create mode 100644 extra/lukeshu-xbs/test/test_filter.py

diff --git a/extra/lukeshu-xbs/COPYING b/extra/lukeshu-xbs/COPYING
new file mode 100644
index 0000000..94a9ed0
--- /dev/null
+++ b/extra/lukeshu-xbs/COPYING
@@ -0,0 +1,674 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+  To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+  Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+  For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+  Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+  Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+                       TERMS AND CONDITIONS
+
+  0. Definitions.
+
+  "This License" refers to version 3 of the GNU General Public License.
+
+  "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+  "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+  To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+  A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+  To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+  To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+  An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+  1. Source Code.
+
+  The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+  A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+  The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+  The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+  The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+  The Corresponding Source for a work in source code form is that
+same work.
+
+  2. Basic Permissions.
+
+  All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+  You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+  Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+  3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+  No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+  When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+  4. Conveying Verbatim Copies.
+
+  You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+  You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+  5. Conveying Modified Source Versions.
+
+  You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+    a) The work must carry prominent notices stating that you modified
+    it, and giving a relevant date.
+
+    b) The work must carry prominent notices stating that it is
+    released under this License and any conditions added under section
+    7. This requirement modifies the requirement in section 4 to
+    "keep intact all notices".
+
+    c) You must license the entire work, as a whole, under this
+    License to anyone who comes into possession of a copy. This
+    License will therefore apply, along with any applicable section 7
+    additional terms, to the whole of the work, and all its parts,
+    regardless of how they are packaged. This License gives no
+    permission to license the work in any other way, but it does not
+    invalidate such permission if you have separately received it.
+
+    d) If the work has interactive user interfaces, each must display
+    Appropriate Legal Notices; however, if the Program has interactive
+    interfaces that do not display Appropriate Legal Notices, your
+    work need not make them do so.
+
+  A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+  6. Conveying Non-Source Forms.
+
+  You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+    a) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by the
+    Corresponding Source fixed on a durable physical medium
+    customarily used for software interchange.
+
+    b) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by a
+    written offer, valid for at least three years and valid for as
+    long as you offer spare parts or customer support for that product
+    model, to give anyone who possesses the object code either (1) a
+    copy of the Corresponding Source for all the software in the
+    product that is covered by this License, on a durable physical
+    medium customarily used for software interchange, for a price no
+    more than your reasonable cost of physically performing this
+    conveying of source, or (2) access to copy the
+    Corresponding Source from a network server at no charge.
+
+    c) Convey individual copies of the object code with a copy of the
+    written offer to provide the Corresponding Source. This
+    alternative is allowed only occasionally and noncommercially, and
+    only if you received the object code with such an offer, in accord
+    with subsection 6b.
+
+    d) Convey the object code by offering access from a designated
+    place (gratis or for a charge), and offer equivalent access to the
+    Corresponding Source in the same way through the same place at no
+    further charge. You need not require recipients to copy the
+    Corresponding Source along with the object code. If the place to
+    copy the object code is a network server, the Corresponding Source
+    may be on a different server (operated by you or a third party)
+    that supports equivalent copying facilities, provided you maintain
+    clear directions next to the object code saying where to find the
+    Corresponding Source. Regardless of what server hosts the
+    Corresponding Source, you remain obligated to ensure that it is
+    available for as long as needed to satisfy these requirements.
+
+    e) Convey the object code using peer-to-peer transmission, provided
+    you inform other peers where the object code and Corresponding
+    Source of the work are being offered to the general public at no
+    charge under subsection 6d.
+
+  A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+  A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+  "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+  If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+  The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+  Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+  7. Additional Terms.
+
+  "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+  When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+  Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+    a) Disclaiming warranty or limiting liability differently from the
+    terms of sections 15 and 16 of this License; or
+
+    b) Requiring preservation of specified reasonable legal notices or
+    author attributions in that material or in the Appropriate Legal
+    Notices displayed by works containing it; or
+
+    c) Prohibiting misrepresentation of the origin of that material, or
+    requiring that modified versions of such material be marked in
+    reasonable ways as different from the original version; or
+
+    d) Limiting the use for publicity purposes of names of licensors or
+    authors of the material; or
+
+    e) Declining to grant rights under trademark law for use of some
+    trade names, trademarks, or service marks; or
+
+    f) Requiring indemnification of licensors and authors of that
+    material by anyone who conveys the material (or modified versions of
+    it) with contractual assumptions of liability to the recipient, for
+    any liability that these contractual assumptions directly impose on
+    those licensors and authors.
+
+  All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+  If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+  Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+  8. Termination.
+
+  You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+  However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+  Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+  Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+  9. Acceptance Not Required for Having Copies.
+
+  You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+  10. Automatic Licensing of Downstream Recipients.
+
+  Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+  An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+  You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+  11. Patents.
+
+  A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+  A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+  Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+  In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+  If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+  If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+  A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+  Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+  12. No Surrender of Others' Freedom.
+
+  If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+  13. Use with the GNU Affero General Public License.
+
+  Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+  14. Revised Versions of this License.
+
+  The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+  Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+  If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+  Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+  15. Disclaimer of Warranty.
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/extra/lukeshu-xbs/TODO b/extra/lukeshu-xbs/TODO
new file mode 100644
index 0000000..9dd4b52
--- /dev/null
+++ b/extra/lukeshu-xbs/TODO
@@ -0,0 +1,10 @@
+* Test Suite for clean_repo.py
+
+  - Review all repo
+  - Remove all blacklisted packages
+  - Get pending list right
+  - Extract licenses all right
+
+* Fix db-move
+
+  - Make it use abslibre
diff --git a/extra/lukeshu-xbs/config b/extra/lukeshu-xbs/config
new file mode 100644
index 0000000..bd05ac8
--- /dev/null
+++ b/extra/lukeshu-xbs/config
@@ -0,0 +1,48 @@
+#!/hint/bash
+
+case "$USER" in
+	db-import-packages)  _name=import-packages;;
+	db-import-community) _name=import-community;;
+	*)                   _name=parabola;;
+esac
+
+FTP_BASE="/srv/repo/main"
+PKGREPOS=()
+PKGPOOL=''
+SRCPOOL=''
+
+CLEANUP_DESTDIR="/srv/repo/private/${_name}/package-cleanup"
+CLEANUP_DRYRUN=false
+# Time in days to keep moved packages
+CLEANUP_KEEP=30
+
+SOURCE_CLEANUP_DESTDIR="/srv/repo/private/${_name}/source-cleanup"
+SOURCE_CLEANUP_DRYRUN=false
+# Time in days to keep moved sourcepackages
+SOURCE_CLEANUP_KEEP=14
+
+REQUIRE_SIGNATURE=true
+
+LOCK_DELAY=10
+LOCK_TIMEOUT=300
+
+[ -n "${STAGING:-}" ] || STAGING="$HOME/staging/unknown/staging"
+export TMPDIR="${TMPDIR:-/tmp}"
+ARCHES=(i686 x86_64)
+DBEXT=".db.tar.gz"
+FILESEXT=".files.tar.gz"
+PKGEXT=".pkg.tar.?z"
+SRCEXT=".src.tar.gz"
+
+# Allowed licenses: get sourceballs only for licenses in this array
+# Empty (commented out) to get sourceballs for all packages
+#ALLOWED_LICENSES=('GPL' 'GPL1' 'GPL2' 'LGPL' 'LGPL1' 'LGPL2' 'LGPL2.1')
+
+# Where to send error emails, and who they are from
+LIST="dev@lists.parabola.nu"
+FROM="dbscripts+${_name}@$(hostname -f)"
+
+# Override default config with config.local
+[ -f "$(dirname "${BASH_SOURCE[0]}")/config.local" ] && . "$(dirname "${BASH_SOURCE[0]}")/config.local"
+[ -f "$(dirname "${BASH_SOURCE[0]}")/config.local.${_name}" ] && . "$(dirname "${BASH_SOURCE[0]}")/config.local.${_name}"
+unset _name
diff --git a/extra/lukeshu-xbs/config.local.import-community b/extra/lukeshu-xbs/config.local.import-community
new file mode 100644
index 0000000..393fd57
--- /dev/null
+++ b/extra/lukeshu-xbs/config.local.import-community
@@ -0,0 +1,5 @@
+#!/hint/bash
+
+PKGREPOS=({community,multilib}{,-testing,-staging})
+PKGPOOL='pool/community'
+SRCPOOL='sources/community'
diff --git a/extra/lukeshu-xbs/config.local.import-packages b/extra/lukeshu-xbs/config.local.import-packages
new file mode 100644
index 0000000..c699b28
--- /dev/null
+++ b/extra/lukeshu-xbs/config.local.import-packages
@@ -0,0 +1,5 @@
+#!/hint/bash
+
+PKGREPOS=('core' 'extra' 'testing' 'staging' {kde,gnome}-unstable)
+PKGPOOL='pool/packages'
+SRCPOOL='sources/packages'
diff --git a/extra/lukeshu-xbs/config.local.parabola b/extra/lukeshu-xbs/config.local.parabola
new file mode 100644
index 0000000..648dfbb
--- /dev/null
+++ b/extra/lukeshu-xbs/config.local.parabola
@@ -0,0 +1,14 @@
+#!/hint/bash
+
+PKGREPOS=(
+	# Main repos
+	libre{,-testing}
+	libre-multilib{,-testing}
+	# Community project repos
+	{nonsystemd,nonprism}{,-testing}
+	pcr kernels cross java
+	# User repos
+	'~smv' '~xihh' '~brendan' '~lukeshu' '~emulatorman' '~aurelien' '~jorginho' '~coadde' '~drtan'
+)
+PKGPOOL='pool/parabola'
+SRCPOOL='sources/parabola'
diff --git a/extra/lukeshu-xbs/config.local.svn-community b/extra/lukeshu-xbs/config.local.svn-community
new file mode 100644
index 0000000..105ea66
--- /dev/null
+++ b/extra/lukeshu-xbs/config.local.svn-community
@@ -0,0 +1,11 @@
+PKGREPOS=('community' 'community-testing' 'community-staging' 'multilib' 'multilib-testing' 'multilib-staging')
+PKGPOOL='pool/community'
+SRCPOOL='sources/community'
+SVNREPO='file:///srv/repos/svn-community/svn'
+SVNUSER='svn-community'
+TESTING_REPO='community-testing'
+STABLE_REPOS=('community')
+
+CLEANUP_DESTDIR="/srv/repos/svn-community/package-cleanup"
+SOURCE_CLEANUP_DESTDIR="/srv/repos/svn-community/source-cleanup"
+TMPDIR="/srv/repos/svn-community/tmp"
diff --git a/extra/lukeshu-xbs/config.local.svn-packages b/extra/lukeshu-xbs/config.local.svn-packages
new file mode 100644
index 0000000..958a483
--- /dev/null
+++ b/extra/lukeshu-xbs/config.local.svn-packages
@@ -0,0 +1,11 @@
+PKGREPOS=('core' 'extra' 'testing' 'staging' 'kde-unstable' 'gnome-unstable')
+PKGPOOL='pool/packages'
+SRCPOOL='sources/packages'
+SVNREPO='file:///srv/repos/svn-packages/svn'
+SVNUSER='svn-packages'
+TESTING_REPO='testing'
+STABLE_REPOS=('core' 'extra')
+
+CLEANUP_DESTDIR="/srv/repos/svn-packages/package-cleanup"
+SOURCE_CLEANUP_DESTDIR="/srv/repos/svn-packages/source-cleanup"
+TMPDIR="/srv/repos/svn-packages/tmp"
diff --git a/extra/lukeshu-xbs/cron-jobs/check_archlinux/check_packages.py b/extra/lukeshu-xbs/cron-jobs/check_archlinux/check_packages.py
new file mode 100755
index 0000000..ac0194f
--- /dev/null
+++ b/extra/lukeshu-xbs/cron-jobs/check_archlinux/check_packages.py
@@ -0,0 +1,508 @@
+#!/usr/bin/env python2
+#
+# check_archlinux.py
+#
+# Original script by Scott Horowitz
+# Rewritten by Xavier Chantry
+#
+# This script currently checks for a number of issues in your ABS tree:
+#   1. Directories with missing PKGBUILDS
+#   2. Invalid PKGBUILDs (bash syntax error for instance)
+#   3. PKGBUILD names that don't match their directory
+#   4. Duplicate PKGBUILDs
+#   5. Valid arch's in PKGBUILDS
+#   6. Missing (make-)dependencies
+#   7. Hierarchy of repos (e.g., that a core package doesn't depend on
+#      a non-core package)
+#   8. Circular dependencies
+
+import os,re,commands,getopt,sys,tarfile
+import pdb
+
+import ctypes
+_alpm = ctypes.cdll.LoadLibrary("libalpm.so")
+
+DBEXT='.db.tar.gz'
+
+packages = {} # pkgname : PacmanPackage
+repopkgs = {} # pkgname : PacmanPackage
+provisions = {} # provision : PacmanPackage
+pkgdeps,makepkgdeps = {},{} # PacmanPackage : list of the PacmanPackage dependencies
+invalid_pkgbuilds = []
+missing_pkgbuilds = []
+dups = []
+
+dbonly = []
+absonly = []
+
+mismatches = []
+missing_deps = []
+missing_makedeps = []
+invalid_archs = []
+dep_hierarchy = []
+makedep_hierarchy = []
+circular_deps = [] # pkgname>dep1>dep2>...>pkgname
+checked_deps = []
+
+class PacmanPackage:
+	def __init__(self):
+		self.name,self.version = "",""
+		self.base = ""
+		self.path,self.repo = "",""
+		self.deps,self.makedeps = [],[]
+		self.provides,self.conflicts = [],[]
+		self.archs = []
+
+class Depend:
+	def __init__(self,name,version,mod):
+		self.name = name
+		self.version = version
+		self.mod = mod
+
+def parse_pkgbuilds(repos,arch):
+	for absroot in absroots:
+		for repo in repos:
+			cmd = os.path.dirname(os.path.realpath(sys.argv[0])) + '/parse_pkgbuilds.sh '
+			cmd += arch + ' ' + absroot + '/' + repo
+			(status,output) = commands.getstatusoutput(cmd)
+			if status != 0:
+				print "Error : failed to run '%s'" % cmd
+				sys.exit()
+			parse_data(repo,output)
+
+def parse_data(repo,data):
+	attrname = None
+
+	for line in data.split('\n'):
+		if line.startswith('%'):
+			attrname = line.strip('%').lower()
+		elif line.strip() == '':
+			attrname = None
+		elif attrname == "invalid":
+			if repo in repos:
+				invalid_pkgbuilds.append(line)
+		elif attrname == "missing":
+			if repo in repos:
+				missing_pkgbuilds.append(line)
+		elif attrname == "name":
+			pkg = PacmanPackage()
+			pkg.name = line
+			pkg.repo = repo
+			dup = None
+			if pkg.name in packages:
+				dup = packages[pkg.name]
+			else:
+				packages[pkg.name] = pkg
+		elif attrname == "base":
+			pkg.base = line
+		elif attrname == "version":
+			pkg.version = line
+		elif attrname == "path":
+			pkg.path = line
+			if dup != None and (pkg.repo in repos or dup.repo in repos):
+				dups.append(pkg.path + " vs. " + dup.path)
+		elif attrname == "arch":
+			pkg.archs.append(line)
+		elif attrname == "depends":
+			pkg.deps.append(line)
+		elif attrname == "makedepends":
+			pkg.makedeps.append(line)
+		elif attrname == "conflicts":
+			pkg.conflicts.append(line)
+		elif attrname == "provides":
+			pkg.provides.append(line)
+
+def parse_dbs(repos,arch):
+	dbpkgs = {}
+	for repo in repos:
+		pkgs = set([])
+		db = tarfile.open(os.path.join(repodir,repo,'os',arch,repo + DBEXT))
+		for line in db.getnames():
+			if not '/' in line:
+				pkgs.add(line.rsplit('-',2)[0])
+		dbpkgs[repo] = pkgs
+	return(dbpkgs)
+
+def splitdep(dep):
+	name = dep
+	version = ""
+	mod = ""
+	for char in (">=", "<=", "=", ">", "<"):
+		pos = dep.find(char)
+		if pos > -1:
+			name = dep[:pos]
+			version = dep[pos:].replace(char, "")
+			mod = char
+			break
+	return Depend(name,version,mod)
+
+def splitprov(prov):
+	name = prov
+	version = ""
+	pos = prov.find("=")
+	if pos > -1:
+		name = prov[:pos]
+		version = prov[pos:].replace("=", "")
+	return (name,version)
+
+def vercmp(v1,mod,v2):
+	"""
+	>>> vercmp("1.0", "<=", "2.0")
+	True
+	>>> vercmp("1:1.0", ">", "2.0")
+	True
+	>>> vercmp("1.0.2", ">=", "2.1.0")
+	False
+	"""
+	s1 = ctypes.c_char_p(v1)
+	s2 = ctypes.c_char_p(v2)
+	res = _alpm.alpm_pkg_vercmp(s1,s2)
+	if res == 0:
+		return (mod.find("=") > -1)
+	elif res < 0:
+		return (mod.find("<") > -1)
+	elif res > 0:
+		return (mod.find(">") > -1)
+	return False
+
+
+def depcmp(name,version,dep):
+	if name != dep.name:
+		return False
+	if dep.version == "" or dep.mod == "":
+		return True
+	if version == "":
+		return False
+	return vercmp(version,dep.mod,dep.version)
+
+def provcmp(pkg,dep):
+	for prov in pkg.provides:
+		(provname,provver) = splitprov(prov)
+		if depcmp(provname,provver,dep):
+			return True
+	return False
+
+def verify_dep(dep):
+	dep = splitdep(dep)
+	if dep.name in packages:
+		pkg = packages[dep.name]
+		if depcmp(pkg.name,pkg.version,dep):
+			return [pkg]
+	if dep.name in provisions:
+		provlist = provisions[dep.name]
+		results = []
+		for prov in provlist:
+			if provcmp(prov,dep):
+				results.append(prov)
+		return results
+	return []
+
+def verify_deps(name,repo,deps):
+	pkg_deps = []
+	missdeps = []
+	hierarchy = []
+	for dep in deps:
+		pkglist = verify_dep(dep)
+		if pkglist == []:
+			missdeps.append(repo + "/" + name + " --> '" + dep + "'")
+		else:
+			valid_repos = get_repo_hierarchy(repo)
+			pkgdep = None
+			for pkg in pkglist:
+				if pkg.repo in valid_repos:
+					pkgdep = pkg
+					break
+			if not pkgdep:
+				pkgdep = pkglist[0]
+				hierarchy.append((repo,name,pkgdep))
+
+			pkg_deps.append(pkgdep)
+
+	return (pkg_deps,missdeps,hierarchy)
+
+def compute_deplist(pkg):
+	list = []
+	stack = [pkg]
+	while stack != []:
+		dep = stack.pop()
+		if dep in pkgdeps:
+			for dep2 in pkgdeps[dep]:
+				if dep2 not in list:
+					list.append(dep2)
+					stack.append(dep2)
+		if dep in makepkgdeps:
+			for dep2 in makepkgdeps[dep]:
+				if dep2 not in list:
+					list.append(dep2)
+					stack.append(dep2)
+	return list
+
+def check_hierarchy(deph):
+	hierarchy = []
+	for (repo,name,pkgdep) in deph:
+		deplist = compute_deplist(pkgdep)
+		valid_repos = get_repo_hierarchy(repo)
+		extdeps = []
+		for dep in deplist:
+			if dep.repo not in valid_repos:
+				extdeps.append(dep.name)
+		string = repo + "/" + name + " depends on " + pkgdep.repo + "/" + pkgdep.name + " ("
+		string += "%s extra (make)deps to pull" % len(extdeps)
+		if 0 < len(extdeps) < 10:
+			string += " : " + ' '.join(extdeps)
+		string += ")"
+		hierarchy.append(string)
+	return hierarchy
+
+def get_repo_hierarchy(repo):
+	repo_hierarchy = {'core': ['core'], \
+		'extra': ['core', 'extra'], \
+		'community': ['core', 'extra', 'community'], \
+		'multilib': ['core', 'extra', 'community', 'multilib'] }
+	if repo in repo_hierarchy:
+		return repo_hierarchy[repo]
+	else:
+		return ['core','extra','community']
+
+def verify_archs(name,repo,archs):
+	valid_archs = ['any', 'i686', 'x86_64']
+	invalid_archs = []
+	for arch in archs:
+		if arch not in valid_archs:
+			invalid_archs.append(repo + "/" + name + " --> " + arch)
+	return invalid_archs
+
+def find_scc(packages):
+	# reset all variables
+	global index,S,pkgindex,pkglowlink
+	index = 0
+	S = []
+	pkgindex = {}
+	pkglowlink = {}
+	cycles = []
+	for pkg in packages:
+		tarjan(pkg)
+
+def tarjan(pkg):
+	global index,S,pkgindex,pkglowlink,cycles
+	pkgindex[pkg] = index
+	pkglowlink[pkg] = index
+	index += 1
+	checked_deps.append(pkg)
+	S.append(pkg)
+	deps = []
+	if pkg in pkgdeps:
+		deps = pkgdeps[pkg]
+	for dep in deps:
+		if dep not in pkgindex:
+			tarjan(dep)
+			pkglowlink[pkg] = min(pkglowlink[pkg],pkglowlink[dep])
+		elif dep in S:
+			pkglowlink[pkg] = min(pkglowlink[pkg],pkgindex[dep])
+	if pkglowlink[pkg] == pkgindex[pkg]:
+		dep = S.pop()
+		if pkg == dep:
+			return
+		path = pkg.name
+		while pkg != dep:
+			path = dep.repo + "/" + dep.name + ">" + path
+			dep = S.pop()
+		path = dep.name + ">" + path
+		if pkg.repo in repos:
+			circular_deps.append(path)
+
+def print_heading(heading):
+	print ""
+	print "=" * (len(heading) + 4)
+	print "= " + heading + " ="
+	print "=" * (len(heading) + 4)
+
+def print_subheading(subheading):
+	print ""
+	print subheading
+	print "-" * (len(subheading) + 2)
+
+def print_missdeps(pkgname,missdeps) :
+	for d in missdeps:
+		print pkgname + " : " + d
+
+def print_result(list, subheading):
+	if len(list) > 0:
+		list.sort()
+		print_subheading(subheading)
+		for item in list:
+			print item
+
+def print_results():
+	print_result(missing_pkgbuilds, "Missing PKGBUILDs")
+	print_result(invalid_pkgbuilds, "Invalid PKGBUILDs")
+	print_result(mismatches, "Mismatched Pkgnames")
+	print_result(dups, "Duplicate PKGBUILDs")
+	print_result(invalid_archs, "Invalid Archs")
+	print_result(missing_deps, "Missing Dependencies")
+	print_result(missing_makedeps, "Missing Makedepends")
+	print_result(dep_hierarchy, "Repo Hierarchy for Dependencies")
+	print_result(makedep_hierarchy, "Repo Hierarchy for Makedepends")
+	print_result(circular_deps, "Circular Dependencies")
+	print_result(dbonly, "Packages found in db, but not in tree")
+	print_result(absonly,"Packages found in tree, but not in db")
+	print_subheading("Summary")
+	print "Missing PKGBUILDs:           ", len(missing_pkgbuilds)
+	print "Invalid PKGBUILDs:           ", len(invalid_pkgbuilds)
+	print "Mismatching PKGBUILD names:  ", len(mismatches)
+	print "Duplicate PKGBUILDs:         ", len(dups)
+	print "Invalid archs:               ", len(invalid_archs)
+	print "Missing (make)dependencies:  ", len(missing_deps)+len(missing_makedeps)
+	print "Repo hierarchy problems:     ", len(dep_hierarchy)+len(makedep_hierarchy)
+	print "Circular dependencies:       ", len(circular_deps)
+	print "In db, but not in tree:      ", len(dbonly)
+	print "In tree, but not in db:      ", len(absonly)
+	print ""
+
+def print_usage():
+	print ""
+	print "Usage: ./check_packages.py [OPTION]"
+	print ""
+	print "Options:"
+	print "  --abs-tree=<path>    Check the specified tree(s) (default : /var/abs)"
+	print "  --repos=<r1,r2,...>  Check the specified repos (default : core,extra)"
+	print "  --arch=<arch>        Check the specified arch (default : i686)"
+	print "  --repo-dir=<path>    Check the dbs at the specified path (default : /srv/ftp)"
+	print "  -h, --help           Show this help and exit"
+	print ""
+	print "Examples:"
+	print "\n  Check core and extra in existing abs tree:"
+	print "    ./check_packages.py --abs-tree=/var/abs --repos=core,extra --arch=i686"
+	print "\n  Check community:"
+	print "    ./check_packages.py --abs-tree=/var/abs --repos=community --arch=i686"
+	print ""
+
+if __name__ == "__main__":
+	## Default path to the abs root directory
+	absroots = ["/var/abs"]
+	## Default list of repos to check
+	repos = ['core', 'extra']
+	## Default arch
+	arch = "i686"
+	## Default repodir
+	repodir = "/srv/ftp"
+
+	try:
+		opts, args = getopt.getopt(sys.argv[1:], "", ["abs-tree=", "repos=",
+			"arch=", "repo-dir="])
+	except getopt.GetoptError:
+		print_usage()
+		sys.exit()
+	if opts != []:
+		for o, a in opts:
+			if o in ("--abs-tree"):
+				absroots = a.split(',')
+			elif o in ("--repos"):
+				repos = a.split(",")
+			elif o in ("--arch"):
+				arch = a
+			elif o in ("--repo-dir"):
+				repodir = a
+			else:
+				print_usage()
+				sys.exit()
+		if args != []:
+			print_usage()
+			sys.exit()
+
+	for absroot in absroots:
+		if not os.path.isdir(absroot):
+			print "Error : the abs tree " + absroot + " does not exist"
+			sys.exit()
+		for repo in repos:
+			repopath = absroot + "/" + repo
+			if not os.path.isdir(repopath):
+				print("Warning : the repository " + repo + " does not exist in " + absroot)
+
+	if not os.path.isdir(repodir):
+		print "Error: the repository directory %s does not exist" % repodir
+		sys.exit()
+	for repo in repos:
+		path = os.path.join(repodir,repo,'os',arch,repo + DBEXT)
+		if not os.path.isfile(path):
+			print "Error : repo DB %s : File not found" % path
+			sys.exit()
+		if not tarfile.is_tarfile(path):
+			print "Error : Cant open repo DB %s, not a valid tar file" % path
+			sys.exit()
+	# repos which need to be loaded
+	loadrepos = set([])
+	for repo in repos:
+		loadrepos = loadrepos | set(get_repo_hierarchy(repo))
+
+	print_heading("Integrity Check " + arch + " of " + ",".join(repos))
+	print("\nPerforming integrity checks...")
+
+	print("==> parsing pkgbuilds")
+	parse_pkgbuilds(loadrepos,arch)
+
+	# fill provisions
+	for name,pkg in packages.iteritems():
+		for prov in pkg.provides:
+			provname=prov.split("=")[0]
+			if provname not in provisions:
+				provisions[provname] = []
+			provisions[provname].append(pkg)
+
+	# fill repopkgs
+	for name,pkg in packages.iteritems():
+		if pkg.repo in repos:
+			repopkgs[name] = pkg
+
+	print("==> parsing db files")
+	dbpkgs = parse_dbs(repos,arch)
+
+	print("==> checking mismatches")
+	for name,pkg in repopkgs.iteritems():
+		pkgdirname = pkg.path.split("/")[-1]
+		if name != pkgdirname and pkg.base != pkgdirname:
+			mismatches.append(name + " vs. " + pkg.path)
+
+	print("==> checking archs")
+	for name,pkg in repopkgs.iteritems():
+		archs = verify_archs(name,pkg.repo,pkg.archs)
+		invalid_archs.extend(archs)
+
+	deph,makedeph = [],[]
+
+	print("==> checking dependencies")
+	for name,pkg in repopkgs.iteritems():
+		(deps,missdeps,hierarchy) = verify_deps(name,pkg.repo,pkg.deps)
+		pkgdeps[pkg] = deps
+		missing_deps.extend(missdeps)
+		deph.extend(hierarchy)
+
+	print("==> checking makedepends")
+	for name,pkg in repopkgs.iteritems():
+		(makedeps,missdeps,hierarchy) = verify_deps(name,pkg.repo,pkg.makedeps)
+		makepkgdeps[pkg] = makedeps
+		missing_makedeps.extend(missdeps)
+		makedeph.extend(hierarchy)
+
+	print("==> checking hierarchy")
+	dep_hierarchy = check_hierarchy(deph)
+	makedep_hierarchy = check_hierarchy(makedeph)
+
+	print("==> checking for circular dependencies")
+	# make sure pkgdeps is filled for every package
+	for name,pkg in packages.iteritems():
+		if pkg not in pkgdeps:
+			(deps,missdeps,_) = verify_deps(name,pkg.repo,pkg.deps)
+			pkgdeps[pkg] = deps
+	find_scc(repopkgs.values())
+
+	print("==> checking for differences between db files and pkgbuilds")
+	for repo in repos:
+		for pkg in dbpkgs[repo]:
+			if not (pkg in repopkgs and repopkgs[pkg].repo == repo):
+				dbonly.append("%s/%s" % (repo,pkg))
+	for name,pkg in repopkgs.iteritems():
+		if not name in dbpkgs[pkg.repo]:
+			absonly.append("%s/%s" % (pkg.repo,name))
+
+	print_results()
diff --git a/extra/lukeshu-xbs/cron-jobs/check_archlinux/parse_pkgbuilds.sh b/extra/lukeshu-xbs/cron-jobs/check_archlinux/parse_pkgbuilds.sh
new file mode 100755
index 0000000..b857ac8
--- /dev/null
+++ b/extra/lukeshu-xbs/cron-jobs/check_archlinux/parse_pkgbuilds.sh
@@ -0,0 +1,153 @@
+#!/bin/bash
+
+# Usage : parse_pkgbuilds.sh arch
+# Example : parse_pkgbuilds.sh i686 /var/abs/core /var/abs/extra
+
+exit() { return; }
+
+splitpkg_overrides=('depends' 'optdepends' 'provides' 'conflicts')
+variables=('pkgname' 'pkgbase' 'epoch' 'pkgver' 'pkgrel' 'makedepends' 'arch' "${splitpkg_overrides[@]}")
+readonly -a variables splitpkg_overrides
+
+backup_package_variables() {
+	for var in "${splitpkg_overrides[@]}"; do
+		indirect="${var}_backup"
+		eval "${indirect}=(\"\${$var[@]}\")"
+	done
+}
+
+restore_package_variables() {
+	for var in "${splitpkg_overrides[@]}"; do
+		indirect="${var}_backup"
+		if [ -n "${!indirect}" ]; then
+			eval "${var}=(\"\${$indirect[@]}\")"
+		else
+			unset "${var}"
+		fi
+	done
+}
+
+print_info() {
+	echo -e "%NAME%\n$pkgname\n"
+	if [ -n "$epoch" ]; then
+		echo -e "%VERSION%\n$epoch:$pkgver-$pkgrel\n"
+	else
+		echo -e "%VERSION%\n$pkgver-$pkgrel\n"
+	fi
+	echo -e "%PATH%\n$dir\n"
+
+	if [ -n "$pkgbase" ]; then
+		echo -e "%BASE%\n$pkgbase\n"
+	fi
+
+	if [ -n "$arch" ]; then
+		echo "%ARCH%"
+		for i in "${arch[@]}"; do echo "$i"; done
+		echo ""
+	fi
+	if [ -n "$depends" ]; then
+		echo "%DEPENDS%"
+		for i in "${depends[@]}"; do
+			echo "$i"
+		done
+		echo ""
+	fi
+	if [ -n "$makedepends" ]; then
+		echo "%MAKEDEPENDS%"
+		for i in "${makedepends[@]}"; do
+			echo "$i"
+		done
+		echo ""
+	fi
+	if [ -n "$conflicts" ]; then
+		echo "%CONFLICTS%"
+		for i in "${conflicts[@]}"; do echo "$i"; done
+		echo ""
+	fi
+	if [ -n "$provides" ]; then
+		echo "%PROVIDES%"
+		for i in "${provides[@]}"; do echo "$i"; done
+		echo ""
+	fi
+}
+
+source_pkgbuild() {
+	ret=0
+	dir=$1
+	pkgbuild=$dir/PKGBUILD
+	for var in "${variables[@]}"; do
+		unset "${var}"
+	done
+	source "$pkgbuild" &>/dev/null || ret=$?
+ + # ensure $pkgname and $pkgver variables were found + if [ $ret -ne 0 -o -z "$pkgname" -o -z "$pkgver" ]; then + echo -e "%INVALID%\n$pkgbuild\n" + return 1 + fi + + if [ "${#pkgname[@]}" -gt "1" ]; then + pkgbase=${pkgbase:-${pkgname[0]}} + for pkg in "${pkgname[@]}"; do + if [ "$(type -t "package_${pkg}")" != "function" ]; then + echo -e "%INVALID%\n$pkgbuild\n" + return 1 + else + backup_package_variables + pkgname=$pkg + while IFS= read -r line; do + var=${line%%=*} + var="${var#"${var%%[![:space:]]*}"}" # remove leading whitespace characters + for realvar in "${variables[@]}"; do + if [ "$var" == "$realvar" ]; then + eval $line + break + fi + done + done < <(type "package_${pkg}") + print_info + restore_package_variables + fi + done + else + echo + print_info + fi + + return 0 +} + +find_pkgbuilds() { + #Skip over some dirs + local d="${1##*/}" + if [ "$d" = "CVS" -o "$d" = ".svn" ]; then + return + fi + + if [ -f "$1/PKGBUILD" ]; then + source_pkgbuild "$1" + return + fi + empty=1 + for dir in "$1"/*; do + if [ -d "$dir" ]; then + find_pkgbuilds "$dir" + unset empty + fi + done + if [ -n "$empty" ]; then + echo -e "%MISSING%\n$1\n" + fi +} + +if [ -z "$1" -o -z "$2" ]; then + exit 1 +fi + +CARCH=$1 +shift +for dir in "$@"; do + find_pkgbuilds "$dir" +done + +exit 0 diff --git a/extra/lukeshu-xbs/cron-jobs/devlist-mailer b/extra/lukeshu-xbs/cron-jobs/devlist-mailer new file mode 100755 index 0000000..7f298b9 --- /dev/null +++ b/extra/lukeshu-xbs/cron-jobs/devlist-mailer @@ -0,0 +1,27 @@ +#!/bin/bash +#Dummy helper to send email to arch-dev +# It does nothing if no output + +# Load $LIST and $FROM from the config file +. "$(dirname "$(readlink -e "$0")")/../config" + +SUBJECT="Repository Maintenance $(date +"%d-%m-%Y")" +if [ $# -ge 1 ]; then + SUBJECT="$1 $(date +"%d-%m-%Y")" +fi + +if [ $# -ge 2 ]; then + LIST="$2" +fi + +stdin="$(cat)" +#echo used to strip whitespace for checking for actual data +if [ -n "$(echo $stdin)" ]; then + + echo "Subject: $SUBJECT +To: $LIST +From: $FROM + +$stdin" | /usr/sbin/sendmail -F"$FROM" "$LIST" + +fi diff --git a/extra/lukeshu-xbs/cron-jobs/ftpdir-cleanup b/extra/lukeshu-xbs/cron-jobs/ftpdir-cleanup new file mode 100755 index 0000000..4063c09 --- /dev/null +++ b/extra/lukeshu-xbs/cron-jobs/ftpdir-cleanup @@ -0,0 +1,96 @@ +#!/bin/bash + +. "$(dirname "$(readlink -e "$0")")/../config" +. "$(dirname "$(readlink -e "$0")")/../db-functions" + +clean_pkg() { + local pkg + local target + + if ! "${CLEANUP_DRYRUN}"; then + for pkg in "$@"; do + if [ -h "$pkg" ]; then + rm -f "$pkg" "$pkg.sig" + else + mv_acl "$pkg" "$CLEANUP_DESTDIR/${pkg##*/}" + if [ -e "$pkg.sig" ]; then + mv_acl "$pkg.sig" "$CLEANUP_DESTDIR/${pkg##*/}.sig" + fi + touch "${CLEANUP_DESTDIR}/${pkg##*/}" + fi + done + fi +} + +script_lock + +for repo in "${PKGREPOS[@]}"; do + for arch in "${ARCHES[@]}"; do + repo_lock "${repo}" "${arch}" || exit 1 + done +done + +"${CLEANUP_DRYRUN}" && warning 'dry run mode is active' + +for repo in "${PKGREPOS[@]}"; do + for arch in "${ARCHES[@]}"; do + if [ ! 
-f "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" ]; then + continue + fi + # get a list of actual available package files + find "${FTP_BASE}/${repo}/os/${arch}" -xtype f -name "*${PKGEXT}" -printf '%f\n' | sort > "${WORKDIR}/repo-${repo}-${arch}" + # get a list of package files defined in the repo db + bsdtar -xOf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" | awk '/^%FILENAME%/{getline;print}' | sort > "${WORKDIR}/db-${repo}-${arch}" + + missing_pkgs=($(comm -13 "${WORKDIR}/repo-${repo}-${arch}" "${WORKDIR}/db-${repo}-${arch}")) + if [ ${#missing_pkgs[@]} -ge 1 ]; then + error "Missing packages in [%s] (%s)..." "${repo}" "${arch}" + for missing_pkg in "${missing_pkgs[@]}"; do + msg2 '%s' "${missing_pkg}" + done + fi + + old_pkgs=($(comm -23 "${WORKDIR}/repo-${repo}-${arch}" "${WORKDIR}/db-${repo}-${arch}")) + if [ ${#old_pkgs[@]} -ge 1 ]; then + msg "Removing old packages from [%s] (%s)..." "${repo}" "${arch}" + for old_pkg in "${old_pkgs[@]}"; do + msg2 '%s' "${old_pkg}" + clean_pkg "${FTP_BASE}/${repo}/os/${arch}/${old_pkg}" + done + fi + done +done + +# get a list of all available packages in the pacakge pool +find "$FTP_BASE/${PKGPOOL}" -name "*${PKGEXT}" -printf '%f\n' | sort > "${WORKDIR}/pool" +# create a list of packages in our db +find "${WORKDIR}" -maxdepth 1 -type f -name 'db-*' -exec cat {} \; | sort -u > "${WORKDIR}/db" + +old_pkgs=($(comm -23 "${WORKDIR}/pool" "${WORKDIR}/db")) +if [ ${#old_pkgs[@]} -ge 1 ]; then + msg "Removing old packages from package pool..." + for old_pkg in "${old_pkgs[@]}"; do + msg2 '%s' "${old_pkg}" + clean_pkg "$FTP_BASE/${PKGPOOL}/${old_pkg}" + done +fi + +old_pkgs=($(find "${CLEANUP_DESTDIR}" -type f -name "*${PKGEXT}" -mtime +"${CLEANUP_KEEP}" -printf '%f\n')) +if [ ${#old_pkgs[@]} -ge 1 ]; then + msg "Removing old packages from the cleanup directory..." + for old_pkg in "${old_pkgs[@]}"; do + msg2 '%s' "${old_pkg}" + if ! "${CLEANUP_DRYRUN}"; then + rm -f "${CLEANUP_DESTDIR}/${old_pkg}" + rm -f "${CLEANUP_DESTDIR}/${old_pkg}.sig" + fi + done +fi + +for repo in "${PKGREPOS[@]}"; do + for arch in "${ARCHES[@]}"; do + repo_unlock "${repo}" "${arch}" + done +done + +script_unlock diff --git a/extra/lukeshu-xbs/cron-jobs/integrity-check b/extra/lukeshu-xbs/cron-jobs/integrity-check new file mode 100755 index 0000000..7459380 --- /dev/null +++ b/extra/lukeshu-xbs/cron-jobs/integrity-check @@ -0,0 +1,32 @@ +#!/bin/bash + +dirname="$(dirname "$(readlink -e "$0")")" + +. "${dirname}/../config" +. 
"${dirname}/../db-functions" + +script_lock + +if [ $# -ne 1 ]; then + die "usage: %s " "${0##*/}" +fi +mailto=$1 + +check() { + "${dirname}"/check_archlinux/check_packages.py \ + --repos="${repos}" \ + --abs-tree="/srv/abs/rsync/${arch},/srv/abs/rsync/any" \ + --repo-dir="${FTP_BASE}" \ + --arch="${arch}" \ + 2>&1 | "${dirname}"/devlist-mailer "Integrity Check ${arch}: ${repos}" "${mailto}" +} + +repos='core,extra,community' +arch='i686' +check + +repos='core,extra,community,multilib' +arch='x86_64' +check + +script_unlock diff --git a/extra/lukeshu-xbs/cron-jobs/make_repo_torrents b/extra/lukeshu-xbs/cron-jobs/make_repo_torrents new file mode 100755 index 0000000..2eb0978 --- /dev/null +++ b/extra/lukeshu-xbs/cron-jobs/make_repo_torrents @@ -0,0 +1,70 @@ +#!/bin/bash +# Copyright (C) 2014 Joseph Graham +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +# This script finds any updated packages and calls +# `make_indivudual_torrent' for each of them. + +username=$( id -un ) + +case "${username}" in + repo | root ) + true + ;; + * ) + echo "This script must be run as repo user or root user." + echo "ByeBye!" + exit 1 + ;; +esac + +# pacman doesn't support multiple different packages of the same name, +# so it's OK to just stuff all the torrents into a single directory. +script_directory="$(dirname "$(readlink -e "$0")")/.." +. "$(dirname "$(readlink -e "$0")")/../config" +public_location="$FTP_BASE/" +torrent_location="$FTP_BASE/torrents/" + +cd "${torrent_location}" + +# Find any directories that might have packages in then +find "${public_location}" -name 'os' -type 'd' | +while read dir +do + # Find any packages + find "${dir}" -name '*\.pkg\.tar\.xz' | + while read pkg + do + pkg_name="${pkg##*/}" + + if [[ -h "${pkg}" ]] # check if it's a symbolic link + then + # We get the target of the symlink + pkg=$( readlink -f "${pkg}" ) + fi + + # If a .torrent file does not already exist for this package, we call + # `make_individual_torrent' to make it. + if ! [[ -f "${torrent_location}${pkg_name}.torrent" ]] + then + "$script_directory/make_individual_torrent" "${pkg}" "${public_location}" + fi + done +done + +if [[ "${username}" == root ]] +then + chown repo * +fi diff --git a/extra/lukeshu-xbs/cron-jobs/sourceballs b/extra/lukeshu-xbs/cron-jobs/sourceballs new file mode 100755 index 0000000..c02912a --- /dev/null +++ b/extra/lukeshu-xbs/cron-jobs/sourceballs @@ -0,0 +1,150 @@ +#!/bin/bash + +dirname="$(dirname "$(readlink -e "$0")")" +. "${dirname}/../config" +. "${dirname}/../db-functions" +pushd "${WORKDIR}" >/dev/null + +script_lock + +for repo in "${PKGREPOS[@]}"; do + for arch in "${ARCHES[@]}"; do + repo_lock "${repo}" "${arch}" || exit 1 + done +done + +#adjust the nice level to run at a lower priority +renice +10 -p $$ > /dev/null + +# Create a readable file for each repo with the following format +# - [ ] +for repo in "${PKGREPOS[@]}"; do + for arch in "${ARCHES[@]}"; do + # Repo does not exist; skip it + if [ ! 
-f "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" ]; then + continue + fi + bsdtar -xOf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" \ + | awk '/^%NAME%/ { getline b }; + /^%BASE%/ { getline b }; + /^%VERSION%/ { getline v }; + /^%LICENSE%/,/^$/ { + if ( !/^%LICENSE%/ ) { l=l" "$0 } + }; + /^%ARCH%/ { + getline a; + printf "%s %s %s %s\n", b, v, a, l; + l=""; + }' + done | sort -u > "${WORKDIR}/db-${repo}" +done + +for repo in "${PKGREPOS[@]}"; do + for arch in "${ARCHES[@]}"; do + repo_unlock "${repo}" "${arch}" + done +done + +# Create a list of all available source package file names +find "${FTP_BASE}/${SRCPOOL}" -xtype f -name "*${SRCEXT}" -printf '%f\n' | sort -u > "${WORKDIR}/available-src-pkgs" + +# Check for all packages if we need to build a source package +for repo in "${PKGREPOS[@]}"; do + newpkgs=() + failedpkgs=() + while read line; do + pkginfo=("${line}") + pkgbase=${pkginfo[0]} + pkgver=${pkginfo[1]} + pkgarch=${pkginfo[2]} + pkglicense=("${pkginfo[@]:3}") + + # Should this package be skipped? + if grep -Fqx "${pkgbase}" "${dirname}/sourceballs.skip"; then + continue + fi + # Check if the license or .force file does not enforce creating a source package + if ! ([[ -z ${ALLOWED_LICENSES[*]} ]] || chk_license "${pkglicense[@]}" || grep -Fqx "${pkgbase}" "${dirname}/sourceballs.force"); then + continue + fi + # Store the expected file name of the source package + echo "${pkgbase}-${pkgver}${SRCEXT}" >> "${WORKDIR}/expected-src-pkgs" + + # Build the source package if its not already there + if ! grep -Fqx "${pkgbase}-${pkgver}${SRCEXT}" "${WORKDIR}/available-src-pkgs"; then + # Check if we had failed before + if in_array "${pkgbase}-${pkgver}${SRCEXT}" "${failedpkgs[@]}"; then + continue + fi + + # Get the sources from xbs + mkdir -p -m0770 "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}" + cp -a "$(xbs releasepath "${pkgbase}" "${repo}" "${pkgarch}")" \ + "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null 2>&1 + if [ $? -ge 1 ]; then + failedpkgs+=("${pkgbase}-${pkgver}${SRCEXT}") + continue + fi + + # Build the actual source package + pushd "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null + SRCPKGDEST=. makepkg --nocolor --allsource --ignorearch --skippgpcheck >"${WORKDIR}/${pkgbase}.log" 2>&1 + if [ $? -eq 0 ] && [ -f "${pkgbase}-${pkgver}${SRCEXT}" ]; then + mv_acl "${pkgbase}-${pkgver}${SRCEXT}" "${FTP_BASE}/${SRCPOOL}/${pkgbase}-${pkgver}${SRCEXT}" + # Avoid creating the same source package for every arch + echo "${pkgbase}-${pkgver}${SRCEXT}" >> "${WORKDIR}/available-src-pkgs" + newpkgs+=("${pkgbase}-${pkgver}${SRCEXT}") + else + failedpkgs+=("${pkgbase}-${pkgver}${SRCEXT}") + cat "${WORKDIR}/${pkgbase}.log" >> "${WORKDIR}/makepkg-fail.log" + fi + popd >/dev/null + fi + done < "${WORKDIR}/db-${repo}" + + if [ ${#newpkgs[@]} -ge 1 ]; then + msg "Adding source packages for [%s]..." "${repo}" + for new_pkg in "${newpkgs[@]}"; do + msg2 '%s' "${new_pkg}" + done + fi + if [ ${#failedpkgs[@]} -ge 1 ]; then + msg "Failed to create source packages for [%s]..." 
"${repo}" + for failed_pkg in "${failedpkgs[@]}"; do + msg2 '%s' "${failed_pkg}" + done + fi +done + +# Cleanup old source packages +find "${WORKDIR}" -maxdepth 1 -type f -name 'expected-src-pkgs' -exec cat {} \; | sort -u > "${WORKDIR}/expected-src-pkgs.sort" +find "${WORKDIR}" -maxdepth 1 -type f -name 'available-src-pkgs' -exec cat {} \; | sort -u > "${WORKDIR}/available-src-pkgs.sort" +old_pkgs=($(comm -23 "${WORKDIR}/available-src-pkgs.sort" "${WORKDIR}/expected-src-pkgs.sort")) + +if [ ${#old_pkgs[@]} -ge 1 ]; then + msg "Removing old source packages..." + "${SOURCE_CLEANUP_DRYRUN}" && warning 'dry run mode is active' + for old_pkg in "${old_pkgs[@]}"; do + msg2 '%s' "${old_pkg}" + if ! "${SOURCE_CLEANUP_DRYRUN}"; then + mv_acl "$FTP_BASE/${SRCPOOL}/${old_pkg}" "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}" + touch "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}" + fi + done +fi + +old_pkgs=($(find "${SOURCE_CLEANUP_DESTDIR}" -type f -name "*${SRCEXT}" -mtime +"${SOURCE_CLEANUP_KEEP}" -printf '%f\n')) +if [ ${#old_pkgs[@]} -ge 1 ]; then + msg "Removing old source packages from the cleanup directory..." + for old_pkg in "${old_pkgs[@]}"; do + msg2 '%s' "${old_pkg}" + "${SOURCE_CLEANUP_DRYRUN}" || rm -f "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}" + done +fi + +if [ -f "${WORKDIR}/makepkg-fail.log" ]; then + msg "Log of failed packages" + cat "${WORKDIR}/makepkg-fail.log" +fi + +script_unlock diff --git a/extra/lukeshu-xbs/cron-jobs/sourceballs.force b/extra/lukeshu-xbs/cron-jobs/sourceballs.force new file mode 100644 index 0000000..badf15d --- /dev/null +++ b/extra/lukeshu-xbs/cron-jobs/sourceballs.force @@ -0,0 +1,4 @@ +faad2 +wxgtk +wxpython +glhack diff --git a/extra/lukeshu-xbs/cron-jobs/sourceballs.skip b/extra/lukeshu-xbs/cron-jobs/sourceballs.skip new file mode 100644 index 0000000..0e1731c --- /dev/null +++ b/extra/lukeshu-xbs/cron-jobs/sourceballs.skip @@ -0,0 +1,43 @@ +0ad-data +alienarena-data +blobwars-data +btanks-data +dangerdeep-data +egoboo-data +fillets-ng-data +flightgear-data +frogatto-data +gcompris-data +naev-data +openarena-data +rocksndiamonds-data +smc-data +speed-dreams-data +torcs-data +tremulous-data +ufoai-data +vdrift-data +warmux-data +wesnoth-data +widelands-data +xonotic-data +texlive-bibtexextra +texlive-bin +texlive-core +texlive-fontsextra +texlive-formatsextra +texlive-games +texlive-genericextra +texlive-htmlxml +texlive-humanities +texlive-langcjk +texlive-langcyrillic +texlive-langextra +texlive-langgreek +texlive-latexextra +texlive-music +texlive-pictures +texlive-plainextra +texlive-pstricks +texlive-publishers +texlive-science diff --git a/extra/lukeshu-xbs/db-check-package-libraries.py b/extra/lukeshu-xbs/db-check-package-libraries.py new file mode 100755 index 0000000..bc2349b --- /dev/null +++ b/extra/lukeshu-xbs/db-check-package-libraries.py @@ -0,0 +1,193 @@ +#!/usr/bin/env python3 +# Copyright (C) 2012 Michał Masłowski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ + +""" +Check which libraries are provided or required by a package, store +this in a database, update and list broken packages. + +Dependencies: + +- Python 3.2 or later with SQLite 3 support + +- ``bsdtar`` + +- ``readelf`` +""" + + +import os.path +import re +import sqlite3 +import subprocess +import tempfile + + +#: Regexp matching an interesting dynamic entry. +_DYNAMIC = re.compile(r"^\s*[0-9a-fx]+" + "\s*\((NEEDED|SONAME)\)[^:]*:\s*\[(.+)\]$") + + +def make_db(path): + """Make a new, empty, library database at *path*.""" + con = sqlite3.connect(path) + con.executescript(""" +create table provided( + library varchar not null, + package varchar not null +); +create table used( + library varchar not null, + package varchar not null +); +""") + con.close() + + +def begin(database): + """Connect to *database* and start a transaction.""" + con = sqlite3.connect(database) + con.execute("begin exclusive") + return con + + +def add_provided(con, package, libraries): + """Write that *package* provides *libraries*.""" + for library in libraries: + con.execute("insert into provided (package, library) values (?,?)", + (package, library)) + + +def add_used(con, package, libraries): + """Write that *package* uses *libraries*.""" + for library in libraries: + con.execute("insert into used (package, library) values (?,?)", + (package, library)) + + +def remove_package(con, package): + """Remove all entries for a package.""" + con.execute("delete from provided where package=?", (package,)) + con.execute("delete from used where package=?", (package,)) + + +def add_package(con, package): + """Add entries from a named *package*.""" + # Extract to a temporary directory. This could be done more + # efficiently, since there is no need to store more than one file + # at once. + with tempfile.TemporaryDirectory() as temp: + tar = subprocess.Popen(("bsdtar", "xf", package, "-C", temp)) + tar.communicate() + with open(os.path.join(temp, ".PKGINFO")) as pkginfo: + for line in pkginfo: + if line.startswith("pkgname ="): + pkgname = line[len("pkgname ="):].strip() + break + # Don't list previously removed libraries. + remove_package(con, pkgname) + provided = set() + used = set() + # Search for ELFs. 
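+        # A dynamic-section line we care about looks roughly like this
+        # (sample `readelf -d` output):
+        #   0x0000000000000001 (NEEDED)  Shared library: [libc.so.6]
+        # _DYNAMIC captures the tag (NEEDED or SONAME) and the name
+        # between the square brackets.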
+ for dirname, dirnames, filenames in os.walk(temp): + assert dirnames is not None # unused, avoid pylint warning + for file_name in filenames: + path = os.path.join(dirname, file_name) + with open(path, "rb") as file_object: + if file_object.read(4) != b"\177ELF": + continue + readelf = subprocess.Popen(("readelf", "-d", path), + stdout=subprocess.PIPE) + for line in readelf.communicate()[0].split(b"\n"): + match = _DYNAMIC.match(line.decode("ascii")) + if match: + if match.group(1) == "SONAME": + provided.add(match.group(2)) + elif match.group(1) == "NEEDED": + used.add(match.group(2)) + else: + raise AssertionError("unknown entry type " + + match.group(1)) + add_provided(con, pkgname, provided) + add_used(con, pkgname, used) + + +def init(arguments): + """Initialize.""" + make_db(arguments.database) + + +def add(arguments): + """Add packages.""" + con = begin(arguments.database) + for package in arguments.packages: + add_package(con, package) + con.commit() + con.close() + + +def remove(arguments): + """Remove packages.""" + con = begin(arguments.database) + for package in arguments.packages: + remove_package(con, package) + con.commit() + con.close() + + +def check(arguments): + """List broken packages.""" + con = begin(arguments.database) + available = set(row[0] for row + in con.execute("select library from provided")) + for package, library in con.execute("select package, library from used"): + if library not in available: + print(package, "needs", library) + con.close() + + +def main(): + """Get arguments and run the command.""" + from argparse import ArgumentParser + parser = ArgumentParser(prog="check-package-libraries.py", + description="Check packages for " + "provided/needed libraries") + parser.add_argument("-d", "--database", type=str, + help="Database file to use", + default="package-libraries.sqlite") + subparsers = parser.add_subparsers() + subparser = subparsers.add_parser(name="init", + help="initialize the database") + subparser.set_defaults(command=init) + subparser = subparsers.add_parser(name="add", + help="add packages to database") + subparser.add_argument("packages", nargs="+", type=str, + help="package files to add") + subparser.set_defaults(command=add) + subparser = subparsers.add_parser(name="remove", + help="remove packages from database") + subparser.add_argument("packages", nargs="+", type=str, + help="package names to remove") + subparser.set_defaults(command=remove) + subparser = subparsers.add_parser(name="check", + help="list broken packages") + subparser.set_defaults(command=check) + arguments = parser.parse_args() + arguments.command(arguments) + + +if __name__ == "__main__": + main() diff --git a/extra/lukeshu-xbs/db-functions b/extra/lukeshu-xbs/db-functions new file mode 100644 index 0000000..62260bb --- /dev/null +++ b/extra/lukeshu-xbs/db-functions @@ -0,0 +1,456 @@ +#!/hint/bash + +# Some PKGBUILDs need CARCH to be set +CARCH=$(. 
"$(librelib conf.sh)"; load_files makepkg; echo "$CARCH") + +# Useful functions +UMASK="" +set_umask () { + [ "$UMASK" == "" ] && UMASK="$(umask)" + export UMASK + umask 002 +} + +restore_umask () { + umask "$UMASK" >/dev/null +} + +# just like mv -f, but we touch the file and then copy the content so +# default ACLs in the target dir will be applied +mv_acl() { + rm -f "$2" + touch "$2" + cat "$1" >"$2" || return 1 + rm -f "$1" +} + +# set up general environment +WORKDIR=$(mktemp -dt "${0##*/}.XXXXXXXXXX") +if [ -n "${SVNUSER}" ]; then + setfacl -m u:"${SVNUSER}":rwx "${WORKDIR}" + setfacl -m d:u:"${USER}":rwx "${WORKDIR}" + setfacl -m d:u:"${SVNUSER}":rwx "${WORKDIR}" +fi +LOCKS=() +REPO_MODIFIED=0 + +# Used: plain, msg, msg2, warning, error, in_array, get_full_version, abort, die +# Overwritten: cleanup +# Ignored: stat_busy, stat_done, +# setup_workdir, trap_abort, trap_exit, +# lock, slock, lock_close +# pkgver_equal, find_cached_package, check_root +. "$(librelib common)" + +script_lock() { + local LOCKDIR="$TMPDIR/.scriptlock.${0##*/}" + if ! mkdir "$LOCKDIR" >/dev/null 2>&1 ; then + local _owner="$(/usr/bin/stat -c %U "$LOCKDIR")" + error "Script %s is already locked by %s." "${0##*/}" "$_owner" + exit 1 + else + set_umask + return 0 + fi +} + +script_unlock() { + local LOCKDIR="$TMPDIR/.scriptlock.${0##*/}" + if [ ! -d "$LOCKDIR" ]; then + warning "Script %s was not locked!" "${0##*/}" + restore_umask + return 1 + else + rmdir "$LOCKDIR" + restore_umask + return 0 + fi +} + +cleanup() { + local l + local repo + local arch + + trap - EXIT INT QUIT TERM + for l in "${LOCKS[@]}"; do + repo=${l%.*} + arch=${l#*.} + if [ -d "$TMPDIR/.repolock.$repo.$arch" ]; then + msg "Removing left over lock from [%s] (%s)" "${repo}" "${arch}" + repo_unlock "$repo" "$arch" + fi + done + if [ -d "$TMPDIR/.scriptlock.${0##*/}" ]; then + msg "Removing left over lock from %s" "${0##*/}" + script_unlock + fi + rm -rf "$WORKDIR" + + if (( REPO_MODIFIED )); then + date +%s > "${FTP_BASE}/lastupdate" + fi + + [ "$1" ] && exit "$1" +} + +trap abort INT QUIT TERM HUP +trap cleanup EXIT + + +#repo_lock [timeout] +repo_lock () { + local LOCKDIR="$TMPDIR/.repolock.$1.$2" + local DBLOCKFILE="${FTP_BASE}/${1}/os/${2}/${1}${DBEXT}.lck" + local FILESLOCKFILE="${FTP_BASE}/${1}/os/${2}/${1}${FILESEXT}.lck" + local _count + local _trial + local _timeout + local _lockblock + local _owner + + # This is the lock file used by repo-add and repo-remove + if [ -f "${DBLOCKFILE}" ]; then + error "Repo [%s] (%s) is already locked by repo-{add,remove} process %s" "$1" "$2" "$(<"$DBLOCKFILE")" + return 1 + fi + if [ -f "${FILESLOCKFILE}" ]; then + error "Repo [%s] (%s) is already locked by repo-{add,remove} process %s" "$1" "$2" "$(<"$FILESLOCKFILE")" + return 1 + fi + + if [ $# -eq 2 ]; then + _lockblock=true + _trial=0 + elif [ $# -eq 3 ]; then + _lockblock=false + _timeout=$3 + let _trial=$_timeout/$LOCK_DELAY + fi + + _count=0 + while [ "$_count" -le "$_trial" ] || "$_lockblock" ; do + if ! mkdir "$LOCKDIR" >/dev/null 2>&1 ; then + _owner="$(/usr/bin/stat -c %U "$LOCKDIR")" + warning "Repo [%s] (%s) is already locked by %s." "${1}" "${2}" "$_owner" + msg2 "Retrying in %d seconds..." "$LOCK_DELAY" + else + LOCKS+=("$1.$2") + set_umask + return 0 + fi + sleep "$LOCK_DELAY" + let _count=$_count+1 + done + + error "Repo [%s] (%s) is already locked by %s. Giving up!" "${1}" "${2}" "$_owner" + return 1 +} + +repo_unlock () { #repo_unlock + local LOCKDIR="$TMPDIR/.repolock.$1.$2" + if [ ! 
-d "$LOCKDIR" ]; then + warning "Repo lock [%s] (%s) was not locked!" "${1}" "${2}" + restore_umask + return 1 + else + rmdir "$LOCKDIR" + restore_umask + return 0 + fi +} + +# usage: _grep_pkginfo pkgfile pattern +_grep_pkginfo() { + local _ret + + _ret="$(/usr/bin/bsdtar -xOqf "$1" .PKGINFO | grep -m 1 "^${2} = ")" + echo "${_ret#${2} = }" +} + + +# Get the package base or name as fallback +getpkgbase() { + local _base + + _base="$(_grep_pkginfo "$1" "pkgbase")" + if [ -z "$_base" ]; then + getpkgname "$1" + else + echo "$_base" + fi +} + +issplitpkg() { + local _base + + _base="$(_grep_pkginfo "$1" "pkgbase")" + if [ -z "$_base" ]; then + return 1 + else + return 0 + fi +} + +# Get the package name +getpkgname() { + local _name + + _name="$(_grep_pkginfo "$1" "pkgname")" + if [ -z "$_name" ]; then + error "Package '%s' has no pkgname in the PKGINFO. Fail!" "$1" + exit 1 + fi + + echo "$_name" +} + +# Get the pkgver-pkgrel of this package +getpkgver() { + local _ver + + _ver="$(_grep_pkginfo "$1" "pkgver")" + if [ -z "$_ver" ]; then + error "Package '%s' has no pkgver in the PKGINFO. Fail!" "$1" + exit 1 + fi + + echo "$_ver" +} + +getpkgarch() { + local _ver + + _ver="$(_grep_pkginfo "$1" "arch")" + if [ -z "$_ver" ]; then + error "Package '%s' has no arch in the PKGINFO. Fail!" "$1" + exit 1 + fi + + echo "$_ver" +} + +check_packager() { + local _packager + + _packager=$(_grep_pkginfo "$1" "packager") + [[ $_packager && $_packager != 'Unknown Packager' ]] +} + +getpkgfile() { + if [[ ${#} -ne 1 ]]; then + error 'No canonical package found!' + exit 1 + elif [ ! -f "${1}" ]; then + error "Package %s not found!" "${1}" + exit 1 + elif "${REQUIRE_SIGNATURE}" && [ ! -f "${1}.sig" ]; then + error "Package signature %s not found!" "${1}.sig" + exit 1 + fi + + echo "${1}" +} + +getpkgfiles() { + local f + if [ ! -z "$(printf '%s\n' "${@%\.*}" | sort | uniq -D)" ]; then + error 'Duplicate packages found!' + exit 1 + fi + + for f in "${@}"; do + if [ ! -f "${f}" ]; then + error "Package %s not found!" "${f}" + exit 1 + elif "${REQUIRE_SIGNATURE}" && [ ! -f "${f}.sig" ]; then + error "Package signature %s not found!" "${f}.sig" + exit 1 + fi + done + + echo "${@}" +} + +check_pkgfile() { + local pkgfile=$1 + + local pkgname="$(getpkgname "${pkgfile}")" + [ $? -ge 1 ] && return 1 + local pkgver="$(getpkgver "${pkgfile}")" + [ $? -ge 1 ] && return 1 + local pkgarch="$(getpkgarch "${pkgfile}")" + [ $? -ge 1 ] && return 1 + + in_array "${pkgarch}" "${ARCHES[@]}" 'any' || return 1 + + if echo "${pkgfile##*/}" | grep -q "${pkgname}-${pkgver}-${pkgarch}"; then + return 0 + else + return 1 + fi +} + +check_pkgxbs() { + local pkgfile="${1}" + local _pkgbase="$(getpkgbase "${pkgfile}")" + [ $? -ge 1 ] && return 1 + local _pkgname="$(getpkgname "${pkgfile}")" + [ $? -ge 1 ] && return 1 + local _pkgver="$(getpkgver "${pkgfile}")" + [ $? -ge 1 ] && return 1 + local _pkgarch="$(getpkgarch "${pkgfile}")" + [ $? -ge 1 ] && return 1 + local repo="${2}" + + in_array "${repo}" "${PKGREPOS[@]}" || return 1 + + local xbsver="$(. "$(xbs releasepath "${_pkgbase}" "${repo}" "${_pkgarch}")/PKGBUILD"; get_full_version "${_pkgname}")" + [ "${xbsver}" == "${_pkgver}" ] || return 1 + + local xbsnames=($(. 
"$(xbs releasepath "${_pkgbase}" "${repo}" "${_pkgarch}")/PKGBUILD"; echo "${pkgname[@]}")) + in_array "${_pkgname}" "${xbsnames[@]}" || return 1 + + return 0 +} + +check_splitpkgs() { + local repo="${1}" + shift + local pkgfiles=("${@}") + local pkgfile + local pkgdir + local xbsname + + mkdir -p "${WORKDIR}/check_splitpkgs/" + pushd "${WORKDIR}/check_splitpkgs" >/dev/null + + for pkgfile in "${pkgfiles[@]}"; do + issplitpkg "${pkgfile}" || continue + local _pkgbase="$(getpkgbase "${pkgfile}")" + local _pkgname="$(getpkgname "${pkgfile}")" + local _pkgarch="$(getpkgarch "${pkgfile}")" + mkdir -p "${repo}/${_pkgarch}/${_pkgbase}" + echo "${_pkgname}" >> "${repo}/${_pkgarch}/${_pkgbase}/staging" + + local xbsnames=($(. "$(xbs releasepath "${_pkgbase}" "${repo}" "${_pkgarch}")/PKGBUILD"; echo "${pkgname[@]}")) + printf '%s\n' "${xbsnames[@]}" >> "${repo}/${_pkgarch}/${_pkgbase}/xbs" + done + popd >/dev/null + + for pkgdir in "${WORKDIR}/check_splitpkgs/${repo}"/*/*; do + [ ! -d "${pkgdir}" ] && continue + sort -u "${pkgdir}/staging" -o "${pkgdir}/staging" + sort -u "${pkgdir}/xbs" -o "${pkgdir}/xbs" + if [ ! -z "$(comm -13 "${pkgdir}/staging" "${pkgdir}/xbs")" ]; then + return 1 + fi + done + + return 0 +} + +check_pkgrepos() { + local pkgfile=$1 + + local pkgname="$(getpkgname "${pkgfile}")" + [ $? -ge 1 ] && return 1 + local pkgver="$(getpkgver "${pkgfile}")" + [ $? -ge 1 ] && return 1 + local pkgarch="$(getpkgarch "${pkgfile}")" + [ $? -ge 1 ] && return 1 + + [ -f "${FTP_BASE}/${PKGPOOL}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT} ] && return 1 + [ -f "${FTP_BASE}/${PKGPOOL}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT}.sig ] && return 1 + [ -f "${FTP_BASE}/${PKGPOOL}/${pkgfile##*/}" ] && return 1 + [ -f "${FTP_BASE}/${PKGPOOL}/${pkgfile##*/}.sig" ] && return 1 + + return 0 +} + +#usage: chk_license ${license[@]}" +chk_license() { + local l + for l in "${@}"; do + in_array "${l}" "${ALLOWED_LICENSES[@]}" && return 0 + done + + return 1 +} + +check_repo_permission() { + local repo=$1 + + [ ${#PKGREPOS[@]} -eq 0 ] && return 1 + [ -z "${PKGPOOL}" ] && return 1 + + in_array "${repo}" "${PKGREPOS[@]}" || return 1 + + [ -w "$FTP_BASE/${PKGPOOL}" ] || return 1 + + local arch + for arch in "${ARCHES[@]}"; do + local dir="${FTP_BASE}/${repo}/os/${arch}/" + [ -w "${dir}" ] || return 1 + [ -f "${dir}${repo}"${DBEXT} -a ! -w "${dir}${repo}"${DBEXT} ] && return 1 + [ -f "${dir}${repo}"${FILESEXT} -a ! 
-w "${dir}${repo}"${FILESEXT} ] && return 1 + done + + return 0 +} + +set_repo_permission() { + local repo=$1 + local arch=$2 + local dbfile="${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" + local filesfile="${FTP_BASE}/${repo}/os/${arch}/${repo}${FILESEXT}" + + if [ -w "${dbfile}" ]; then + local group=$(/usr/bin/stat --printf='%G' "$(dirname "${dbfile}")") + chgrp "$group" "${dbfile}" || error "Could not change group of %s to %s" "${dbfile}" "$group" + chgrp "$group" "${filesfile}" || error "Could not change group of %s to %s" "${filesfile}" "$group" + chmod g+w "${dbfile}" || error "Could not set write permission for group %s to %s" "$group" "${dbfile}" + chmod g+w "${filesfile}" || error "Could not set write permission for group %s to %s" "$group" "${filesfile}" + else + error "You don't have permission to change %s" "${dbfile}" + fi +} + +arch_repo_add() { + local repo=$1 + local arch=$2 + local pkgs=("${@:3}") + + printf -v pkgs_str -- '%q ' "${pkgs[@]}" + # package files might be relative to repo dir + pushd "${FTP_BASE}/${repo}/os/${arch}" >/dev/null + /usr/bin/repo-add -q "${repo}${DBEXT}" "${pkgs[@]}" \ + || error 'repo-add %q %s' "${repo}${DBEXT}" "${pkgs_str% }" + /usr/bin/repo-add -f -q "${repo}${FILESEXT}" "${pkgs[@]}" \ + || error 'repo-add -f %q %s' "${repo}${FILESEXT}" "${pkgs_str% }" + popd >/dev/null + set_repo_permission "${repo}" "${arch}" + + REPO_MODIFIED=1 +} + +arch_repo_remove() { + local repo=$1 + local arch=$2 + local pkgs=("${@:3}") + local dbfile="${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" + local filesfile="${FTP_BASE}/${repo}/os/${arch}/${repo}${FILESEXT}" + + if [ ! -f "${dbfile}" ]; then + error "No database found at '%s'" "${dbfile}" + return 1 + fi + printf -v pkgs_str -- '%q ' "${pkgs[@]}" + /usr/bin/repo-remove -q "${dbfile}" "${pkgs[@]}" \ + || error 'repo-remove %q %s' "${dbfile}" "${pkgs_str% }" + /usr/bin/repo-remove -q "${filesfile}" "${pkgs[@]}" \ + || error 'repo-remove %q %s' "${filesfile}" "${pkgs_str% }" + set_repo_permission "${repo}" "${arch}" + + REPO_MODIFIED=1 +} diff --git a/extra/lukeshu-xbs/db-import b/extra/lukeshu-xbs/db-import new file mode 100755 index 0000000..a8a073d --- /dev/null +++ b/extra/lukeshu-xbs/db-import @@ -0,0 +1,288 @@ +#!/bin/bash +set -euE +# Imports Arch-like repos, running them through a blacklist +# License: GPLv3 + +. "$(dirname "$(readlink -e "$0")")/config" # for: FTP_BASE DBEXT +. "$(dirname "$(readlink -e "$0")")/db-import.conf" # for: IMPORTDIR IMPORTS +. "$(librelib messages)" +. "$(librelib blacklist)" + +# DBs = pacman DataBases + +# This replaces two scripts: +# - abslibre : imported ABS tree from Arch +# - db-sync : imported pacman DBs from Arch + +# The flow here is: +# 1. "${IMPORTDIR}/cache/${name}/dbs/" # Download the pacman databases +# 2. "${IMPORTDIR}/cache/${name}/abs/" # Download the ABS tree +# 3. "${IMPORTDIR}/clean/${name}/dbs/" # Run the pacman DBs through the blacklist +# 4. "${IMPORTDIR}/clean/${name}/pkgs/" # Download the pkg files mentioned in "clean/${name}/dbs/" +# 5. "${IMPORTDIR}/staging/${tag}" # Copy all the package files we just downloaded to here +# 6. 
Run `db-update` with STAGING="${IMPORTDIR}/staging/${tag}"
+
+# generic arguments to pass to rsync, borrowed from `abs`
+SYNCARGS='-mrtvlH --no-motd --no-p --no-o --no-g'
+
+main() {
+	blacklist-update
+
+	local importStr
+	for importStr in "${IMPORTS[@]}"; do
+		local importAry=($importStr)
+		local name=${importAry[0]}
+		local pkgmirror=${importAry[1]}
+		local absmirror=${importAry[2]}
+		local tags=("${importAry[@]:3}")
+
+		msg "Fetching remote package source: %s" "$name"
+		fetch_dbs "$name" "$pkgmirror"
+		fetch_abs "$name" "$absmirror" "${tags[@]}"
+		msg "Filtering blacklisted packages from remote package source: %s" "$name"
+		clean_dbs "$name" "${tags[@]}"
+		fetch_pkgs "$name" "${tags[@]}"
+		msg "Publishing changes from remote package source: %s" "$name"
+		publish "$name" "${tags[@]}"
+	done
+	return 0
+}
+
+fetch_dbs() {
+	local name=$1
+	local pkgmirror=$2
+
+	msg2 'Synchronizing package databases...'
+
+	mkdir -p -- "${IMPORTDIR}/cache/${name}/dbs"
+	# Grab just the .db files from $pkgmirror
+	rsync $SYNCARGS --delete-after \
+		--include="*/" \
+		--include="*.db" \
+		--include="*${DBEXT}" \
+		--exclude="*" \
+		"rsync://${pkgmirror}/" "${IMPORTDIR}/cache/${name}/dbs"
+}
+
+fetch_abs() {
+	local name=$1
+	local absmirror=$2
+	local tags=("${@:3}")
+
+	local fake_home
+	local absroot
+
+	# Sync the ABS tree from $absmirror
+	local arch
+	for arch in $(list_arches "${tags[@]}"); do
+		msg2 'Synchronizing %s ABS tree...' "$arch"
+
+		absroot="${IMPORTDIR}/cache/${name}/abs/${arch}"
+		mkdir -p -- "$absroot"
+
+		# Configure `abs` for this mirror
+		fake_home="${IMPORTDIR}/homes/${name}/${arch}"
+		mkdir -p -- "$fake_home"
+		{
+			printf "ABSROOT='%s'\n" "$absroot"
+			printf "SYNCSERVER='%s'\n" "$absmirror"
+			printf "ARCH='%s'\n" "$arch"
+			printf 'REPOS=(\n'
+			list_repos "$arch" "${tags[@]}"
+			printf ')\n'
+		} > "${fake_home}/.abs.conf"
+
+		# Run `abs`
+		HOME=$fake_home abs
+	done
+}
+
+clean_dbs() {
+	local name=$1
+	local tags=("${@:2}")
+
+	rm -rf -- "${IMPORTDIR}/clean/$name"
+
+	local tag
+	for tag in "${tags[@]}"; do
+		msg2 'Creating clean version of %s package database...' "$tag"
+
+		local cache="${IMPORTDIR}/cache/$name/dbs/$(db_file "$tag")"
+		local clean="${IMPORTDIR}/clean/$name/dbs/$(db_file "$tag")"
+		install -Dm644 "$cache" "$clean"
+
+		blacklist-cat | blacklist-get-pkg | xargs -d '\n' repo-remove "$clean"
+	done
+}
+
+fetch_pkgs() {
+	local name=$1
+	local tags=("${@:2}")
+
+	local repo arch dbfile whitelist
+
+	local tag
+	for tag in "${tags[@]}"; do
+		msg2 'Synchronizing package files for %s...' "$tag"
+		repo=${tag%-*}
+		arch=${tag##*-}
+
+		dbfile="${IMPORTDIR}/clean/$name/dbs/$(db_file "$tag")"
+		whitelist="${IMPORTDIR}/clean/$name/dbs/$tag.whitelist"
+
+		list_pkgs "$dbfile" > "$whitelist"
+
+		# fetch the architecture-specific packages
+		rsync $SYNCARGS --delete-after --delete-excluded \
+			--delay-updates \
+			--include-from=<(sed "s|\$|-$arch.tar.?z|" "$whitelist") \
+			--exclude='*' \
+			"rsync://${pkgmirror}/$(db_dir "$tag")/" \
+			"${IMPORTDIR}/clean/${name}/pkgs/${tag}/"
+
+		# fetch the architecture-independent packages
+		rsync $SYNCARGS --delete-after --delete-excluded \
+			--delay-updates \
+			--include-from=<(sed "s|\$|-any.tar.?z|" "$whitelist") \
+			--exclude='*' \
+			"rsync://${pkgmirror}/$(db_dir "$tag")/" \
+			"${IMPORTDIR}/clean/${name}/pkgs/${repo}-any/"
+	done
+}
+
+publish() {
+	local name=$1
+	local tags=("${@:2}")
+
+	local tag
+	for tag in "${tags[@]}"; do
+		msg2 'Publishing changes to %s...' "$tag"
"$tag" + publish_tag "$name" "$tag" + done +} + +publish_tag() { + local name=$1 + local tag=$2 + + local repo=${tag%-*} + local arch=${tag##*-} + local dir="${IMPORTDIR}/clean/${name}/pkgs/${tag}" + + local found + local error=false + local files=() + + local pkgid pkgarch + for pkgid in $(list_added_pkgs "$name" "$tag"); do + found=false + + for pkgarch in "${arch}" any; do + file="${dir}/${pkgid}-${arch}".pkg.tar.?z + if ! $found && [[ -r $file ]]; then + files+=("$file") + found=true + fi + done + + if ! $found; then + error 'Could not find package file for %s' "$pkgid" + error=true + fi + done + + if $error; then + error 'Quitting...' + return 1 + fi + + mkdir -p -- "${IMPORTDIR}/staging/${tag}/${repo}" + cp -al -- "${files[@]}" "${IMPORTDIR}/staging/${tag}/${repo}/" + STAGING="${IMPORTDIR}/staging/${tag}" db-update + + # XXX: db-remove wants pkgbase, not pkgname + list_removed_pkgs "$name" "$tag" | xargs -d '\n' db-remove "$repo" "$arch" +} + +################################################################################ + +# Usage: list_arches repo-arch... +# Returns a list of the architectures mentioned in a list of "repo-arch" pairs. +list_arches() { + local tags=("$@") + printf '%s\n' "${tags[@]##*-}" | sort -u +} + +# Usage: list_repos arch repo-arch... +# Returns a list of all the repositories mentioned for a given architecture in a +# list of "repo-arch" pairs. +list_repos() { + local arch=$1 + local tags=("${@:2}") + printf '%s\n' "${tags[@]}" | sed -n "s/-$arch\$//p" +} + +# Usage: db_dir repo-arch +db_dir() { + local tag=$1 + local repo=${tag%-*} + local arch=${tag##*-} + echo "${repo}/os/${arch}" +} + +# Usage; db_file repo-arch +db_file() { + local tag=$1 + local repo=${tag%-*} + local arch=${tag##*-} + echo "${repo}/os/${arch}/${repo}${DBEXT}" +} + +# Usage: list_pkgs dbfile +# Prints "$pkgname-$(get_full_version "$pkgname")" for every package in $dbfile +list_pkgs() { + local dbfile=$1 + bsdtar tf "$dbfile" | cut -d/ -f1 +} + +# Usage: list_pkgs | sep_ver +# Separates the pkgname from the version (replaces the '-' with ' ') for the +# list provided on stdin. +sep_ver() { + sed -r 's/-([^-]*-[^-]*)$/ \1/' +} + +# Usage: list_removed_pkgs importsrc repo-arch +# Prints "$pkgname-$(get_full_version "$pkgname")" for every removed package. +list_removed_pkgs() { + local name=$1 + local tag=$2 + + local old="${FTP_BASE}/$(db_file "$tag")" + local new="${IMPORTDIR}/clean/$name/dbs/$(db_file "$tag")" + + # make a list of: + # pkgname oldver[ newver] + # It will include removed or updated packages (changed packages) + join -a1 \ + <(list_pkgs "$old"|sep_ver|sort) \ + <(list_pkgs "$new"|sep_ver|sort) + | grep -v ' .* ' # remove updated packages + | sed 's/ /-/' # re-combine the pkgname and version +} + +# Usage: list_added_pkgs importsrc repo-arch +# slightly a misnomer; added and updated +# Prints "$pkgname-$(get_full_version "$pkgname")" for every added or updated +# package. 
+list_added_pkgs() {
+	local name=$1
+	local tag=$2
+
+	local old="${FTP_BASE}/$(db_file "$tag")"
+	local new="${IMPORTDIR}/clean/$name/dbs/$(db_file "$tag")"
+
+	comm -13 <(list_pkgs "$old") <(list_pkgs "$new")
+}
+
+main "$@"
diff --git a/extra/lukeshu-xbs/db-import.conf b/extra/lukeshu-xbs/db-import.conf
new file mode 100644
index 0000000..aaf7b0f
--- /dev/null
+++ b/extra/lukeshu-xbs/db-import.conf
@@ -0,0 +1,23 @@
+#!/hint/bash
+
+IMPORTDIR=/srv/repo/import
+
+case "$USER" in
+	db-import-packages)
+		_archrepos=(
+			{core,extra,testing,staging}-{i686,x86_64}
+			{gnome,kde}-unstable-{i686,x86_64}
+		);;
+	db-import-community)
+		_archrepos=(
+			community{,-testing,-staging}-{i686,x86_64}
+			multilib{,-testing,-staging}-x86_64
+		);;
+esac
+
+_archpkgmirror=$(db-pick-mirror rsync https://www.archlinux.org/mirrors/status/tier/1/json/)
+
+# name pkgmirror absmirror repo-arch...
+IMPORTS=("archlinux ${_archpkgmirror} rsync.archlinux.org ${_archrepos[*]}")
+
+unset _archrepos _archpkgmirror
diff --git a/extra/lukeshu-xbs/db-init b/extra/lukeshu-xbs/db-init
new file mode 100755
index 0000000..e25dbff
--- /dev/null
+++ b/extra/lukeshu-xbs/db-init
@@ -0,0 +1,6 @@
+#!/bin/bash
+# Creates the repo structure defined in config
+
+source "$(dirname "$(readlink -e "$0")")/config"
+
+mkdir -p -- "${FTP_BASE}"/{"${PKGPOOL}","${SRCPOOL}"} "${CLEANUP_DESTDIR}" "${SOURCE_CLEANUP_DESTDIR}" "${STAGING}"
diff --git a/extra/lukeshu-xbs/db-list-nonfree.py b/extra/lukeshu-xbs/db-list-nonfree.py
new file mode 100755
index 0000000..a486fa5
--- /dev/null
+++ b/extra/lukeshu-xbs/db-list-nonfree.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python2
+#-*- encoding: utf-8 -*-
+from filter import *
+import argparse
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        prog="nonfree_in_db",
+        description="Cleans nonfree files on repo",)
+
+    parser.add_argument("-k", "--blacklist-file", type=str,
+                        help="File containing blacklisted names",
+                        required=True,)
+
+    parser.add_argument("-b", "--database", type=str,
+                        help="database to clean",
+                        required=True,)
+
+    args=parser.parse_args()
+
+    if not (args.blacklist_file and args.database):
+        parser.print_help()
+        exit(1)
+
+    blacklist=listado(args.blacklist_file)
+    pkgs=get_pkginfo_from_db(args.database)
+
+    print(" ".join([pkg["name"] for pkg in pkgs if pkg["name"] in blacklist]))
diff --git a/extra/lukeshu-xbs/db-list-unsigned-packages b/extra/lukeshu-xbs/db-list-unsigned-packages
new file mode 100755
index 0000000..095e1e6
--- /dev/null
+++ b/extra/lukeshu-xbs/db-list-unsigned-packages
@@ -0,0 +1,38 @@
+#!/bin/bash
+# Copyright (C) 2012 Michał Masłowski
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+set -e
+
+# Output a list of repo/package-name-and-version pairs representing
+# unsigned packages available for architecture $1 and specified for
+# architecture $2 (usually $1 or any, default is to list all).
+
+. "$(dirname "$(readlink -e "$0")")/config"
+. 
"$(dirname "$(readlink -e "$0")")/db-functions" + +if [ $# -lt 1 ]; then + msg "usage: %s " "${0##*/}" + exit 1 +fi + +arch=$1 +shift + +for repo in "${PKGREPOS[@]}" +do + db="${FTP_BASE}/${repo}/os/${arch}/${repo}.db" + [ -f "$db" ] && "$(dirname "$(readlink -e "$0")")/db-list-unsigned-packages.py" "$repo" "$@" < "$db" +done diff --git a/extra/lukeshu-xbs/db-list-unsigned-packages.py b/extra/lukeshu-xbs/db-list-unsigned-packages.py new file mode 100755 index 0000000..80cff51 --- /dev/null +++ b/extra/lukeshu-xbs/db-list-unsigned-packages.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python3 +# Copyright (C) 2012 Michał Masłowski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + + +""" +Output a list of repo/package-name-and-version pairs representing +unsigned packages in the database at standard input of repo named in +the first argument and specified for architectures listed in the +following arguments (usually the one of the database or any, default +is to list all). + +If the --keyset argument is passed, print the key fingerprint of every +signed package. +""" + + +import base64 +import subprocess +import sys +import tarfile + + +def main(): + """Do the job.""" + check_keys = False + if "--keyset" in sys.argv: + sys.argv.remove("--keyset") + check_keys = True + repo = sys.argv[1] + pkgarches = frozenset(name.encode("utf-8") for name in sys.argv[2:]) + packages = [] + keys = [] + with tarfile.open(fileobj=sys.stdin.buffer) as archive: + for entry in archive: + if entry.name.endswith("/desc"): + content = archive.extractfile(entry) + skip = False + is_arch = False + key = None + for line in content: + if is_arch: + is_arch = False + if pkgarches and line.strip() not in pkgarches: + skip = True # different architecture + break + if line == b"%PGPSIG%\n": + skip = True # signed + key = b"" + if check_keys: + continue + else: + break + if line == b"%ARCH%\n": + is_arch = True + continue + if key is not None: + if line.strip(): + key += line.strip() + else: + break + if check_keys and key: + key_binary = base64.b64decode(key) + keys.append(key_binary) + packages.append(repo + "/" + entry.name[:-5]) + if skip: + continue + print(repo + "/" + entry.name[:-5]) + if check_keys and keys: + # We have collected all signed package names in packages and + # all keys in keys. Let's now ask gpg to list all signatures + # and find which keys made them. + packets = subprocess.check_output(("gpg", "--list-packets"), + input=b"".join(keys)) + i = 0 + for line in packets.decode("latin1").split("\n"): + if line.startswith(":signature packet:"): + keyid = line[line.index("keyid ") + len("keyid "):] + print(packages[i], keyid) + i += 1 + + +if __name__ == "__main__": + main() diff --git a/extra/lukeshu-xbs/db-move b/extra/lukeshu-xbs/db-move new file mode 100755 index 0000000..89a0ad6 --- /dev/null +++ b/extra/lukeshu-xbs/db-move @@ -0,0 +1,105 @@ +#!/bin/bash + +. "$(dirname "$(readlink -e "$0")")/config" +. 
"$(dirname "$(readlink -e "$0")")/db-functions" + +if [ $# -lt 3 ]; then + msg "usage: %s ..." "${0##*/}" + exit 1 +fi + +args=("${@}") +repo_from="${args[0]}" +repo_to="${args[1]}" +ftppath_from="${FTP_BASE}/${repo_from}/os/" +ftppath_to="${FTP_BASE}/${repo_to}/os/" + +if ! check_repo_permission "$repo_to" || ! check_repo_permission "$repo_from"; then + die "You don't have permission to move packages from %s to %s" "${repo_from}" "${repo_to}" +fi + +# TODO: this might lock too much (architectures) +for pkgarch in "${ARCHES[@]}"; do + repo_lock "${repo_to}" "${pkgarch}" || exit 1 + repo_lock "${repo_from}" "${pkgarch}" || exit 1 +done + +# First loop is to check that all necessary files exist +for pkgbase in "${args[@]:2}"; do + for pkgarch in "${ARCHES[@]}" 'any'; do + xbsrepo_from="$(xbs releasepath "${pkgbase}" "${repo_from}" "${pkgarch}")" + if [ -r "${xbsrepo_from}/PKGBUILD" ]; then + pkgnames=($(. "${xbsrepo_from}/PKGBUILD"; echo "${pkgname[@]}")) + if [ ${#pkgnames[@]} -lt 1 ]; then + die "Could not read pkgname" + fi + + if [ "${pkgarch}" == 'any' ]; then + tarches=("${ARCHES[@]}") + else + tarches=("${pkgarch}") + fi + + for pkgname in "${pkgnames[@]}"; do + pkgver=$(. "${xbsrepo_from}/PKGBUILD"; get_full_version "${pkgname}") + if [ -z "${pkgver}" ]; then + die "Could not read pkgver" + fi + for tarch in "${tarches[@]}"; do + getpkgfile "${ftppath_from}/${tarch}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT} >/dev/null + done + done + continue 2 + fi + done + die "%s not found in %s" "${pkgbase}" "${repo_from}" +done + +msg "Moving packages from [%s] to [%s]..." "${repo_from}" "${repo_to}" + +declare -A add_pkgs +declare -A remove_pkgs +for pkgbase in "${args[@]:2}"; do + # move the package in xbs + arches=($(xbs move "${repo_from}" "${repo_to}" "${pkgbase}")) + # move the package in ftp + for pkgarch in "${arches[@]}"; do + xbsrepo_to="$(xbs releasepath "$pkgbase" "$repo_to" "$pkgarch")" + if true; then # to add an indent level to make merging easier + if [ "${pkgarch}" == 'any' ]; then + tarches=("${ARCHES[@]}") + else + tarches=("${pkgarch}") + fi + msg2 "%s (%s)" "${pkgbase}" "${tarches[*]}" + pkgnames=($(. "${xbsrepo_to}/PKGBUILD"; echo "${pkgname[@]}")) + + for pkgname in "${pkgnames[@]}"; do + pkgver=$(. 
"${xbsrepo_to}/PKGBUILD"; get_full_version "${pkgname}") + for tarch in "${tarches[@]}"; do + pkgpath=$(getpkgfile "${ftppath_from}/${tarch}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT}) + pkgfile="${pkgpath##*/}" + + ln -s "../../../${PKGPOOL}/${pkgfile}" "${ftppath_to}/${tarch}/" + if [ -f "${FTP_BASE}/${PKGPOOL}/${pkgfile}.sig" ]; then + ln -s "../../../${PKGPOOL}/${pkgfile}.sig" "${ftppath_to}/${tarch}/" + fi + add_pkgs[${tarch}]+="${FTP_BASE}/${PKGPOOL}/${pkgfile} " + remove_pkgs[${tarch}]+="${pkgname} " + done + done + fi + done +done + +for tarch in "${ARCHES[@]}"; do + if [ -n "${add_pkgs[${tarch}]}" ]; then + arch_repo_add "${repo_to}" "${tarch}" ${add_pkgs[${tarch}]} + arch_repo_remove "${repo_from}" "${tarch}" ${remove_pkgs[${tarch}]} + fi +done + +for pkgarch in "${ARCHES[@]}"; do + repo_unlock "${repo_from}" "${pkgarch}" + repo_unlock "${repo_to}" "${pkgarch}" +done diff --git a/extra/lukeshu-xbs/db-pick-mirror b/extra/lukeshu-xbs/db-pick-mirror new file mode 100755 index 0000000..9474ed7 --- /dev/null +++ b/extra/lukeshu-xbs/db-pick-mirror @@ -0,0 +1,24 @@ +#!/usr/bin/env ruby + +require 'json' +require 'rest_client' + +protocol = ARGV[0] +jsonurl = ARGV[1] + +data = JSON::parse(RestClient.get(jsonurl)) + +if data["version"] != 3 + print "Data format version != 3" + exit 1 +end + +urls = data["urls"] +rsync_urls = urls.select{|a| a["protocol"]==protocol} + +# By score ( (delay+speed)/completion ) +#best = rsync_urls.sort{|a,b| (a["score"] || Float::INFINITY) <=> (b["score"] || Float::INFINITY) }.first +# By delay/completion +best = rsync_urls.sort{|a,b| a["delay"]/a["completion_pct"] <=> b["delay"]/b["completion_pct"] }.first + +puts best["url"] diff --git a/extra/lukeshu-xbs/db-remove b/extra/lukeshu-xbs/db-remove new file mode 100755 index 0000000..dcbe4b4 --- /dev/null +++ b/extra/lukeshu-xbs/db-remove @@ -0,0 +1,49 @@ +#!/bin/bash + +. "$(dirname "$(readlink -e "$0")")/config" +. "$(dirname "$(readlink -e "$0")")/db-functions" + +if [ $# -lt 3 ]; then + msg "usage: %s ..." "${0##*/}" + exit 1 +fi + +repo="$1" +arch="$2" +pkgbases=("${@:3}") + +if ! check_repo_permission "$repo"; then + die "You don't have permission to remove packages from %s" "${repo}" +fi + +if [ "$arch" == "any" ]; then + tarches=("${ARCHES[@]}") +else + tarches=("$arch") +fi + +for tarch in "${tarches[@]}"; do + repo_lock "$repo" "$tarch" || exit 1 +done + +remove_pkgs=() +for pkgbase in "${pkgbases[@]}"; do + msg "Removing %s from [%s]..." "$pkgbase" "$repo" + + path="$(xbs releasepath "$pkgbase" "$repo" "$arch")" + if [ -d "$path" ]; then + remove_pkgs+=($(. "$path/PKGBUILD"; echo "${pkgname[@]}")) + xbs unrelease "$pkgbase" "$repo" "$arch" + else + warning "%s not found in %s for %s" \ + "$pkgbase" "$(xbs name)" "$repo-$arch" + warning "Removing only %s from the repo" "$pkgbase" + warning "If it was a split package you have to remove the others yourself!" + remove_pkgs+=("$pkgbase") + fi +done + +for tarch in "${tarches[@]}"; do + arch_repo_remove "${repo}" "${tarch}" "${remove_pkgs[@]}" + repo_unlock "$repo" "$tarch" +done diff --git a/extra/lukeshu-xbs/db-repo-add b/extra/lukeshu-xbs/db-repo-add new file mode 100755 index 0000000..4611bdf --- /dev/null +++ b/extra/lukeshu-xbs/db-repo-add @@ -0,0 +1,41 @@ +#!/bin/bash + +. "$(dirname "$(readlink -e "$0")")/config" +. "$(dirname "$(readlink -e "$0")")/db-functions" + +if [ $# -lt 3 ]; then + msg "usage: %s ..." "${0##*/}" + exit 1 +fi + +repo="$1" +arch="$2" +pkgfiles=("${@:3}") + +ftppath="$FTP_BASE/$repo/os" + +if ! 
+ +# By score ( (delay+speed)/completion ) +#best = rsync_urls.sort{|a,b| (a["score"] || Float::INFINITY) <=> (b["score"] || Float::INFINITY) }.first +# By delay/completion +best = rsync_urls.sort{|a,b| a["delay"]/a["completion_pct"] <=> b["delay"]/b["completion_pct"] }.first + +puts best["url"] diff --git a/extra/lukeshu-xbs/db-remove b/extra/lukeshu-xbs/db-remove new file mode 100755 index 0000000..dcbe4b4 --- /dev/null +++ b/extra/lukeshu-xbs/db-remove @@ -0,0 +1,49 @@ +#!/bin/bash + +. "$(dirname "$(readlink -e "$0")")/config" +. "$(dirname "$(readlink -e "$0")")/db-functions" + +if [ $# -lt 3 ]; then + msg "usage: %s <repo> <arch> <pkgbase>..." "${0##*/}" + exit 1 +fi + +repo="$1" +arch="$2" +pkgbases=("${@:3}") + +if ! check_repo_permission "$repo"; then + die "You don't have permission to remove packages from %s" "${repo}" +fi + +if [ "$arch" == "any" ]; then + tarches=("${ARCHES[@]}") +else + tarches=("$arch") +fi + +for tarch in "${tarches[@]}"; do + repo_lock "$repo" "$tarch" || exit 1 +done + +remove_pkgs=() +for pkgbase in "${pkgbases[@]}"; do + msg "Removing %s from [%s]..." "$pkgbase" "$repo" + + path="$(xbs releasepath "$pkgbase" "$repo" "$arch")" + if [ -d "$path" ]; then + remove_pkgs+=($(. "$path/PKGBUILD"; echo "${pkgname[@]}")) + xbs unrelease "$pkgbase" "$repo" "$arch" + else + warning "%s not found in %s for %s" \ + "$pkgbase" "$(xbs name)" "$repo-$arch" + warning "Removing only %s from the repo" "$pkgbase" + warning "If it was a split package you have to remove the others yourself!" + remove_pkgs+=("$pkgbase") + fi +done + +for tarch in "${tarches[@]}"; do + arch_repo_remove "${repo}" "${tarch}" "${remove_pkgs[@]}" + repo_unlock "$repo" "$tarch" +done diff --git a/extra/lukeshu-xbs/db-repo-add b/extra/lukeshu-xbs/db-repo-add new file mode 100755 index 0000000..4611bdf --- /dev/null +++ b/extra/lukeshu-xbs/db-repo-add @@ -0,0 +1,41 @@ +#!/bin/bash + +. "$(dirname "$(readlink -e "$0")")/config" +. "$(dirname "$(readlink -e "$0")")/db-functions" + +if [ $# -lt 3 ]; then + msg "usage: %s <repo> <arch> <pkgfile>..." "${0##*/}" + exit 1 +fi + +repo="$1" +arch="$2" +pkgfiles=("${@:3}") + +ftppath="$FTP_BASE/$repo/os" + +if ! check_repo_permission "$repo"; then + die "You don't have permission to add packages to %s" "${repo}" +fi + +if [ "$arch" == "any" ]; then + tarches=("${ARCHES[@]}") +else + tarches=("$arch") +fi + +for tarch in "${tarches[@]}"; do + repo_lock "$repo" "$tarch" || exit 1 +done + +for tarch in "${tarches[@]}"; do + for pkgfile in "${pkgfiles[@]}"; do + if [[ ! -f "${FTP_BASE}/${repo}/os/${arch}/${pkgfile##*/}" ]]; then + die "Package file %s not found in %s" "${pkgfile##*/}" "${FTP_BASE}/${repo}/os/${arch}/" + else + msg "Adding %s to [%s]..." "$pkgfile" "$repo" + fi + done + arch_repo_add "${repo}" "${tarch}" "${pkgfiles[@]}" + repo_unlock "$repo" "$tarch" +done diff --git a/extra/lukeshu-xbs/db-repo-remove b/extra/lukeshu-xbs/db-repo-remove new file mode 100755 index 0000000..aadc4ce --- /dev/null +++ b/extra/lukeshu-xbs/db-repo-remove @@ -0,0 +1,37 @@ +#!/bin/bash + +. "$(dirname "$(readlink -e "$0")")/config" +. "$(dirname "$(readlink -e "$0")")/db-functions" + +if [ $# -lt 3 ]; then + msg "usage: %s <repo> <arch> <pkgname>..." "${0##*/}" + exit 1 +fi + +repo="$1" +arch="$2" +pkgnames=("${@:3}") + +ftppath="$FTP_BASE/$repo/os" + +if ! check_repo_permission "$repo"; then + die "You don't have permission to remove packages from %s" "${repo}" +fi + +if [ "$arch" == "any" ]; then + tarches=("${ARCHES[@]}") +else + tarches=("$arch") +fi + +for tarch in "${tarches[@]}"; do + repo_lock "$repo" "$tarch" || exit 1 +done + +for tarch in "${tarches[@]}"; do + for pkgname in "${pkgnames[@]}"; do + msg "Removing %s from [%s]..." "$pkgname" "$repo" + done + arch_repo_remove "${repo}" "${tarch}" "${pkgnames[@]}" + repo_unlock "$repo" "$tarch" +done diff --git a/extra/lukeshu-xbs/db-update b/extra/lukeshu-xbs/db-update new file mode 100755 index 0000000..559ef3f --- /dev/null +++ b/extra/lukeshu-xbs/db-update @@ -0,0 +1,126 @@ +#!/bin/bash + +. "$(dirname "$(readlink -e "$0")")/config" +. "$(dirname "$(readlink -e "$0")")/db-functions" + +if [ $# -ge 1 ]; then + warning "Calling %s with a specific repository is no longer supported" "${0##*/}" + exit 1 +fi + +# Find repos with packages to release +staging_repos=($(find "${STAGING}" -mindepth 1 -type f -name "*${PKGEXT}" -printf '%h\n' | sort -u)) +if [ $? -ge 1 ]; then + die "Could not read %s" "${STAGING}" +fi
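+ +# find prints only the directory (%h) of every staged package file, so a +# hypothetical staging tree holding core/pkg-a-1-1-x86_64.pkg.tar.xz and +# extra/pkg-b-1-1-any.pkg.tar.xz reduces, after sort -u, to just the two +# directory paths; their basenames are then filtered against PKGREPOS below.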
+ +repos=() +for staging_repo in "${staging_repos[@]##*/}"; do + if in_array "${staging_repo}" "${PKGREPOS[@]}"; then + repos+=("${staging_repo}") + fi +done + +# TODO: this might lock too much (architectures) +for repo in "${repos[@]}"; do + for pkgarch in "${ARCHES[@]}"; do + repo_lock "${repo}" "${pkgarch}" || exit 1 + done +done + +# check if packages are valid +for repo in "${repos[@]}"; do + if ! check_repo_permission "${repo}"; then + die "You don't have permission to update packages in %s" "${repo}" + fi + pkgs=($(getpkgfiles "${STAGING}/${repo}/"*${PKGEXT})) + if [ $? -eq 0 ]; then + for pkg in "${pkgs[@]}"; do + if [ -h "${pkg}" ]; then + die "Package %s is a symbolic link" "${repo}/${pkg##*/}" + fi + if ! check_pkgfile "${pkg}"; then + die "Package %s is not consistent with its meta data" "${repo}/${pkg##*/}" + fi + if "${REQUIRE_SIGNATURE}" && ! pacman-key -v "${pkg}.sig" >/dev/null 2>&1; then + die "Package %s does not have a valid signature" "${repo}/${pkg##*/}" + fi + if ! check_pkgxbs "${pkg}" "${repo}"; then + die "Package %s is not consistent with %s" "${repo}/${pkg##*/}" "$(xbs name)" + fi + if ! check_pkgrepos "${pkg}"; then + die "Package %s already exists in another repository" "${repo}/${pkg##*/}" + fi + if ! check_packager "${pkg}"; then + die "Package %s does not have a valid packager" "${repo}/${pkg##*/}" + fi + done + if ! check_splitpkgs "${repo}" "${pkgs[@]}"; then + die "Missing split packages for %s" "${repo}" + fi + else + die "Could not read %s" "${STAGING}" + fi +done + +dirs=() +for repo in "${repos[@]}"; do + msg "Updating [%s]..." "${repo}" + any_pkgs=($(getpkgfiles "${STAGING}/${repo}/"*-any${PKGEXT} 2>/dev/null)) + for pkgarch in "${ARCHES[@]}"; do + add_dirs=() + add_pkgs=() + arch_pkgs=($(getpkgfiles "${STAGING}/${repo}/"*-"${pkgarch}"${PKGEXT} 2>/dev/null)) + for pkg in "${arch_pkgs[@]}" "${any_pkgs[@]}"; do + pkgfile="${pkg##*/}" + msg2 "%s (%s)" "${pkgfile}" "${pkgarch}" + # any packages might have been moved by the previous run + if [ -f "${pkg}" ]; then + mv "${pkg}" "$FTP_BASE/${PKGPOOL}" + fi + ln -s "../../../${PKGPOOL}/${pkgfile}" "$FTP_BASE/$repo/os/${pkgarch}" + # also move signatures + if [ -f "${pkg}.sig" ]; then + mv "${pkg}.sig" "$FTP_BASE/${PKGPOOL}" + fi + if [ -f "$FTP_BASE/${PKGPOOL}/${pkgfile}.sig" ]; then + ln -s "../../../${PKGPOOL}/${pkgfile}.sig" "$FTP_BASE/$repo/os/${pkgarch}" + fi + add_dirs+=("${STAGING}/abslibre/$(getpkgarch "$FTP_BASE/$PKGPOOL/$pkgfile")/$repo/$(getpkgbase "$FTP_BASE/$PKGPOOL/$pkgfile")") + add_pkgs+=("${pkgfile}") + done + for add_dir in "${add_dirs[@]}"; do + (cd "${add_dir}" && xbs release-server "${repo}" "${pkgarch}") || + error 'cd %q && xbs release-server %q %q' "${add_dir}" "${repo}" "${pkgarch}" + done + if [ ${#add_pkgs[@]} -ge 1 ]; then + arch_repo_add "${repo}" "${pkgarch}" "${add_pkgs[@]}" + fi + dirs+=("${add_dirs[@]}") + done +done + +for repo in "${repos[@]}"; do + for pkgarch in "${ARCHES[@]}"; do + repo_unlock "${repo}" "${pkgarch}" + done +done + +cd "${STAGING}" + +# Remove left over XBS files +rm -rf -- "${dirs[@]}" +dirname -z -- "${dirs[@]}" | + xargs -0 realpath -zm --relative-to="${STAGING}" -- | + xargs -0 rmdir -p -- 2>/dev/null
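+ +# The cleanup above stays NUL-delimited end to end (-z/-0), so directory names +# with spaces survive the pipeline; rmdir -p then removes each now-empty parent +# chain (for a hypothetical staging/abslibre/x86_64/core/pkg-foo, everything up +# to staging/abslibre collapses once pkg-foo is gone), and errors from still +# non-empty directories are deliberately discarded.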
+ +# Stage generated source files +while read -r file; do + pub="${FTP_BASE}/${file}" + if [[ -f "$pub" ]]; then + warning "file already exists: %s" "${file}" + else + mkdir -p -- "${pub%/*}" + mv -vn "$file" "$pub" + fi +done < <(find other sources -type f 2>/dev/null) diff --git a/extra/lukeshu-xbs/make_individual_torrent b/extra/lukeshu-xbs/make_individual_torrent new file mode 100755 index 0000000..0a7e778 --- /dev/null +++ b/extra/lukeshu-xbs/make_individual_torrent @@ -0,0 +1,52 @@ +#!/bin/bash +# Copyright (C) 2014 Joseph Graham +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. + +# This script is called by `make_repo_torrents' to make a torrent. It +# depends on `mktorrent'. It takes the following args: +# $1 - path of package +# $2 - public location + +# Comma-separated list of trackers, no spaces +# t67.eu is run by Xylon, hackcoop by fauno & friends +trackers='http://t67.eu:6969/announce,http://tracker.hackcoop.com.ar/announce' + +# This mirror is put as a webseed. Which mirror we use for a webseed +# doesn't really matter since it's re-written on the client machine by +# pacman2pacman so it won't normally be used anyway. +seed_url='http://repo.parabolagnulinux.org/' + +if [[ -z "${1}" ]] +then + echo "Error. First arg must be the path of the package." + exit 1 +fi + +if [[ -z "${2}" ]] +then + echo "Error. Second arg must be the public location." + exit 1 +fi + +pkg="${1}" +public_location="${2}" + +pkg_name="${pkg##*/}" + +# URL of the actual package for the webseed +webseed="${seed_url}${pkg#${public_location}}"
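+ +# ${pkg#${public_location}} strips the local prefix, so with hypothetical +# values pkg=/srv/ftp/pool/packages/pkg-foo-1-1-any.pkg.tar.xz and +# public_location=/srv/ftp/ the webseed becomes +# http://repo.parabolagnulinux.org/pool/packages/pkg-foo-1-1-any.pkg.tar.xz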
PKGBUILD; echo "${arch[@]}")) + pkgversion=$(. PKGBUILD; get_full_version) + + build=true + for a in "${pkgarch[@]}"; do + for p in "${pkgname[@]}"; do + [ ! -f "${p}-${pkgversion}-${a}"${PKGEXT} ] && build=false + done + done + + if ! "${build}"; then + if [ "${pkgarch[0]}" == 'any' ]; then + sudo libremakepkg || die 'libremakepkg failed' + else + for a in "${pkgarch[@]}"; do + if in_array "$a" "${arches[@]}"; then + sudo setarch "$a" libremakepkg -n "$a" || die "setarch ${a} libremakepkg -n ${a} failed" + for p in "${pkgname[@]}"; do + cp "${p}-${pkgversion}-${a}"${PKGEXT} "$(dirname "${BASH_SOURCE[0]})/../packages/${d##*/}")" + done + else + warning "skipping arch %s" "$a" + fi + done + fi + fi + popd >/dev/null + done +} + +oneTimeTearDown() { + rm -rf "${pkgdir}" +} + +setUp() { + local p + local pkg + local r + local a + + [ -f "$(dirname "${BASH_SOURCE[0]}")/../../config.local" ] && die "$(dirname "${BASH_SOURCE[0]}")/../../config.local exists" + init_tmpdir + TMP="$(mktemp -dt "${0##*/}.XXXXXXXXXX")" + #msg "Using ${TMP}" + + PKGREPOS=('core' 'extra' 'testing') + PKGPOOL='pool/packages' + SRCPOOL='pool/sources' + mkdir -p "${TMP}/"{ftp,tmp,staging,{package,source}-cleanup,svn-packages-{copy,repo}} + + for r in "${PKGREPOS[@]}"; do + mkdir -p "${TMP}/staging/${r}" + for a in "${ARCHES[@]}"; do + mkdir -p "${TMP}/ftp/${r}/os/${a}" + done + done + mkdir -p "${TMP}/ftp/${PKGPOOL}" + mkdir -p "${TMP}/ftp/${SRCPOOL}" + + msg 'Creating svn repository...' + svnadmin create "${TMP}/svn-packages-repo" + arch_svn checkout -q "file://${TMP}/svn-packages-repo" "${TMP}/svn-packages-copy" + + for p in "${pkgdir}"/*; do + pkg=${p##*/} + mkdir -p "${TMP}/svn-packages-copy/${pkg}"/{trunk,repos} + cp "${p}"/* "${TMP}/svn-packages-copy/${pkg}/trunk/" + arch_svn add -q "${TMP}/svn-packages-copy/${pkg}" + arch_svn commit -q -m"initial commit of ${pkg}" "${TMP}/svn-packages-copy" + done + + mkdir -p "${TMP}/home/.config/libretools" + export XDG_CONFIG_HOME="${TMP}/home/.config" + printf '%s\n' \ + 'SVNURL=foo' \ + "SVNREPO=\"${TMP}/svn-packages-copy\"" \ + "ARCHES=($(arches))" \ + > "$XDG_CONFIG_HOME/libretools/xbs-abs.conf" + printf '%s\n' 'BUILDSYSTEM=abs' > "$XDG_CONFIG_HOME/xbs.conf" + + cat < "$(dirname "${BASH_SOURCE[0]}")/../../config.local" + FTP_BASE="${TMP}/ftp" + SVNREPO="${TMP}/svn-packages-copy" + PKGREPOS=("${PKGREPOS[@]}") + PKGPOOL="${PKGPOOL}" + SRCPOOL="${SRCPOOL}" + TESTING_REPO='testing' + STABLE_REPOS=('core' 'extra') + CLEANUP_DESTDIR="${TMP}/package-cleanup" + SOURCE_CLEANUP_DESTDIR="${TMP}/source-cleanup" + STAGING="${TMP}/staging" + TMPDIR="${TMP}/tmp" + CLEANUP_DRYRUN=false + SOURCE_CLEANUP_DRYRUN=false + REQUIRE_SIGNATURE=true +eot + . "$(dirname "${BASH_SOURCE[0]}")/../../config" +} + +tearDown() { + rm -rf "${TMP}" + rm -f "$(dirname "${BASH_SOURCE[0]}")/../../config.local" + echo +} + +releasePackage() { + local repo=$1 + local pkgbase=$2 + local arch=$3 + local a + local p + local pkgver + local pkgname + + pushd "${TMP}/svn-packages-copy/${pkgbase}/trunk/" >/dev/null + xbs release "${repo}" "${arch}" >/dev/null 2>&1 + pkgver=$(. PKGBUILD; get_full_version) + pkgname=($(. 
PKGBUILD; echo "${pkgname[@]}")) + popd >/dev/null + cp "${pkgdir}/${pkgbase}"/*-"${pkgver}-${arch}"${PKGEXT} "${STAGING}/${repo}/" + + if "${REQUIRE_SIGNATURE}"; then + for a in "${arch[@]}"; do + for p in "${pkgname[@]}"; do + signpkg "${STAGING}/${repo}/${p}-${pkgver}-${a}"${PKGEXT} + done + done + fi +} + +checkAnyPackageDB() { + local repo=$1 + local pkg=$2 + local arch + local db + + [ -r "${FTP_BASE}/${PKGPOOL}/${pkg}" ] || fail "${PKGPOOL}/${pkg} not found" + if "${REQUIRE_SIGNATURE}"; then + [ -r "${FTP_BASE}/${PKGPOOL}/${pkg}.sig" ] || fail "${PKGPOOL}/${pkg}.sig not found" + fi + + for arch in $(arches); do + [ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}" ] || fail "${repo}/os/${arch}/${pkg} is not a symlink" + [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}")" == "$(readlink -e "${FTP_BASE}/${PKGPOOL}/${pkg}")" ] \ + || fail "${repo}/os/${arch}/${pkg} does not link to ${PKGPOOL}/${pkg}" + + if ${REQUIRE_SIGNATURE}; then + [ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig" ] || fail "${repo}/os/${arch}/${pkg}.sig is not a symlink" + [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig")" == "$(readlink -e "${FTP_BASE}/${PKGPOOL}/${pkg}.sig")" ] \ + || fail "${repo}/os/${arch}/${pkg}.sig does not link to ${PKGPOOL}/${pkg}.sig" + fi + + for db in ${DBEXT} ${FILESEXT}; do + ( [ -r "${FTP_BASE}/${repo}/os/${arch}/${repo}${db%.tar.*}" ] \ + && bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${db%.tar.*}" -O | grep ${pkg} &>/dev/null) \ + || fail "${pkg} not in ${repo}/os/${arch}/${repo}${db%.tar.*}" + done + done + [ -r "${STAGING}"/${repo}/${pkg} ] && fail "${repo}/${pkg} found in staging dir" + [ -r "${STAGING}"/${repo}/${pkg}.sig ] && fail "${repo}/${pkg}.sig found in staging dir" +} + +checkAnyPackage() { + local repo=$1 + local pkg=$2 + + checkAnyPackageDB $repo $pkg + + local pkgbase=$(getpkgbase "${FTP_BASE}/${PKGPOOL}/${pkg}") + arch_svn up -q "${TMP}/svn-packages-copy/${pkgbase}" + [ -d "${TMP}/svn-packages-copy/${pkgbase}/repos/${repo}-any" ] \ + || fail "svn-packages-copy/${pkgbase}/repos/${repo}-any does not exist" +} + +checkPackageDB() { + local repo=$1 + local pkg=$2 + local arch=$3 + local db + + [ -r "${FTP_BASE}/${PKGPOOL}/${pkg}" ] || fail "${PKGPOOL}/${pkg} not found" + [ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}" ] || fail "${repo}/os/${arch}/${pkg} not a symlink" + [ -r "${STAGING}"/${repo}/${pkg} ] && fail "${repo}/${pkg} found in staging dir" + + [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}")" == "$(readlink -e "${FTP_BASE}/${PKGPOOL}/${pkg}")" ] \ + || fail "${repo}/os/${arch}/${pkg} does not link to ${PKGPOOL}/${pkg}" + + if ${REQUIRE_SIGNATURE}; then + [ -r "${FTP_BASE}/${PKGPOOL}/${pkg}.sig" ] || fail "${PKGPOOL}/${pkg}.sig not found" + [ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig" ] || fail "${repo}/os/${arch}/${pkg}.sig is not a symlink" + [ -r "${STAGING}"/${repo}/${pkg}.sig ] && fail "${repo}/${pkg}.sig found in staging dir" + + [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig")" == "$(readlink -e "${FTP_BASE}/${PKGPOOL}/${pkg}.sig")" ] \ + || fail "${repo}/os/${arch}/${pkg}.sig does not link to ${PKGPOOL}/${pkg}.sig" + fi + + for db in ${DBEXT} ${FILESEXT}; do + ( [ -r "${FTP_BASE}/${repo}/os/${arch}/${repo}${db%.tar.*}" ] \ + && bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${db%.tar.*}" -O | grep ${pkg} &>/dev/null) \ + || fail "${pkg} not in ${repo}/os/${arch}/${repo}${db%.tar.*}" + done +} + +checkPackage() { + local repo=$1 + local pkg=$2 + local arch=$3 + + checkPackageDB $repo $pkg $arch + + local 
pkgbase=$(getpkgbase "${FTP_BASE}/${PKGPOOL}/${pkg}") + arch_svn up -q "${TMP}/svn-packages-copy/${pkgbase}" + [ -d "${TMP}/svn-packages-copy/${pkgbase}/repos/${repo}-${arch}" ] \ + || fail "svn-packages-copy/${pkgbase}/repos/${repo}-${arch} does not exist" +} + +checkRemovedPackageDB() { + local repo=$1 + local pkgbase=$2 + local arch=$3 + local db + + for db in ${DBEXT} ${FILESEXT}; do + ( [ -r "${FTP_BASE}/${repo}/os/${arch}/${repo}${db%.tar.*}" ] \ + && bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${db%.tar.*}" -O | grep ${pkgbase} &>/dev/null) \ + && fail "${pkgbase} should not be in ${repo}/os/${arch}/${repo}${db%.tar.*}" + done +} + +checkRemovedPackage() { + local repo=$1 + local pkgbase=$2 + local arch=$3 + + checkRemovedPackageDB $repo $pkgbase $arch + + arch_svn up -q "${TMP}/svn-packages-copy/${pkgbase}" + [ -d "${TMP}/svn-packages-copy/${pkgbase}/repos/${repo}-${arch}" ] \ + && fail "svn-packages-copy/${pkgbase}/repos/${repo}-${arch} should not exist" +} + +checkRemovedAnyPackageDB() { + local repo=$1 + local pkgbase=$2 + local arch + local db + + for db in ${DBEXT} ${FILESEXT}; do + for arch in $(arches); do + ( [ -r "${FTP_BASE}/${repo}/os/${arch}/${repo}${db%.tar.*}" ] \ + && bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${db%.tar.*}" -O | grep ${pkgbase} &>/dev/null) \ + && fail "${pkgbase} should not be in ${repo}/os/${arch}/${repo}${db%.tar.*}" + done + done +} + +checkRemovedAnyPackage() { + local repo=$1 + local pkgbase=$2 + + checkRemovedAnyPackageDB $repo $pkgbase + + arch_svn up -q "${TMP}/svn-packages-copy/${pkgbase}" + [ -d "${TMP}/svn-packages-copy/${pkgbase}/repos/${repo}-any" ] \ + && fail "svn-packages-copy/${pkgbase}/repos/${repo}-any should not exist" +} diff --git a/extra/lukeshu-xbs/test/lib/shunit2 b/extra/lukeshu-xbs/test/lib/shunit2 new file mode 100755 index 0000000..8862ffd --- /dev/null +++ b/extra/lukeshu-xbs/test/lib/shunit2 @@ -0,0 +1,1048 @@ +#! /bin/sh +# $Id: shunit2 335 2011-05-01 20:10:33Z kate.ward@forestent.com $ +# vim:et:ft=sh:sts=2:sw=2 +# +# Copyright 2008 Kate Ward. All Rights Reserved. +# Released under the LGPL (GNU Lesser General Public License) +# +# shUnit2 -- Unit testing framework for Unix shell scripts. +# http://code.google.com/p/shunit2/ +# +# Author: kate.ward@forestent.com (Kate Ward) +# +# shUnit2 is a xUnit based unit test framework for Bourne shell scripts. It is +# based on the popular JUnit unit testing framework for Java. + +# return if shunit already loaded +[ -n "${SHUNIT_VERSION:-}" ] && exit 0 + +SHUNIT_VERSION='2.1.6' + +SHUNIT_TRUE=0 +SHUNIT_FALSE=1 +SHUNIT_ERROR=2 + +# enable strict mode by default +SHUNIT_STRICT=${SHUNIT_STRICT:-${SHUNIT_TRUE}} + +_shunit_warn() { echo "shunit2:WARN $@" >&2; } +_shunit_error() { echo "shunit2:ERROR $@" >&2; } +_shunit_fatal() { echo "shunit2:FATAL $@" >&2; exit ${SHUNIT_ERROR}; } + +# specific shell checks +if [ -n "${ZSH_VERSION:-}" ]; then + setopt |grep "^shwordsplit$" >/dev/null + if [ $? -ne ${SHUNIT_TRUE} ]; then + _shunit_fatal 'zsh shwordsplit option is required for proper operation' + fi + if [ -z "${SHUNIT_PARENT:-}" ]; then + _shunit_fatal "zsh does not pass \$0 through properly. 
please declare \ +\"SHUNIT_PARENT=\$0\" before calling shUnit2" + fi +fi + +# +# constants +# + +__SHUNIT_ASSERT_MSG_PREFIX='ASSERT:' +__SHUNIT_MODE_SOURCED='sourced' +__SHUNIT_MODE_STANDALONE='standalone' +__SHUNIT_PARENT=${SHUNIT_PARENT:-$0} + +# set the constants readonly +shunit_constants_=`set |grep '^__SHUNIT_' |cut -d= -f1` +echo "${shunit_constants_}" |grep '^Binary file' >/dev/null && \ + shunit_constants_=`set |grep -a '^__SHUNIT_' |cut -d= -f1` +for shunit_constant_ in ${shunit_constants_}; do + shunit_ro_opts_='' + case ${ZSH_VERSION:-} in + '') ;; # this isn't zsh + [123].*) ;; # early versions (1.x, 2.x, 3.x) + *) shunit_ro_opts_='-g' ;; # all later versions. declare readonly globally + esac + readonly ${shunit_ro_opts_} ${shunit_constant_} +done +unset shunit_constant_ shunit_constants_ shunit_ro_opts_ + +# variables +__shunit_lineno='' # line number of executed test +__shunit_mode=${__SHUNIT_MODE_SOURCED} # operating mode +__shunit_reportGenerated=${SHUNIT_FALSE} # is report generated +__shunit_script='' # filename of unittest script (standalone mode) +__shunit_skip=${SHUNIT_FALSE} # is skipping enabled +__shunit_suite='' # suite of tests to execute + +# counts of tests +__shunit_testSuccess=${SHUNIT_TRUE} +__shunit_testsTotal=0 +__shunit_testsPassed=0 +__shunit_testsFailed=0 + +# counts of asserts +__shunit_assertsTotal=0 +__shunit_assertsPassed=0 +__shunit_assertsFailed=0 +__shunit_assertsSkipped=0 + +# macros +_SHUNIT_LINENO_='eval __shunit_lineno=""; if [ "${1:-}" = "--lineno" ]; then [ -n "$2" ] && __shunit_lineno="[$2] "; shift 2; fi' + +#----------------------------------------------------------------------------- +# assert functions +# + +# Assert that two values are equal to one another. +# +# Args: +# message: string: failure message [optional] +# expected: string: expected value +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertEquals() +{ + ${_SHUNIT_LINENO_} + if [ $# -lt 2 -o $# -gt 3 ]; then + _shunit_error "assertEquals() requires two or three arguments; $# given" + _shunit_error "1: ${1:+$1} 2: ${2:+$2} 3: ${3:+$3}${4:+ 4: $4}" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if [ $# -eq 3 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + shunit_expected_=$1 + shunit_actual_=$2 + + shunit_return=${SHUNIT_TRUE} + if [ "${shunit_expected_}" = "${shunit_actual_}" ]; then + _shunit_assertPass + else + failNotEquals "${shunit_message_}" "${shunit_expected_}" "${shunit_actual_}" + shunit_return=${SHUNIT_FALSE} + fi + + unset shunit_message_ shunit_expected_ shunit_actual_ + return ${shunit_return} +} +_ASSERT_EQUALS_='eval assertEquals --lineno "${LINENO:-}"' + +# Assert that two values are not equal to one another. 
+# +# Args: +# message: string: failure message [optional] +# expected: string: expected value +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertNotEquals() +{ + ${_SHUNIT_LINENO_} + if [ $# -lt 2 -o $# -gt 3 ]; then + _shunit_error "assertNotEquals() requires two or three arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if [ $# -eq 3 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + shunit_expected_=$1 + shunit_actual_=$2 + + shunit_return=${SHUNIT_TRUE} + if [ "${shunit_expected_}" != "${shunit_actual_}" ]; then + _shunit_assertPass + else + failSame "${shunit_message_}" "$@" + shunit_return=${SHUNIT_FALSE} + fi + + unset shunit_message_ shunit_expected_ shunit_actual_ + return ${shunit_return} +} +_ASSERT_NOT_EQUALS_='eval assertNotEquals --lineno "${LINENO:-}"' + +# Assert that a value is null (i.e. an empty string) +# +# Args: +# message: string: failure message [optional] +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertNull() +{ + ${_SHUNIT_LINENO_} + if [ $# -lt 1 -o $# -gt 2 ]; then + _shunit_error "assertNull() requires one or two arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if [ $# -eq 2 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + assertTrue "${shunit_message_}" "[ -z '$1' ]" + shunit_return=$? + + unset shunit_message_ + return ${shunit_return} +} +_ASSERT_NULL_='eval assertNull --lineno "${LINENO:-}"' + +# Assert that a value is not null (i.e. a non-empty string) +# +# Args: +# message: string: failure message [optional] +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertNotNull() +{ + ${_SHUNIT_LINENO_} + if [ $# -gt 2 ]; then # allowing 0 arguments as $1 might actually be null + _shunit_error "assertNotNull() requires one or two arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if [ $# -eq 2 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + shunit_actual_=`_shunit_escapeCharactersInString "${1:-}"` + test -n "${shunit_actual_}" + assertTrue "${shunit_message_}" $? + shunit_return=$? + + unset shunit_actual_ shunit_message_ + return ${shunit_return} +} +_ASSERT_NOT_NULL_='eval assertNotNull --lineno "${LINENO:-}"' + +# Assert that two values are the same (i.e. equal to one another). +# +# Args: +# message: string: failure message [optional] +# expected: string: expected value +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertSame() +{ + ${_SHUNIT_LINENO_} + if [ $# -lt 2 -o $# -gt 3 ]; then + _shunit_error "assertSame() requires two or three arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if [ $# -eq 3 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + assertEquals "${shunit_message_}" "$1" "$2" + shunit_return=$? + + unset shunit_message_ + return ${shunit_return} +} +_ASSERT_SAME_='eval assertSame --lineno "${LINENO:-}"' + +# Assert that two values are not the same (i.e. not equal to one another). 
+# +# Args: +# message: string: failure message [optional] +# expected: string: expected value +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertNotSame() +{ + ${_SHUNIT_LINENO_} + if [ $# -lt 2 -o $# -gt 3 ]; then + _shunit_error "assertNotSame() requires two or three arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if [ $# -eq 3 ]; then + shunit_message_="${shunit_message_:-}$1" + shift + fi + assertNotEquals "${shunit_message_}" "$1" "$2" + shunit_return=$? + + unset shunit_message_ + return ${shunit_return} +} +_ASSERT_NOT_SAME_='eval assertNotSame --lineno "${LINENO:-}"' + +# Assert that a value or shell test condition is true. +# +# In shell, a value of 0 is true and a non-zero value is false. Any integer +# value passed can thereby be tested. +# +# Shell supports much more complicated tests though, and a means to support +# them was needed. As such, this function tests that conditions are true or +# false through evaluation rather than just looking for a true or false. +# +# The following test will succeed: +# assertTrue 0 +# assertTrue "[ 34 -gt 23 ]" +# The following test will fail with a message: +# assertTrue 123 +# assertTrue "test failed" "[ -r '/non/existent/file' ]" +# +# Args: +# message: string: failure message [optional] +# condition: string: integer value or shell conditional statement +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertTrue() +{ + ${_SHUNIT_LINENO_} + if [ $# -gt 2 ]; then + _shunit_error "assertTrue() requires one or two arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if [ $# -eq 2 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + shunit_condition_=$1 + + # see if condition is an integer, i.e. a return value + shunit_match_=`expr "${shunit_condition_}" : '\([0-9]*\)'` + shunit_return=${SHUNIT_TRUE} + if [ -z "${shunit_condition_}" ]; then + # null condition + shunit_return=${SHUNIT_FALSE} + elif [ -n "${shunit_match_}" -a "${shunit_condition_}" = "${shunit_match_}" ] + then + # possible return value. treating 0 as true, and non-zero as false. + [ ${shunit_condition_} -ne 0 ] && shunit_return=${SHUNIT_FALSE} + else + # (hopefully) a condition + ( eval ${shunit_condition_} ) >/dev/null 2>&1 + [ $? -ne 0 ] && shunit_return=${SHUNIT_FALSE} + fi + + # record the test + if [ ${shunit_return} -eq ${SHUNIT_TRUE} ]; then + _shunit_assertPass + else + _shunit_assertFail "${shunit_message_}" + fi + + unset shunit_message_ shunit_condition_ shunit_match_ + return ${shunit_return} +} +_ASSERT_TRUE_='eval assertTrue --lineno "${LINENO:-}"'
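+ +# Usage sketch (illustrative test, not part of shunit2): the _ASSERT_*_ macro +# spellings exist because plain sh has no caller line numbers; they eval the +# assert with the call site's ${LINENO} so a failure is prefixed "[lineno]". +# Note the extra level of quoting the macro form needs: +# +#   testExample() { +#     assertTrue "[ 3 -gt 2 ]" +#     ${_ASSERT_TRUE_} '"[ 3 -gt 2 ]"' +#   }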
+ +# Assert that a value or shell test condition is false. +# +# In shell, a value of 0 is true and a non-zero value is false. Any integer +# value passed can thereby be tested. +# +# Shell supports much more complicated tests though, and a means to support +# them was needed. As such, this function tests that conditions are true or +# false through evaluation rather than just looking for a true or false. +# +# The following test will succeed: +# assertFalse 1 +# assertFalse "[ 'apples' = 'oranges' ]" +# The following test will fail with a message: +# assertFalse 0 +# assertFalse "test failed" "[ 1 -eq 1 -a 2 -eq 2 ]" +# +# Args: +# message: string: failure message [optional] +# condition: string: integer value or shell conditional statement +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +assertFalse() +{ + ${_SHUNIT_LINENO_} + if [ $# -lt 1 -o $# -gt 2 ]; then + _shunit_error "assertFalse() requires one or two arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if [ $# -eq 2 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + shunit_condition_=$1 + + # see if condition is an integer, i.e. a return value + shunit_match_=`expr "${shunit_condition_}" : '\([0-9]*\)'` + shunit_return=${SHUNIT_TRUE} + if [ -z "${shunit_condition_}" ]; then + # null condition + shunit_return=${SHUNIT_FALSE} + elif [ -n "${shunit_match_}" -a "${shunit_condition_}" = "${shunit_match_}" ] + then + # possible return value. treating 0 as true, and non-zero as false. + [ ${shunit_condition_} -eq 0 ] && shunit_return=${SHUNIT_FALSE} + else + # (hopefully) a condition + ( eval ${shunit_condition_} ) >/dev/null 2>&1 + [ $? -eq 0 ] && shunit_return=${SHUNIT_FALSE} + fi + + # record the test + if [ ${shunit_return} -eq ${SHUNIT_TRUE} ]; then + _shunit_assertPass + else + _shunit_assertFail "${shunit_message_}" + fi + + unset shunit_message_ shunit_condition_ shunit_match_ + return ${shunit_return} +} +_ASSERT_FALSE_='eval assertFalse --lineno "${LINENO:-}"' + +#----------------------------------------------------------------------------- +# failure functions +# + +# Records a test failure. +# +# Args: +# message: string: failure message [optional] +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +fail() +{ + ${_SHUNIT_LINENO_} + if [ $# -gt 1 ]; then + _shunit_error "fail() requires zero or one arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if [ $# -eq 1 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + + _shunit_assertFail "${shunit_message_}" + + unset shunit_message_ + return ${SHUNIT_FALSE} +} +_FAIL_='eval fail --lineno "${LINENO:-}"' + +# Records a test failure, stating two values were not equal. +# +# Args: +# message: string: failure message [optional] +# expected: string: expected value +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +failNotEquals() +{ + ${_SHUNIT_LINENO_} + if [ $# -lt 2 -o $# -gt 3 ]; then + _shunit_error "failNotEquals() requires two or three arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if [ $# -eq 3 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + shunit_expected_=$1 + shunit_actual_=$2 + + _shunit_assertFail "${shunit_message_:+${shunit_message_} }expected:<${shunit_expected_}> but was:<${shunit_actual_}>" + + unset shunit_message_ shunit_expected_ shunit_actual_ + return ${SHUNIT_FALSE} +} +_FAIL_NOT_EQUALS_='eval failNotEquals --lineno "${LINENO:-}"'
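+ +# Output sketch (illustrative values): failNotEquals prepends the global +# ASSERT: prefix and formats the values JUnit-style, so +# +#   failNotEquals 'checksum mismatch' 'abc123' 'def456' +# +# prints: ASSERT:checksum mismatch expected:<abc123> but was:<def456>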
+ +# Records a test failure, stating two values should have been the same. +# +# Args: +# message: string: failure message [optional] +# expected: string: expected value +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +failSame() +{ + ${_SHUNIT_LINENO_} + if [ $# -lt 2 -o $# -gt 3 ]; then + _shunit_error "failSame() requires two or three arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if [ $# -eq 3 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + + _shunit_assertFail "${shunit_message_:+${shunit_message_} }expected not same" + + unset shunit_message_ + return ${SHUNIT_FALSE} +} +_FAIL_SAME_='eval failSame --lineno "${LINENO:-}"' + +# Records a test failure, stating two values were not equal. +# +# This is functionally equivalent to calling failNotEquals(). +# +# Args: +# message: string: failure message [optional] +# expected: string: expected value +# actual: string: actual value +# Returns: +# integer: success (TRUE/FALSE/ERROR constant) +failNotSame() +{ + ${_SHUNIT_LINENO_} + if [ $# -lt 2 -o $# -gt 3 ]; then + _shunit_error "failNotSame() requires two or three arguments; $# given" + return ${SHUNIT_ERROR} + fi + _shunit_shouldSkip && return ${SHUNIT_TRUE} + + shunit_message_=${__shunit_lineno} + if [ $# -eq 3 ]; then + shunit_message_="${shunit_message_}$1" + shift + fi + failNotEquals "${shunit_message_}" "$1" "$2" + shunit_return=$? + + unset shunit_message_ + return ${shunit_return} +} +_FAIL_NOT_SAME_='eval failNotSame --lineno "${LINENO:-}"' + +#----------------------------------------------------------------------------- +# skipping functions +# + +# Force remaining assert and fail functions to be "skipped". +# +# This function forces the remaining assert and fail functions to be "skipped", +# i.e. they will have no effect. Each function skipped will be recorded so that +# the total of asserts and fails will not be altered. +# +# Args: +# None +startSkipping() +{ + __shunit_skip=${SHUNIT_TRUE} +} + +# Resume the normal recording behavior of assert and fail calls. +# +# Args: +# None +endSkipping() +{ + __shunit_skip=${SHUNIT_FALSE} +} + +# Returns the state of assert and fail call skipping. +# +# Args: +# None +# Returns: +# boolean: (TRUE/FALSE constant) +isSkipping() +{ + return ${__shunit_skip} +}
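+ +# Skipping sketch (hypothetical test, not part of shunit2): guard asserts that +# only apply in some environments; skipped asserts are still counted in the +# totals rather than silently dropped: +# +#   testSignatureTooling() { +#     command -v gpg >/dev/null || startSkipping +#     assertTrue 'gpg is usable' 'gpg --version >/dev/null' +#     endSkipping +#   }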
+ +#----------------------------------------------------------------------------- +# suite functions +# + +# Stub. This function should contain all unit test calls to be made. +# +# DEPRECATED (as of 2.1.0) +# +# This function can be optionally overridden by the user in their test suite. +# +# If this function exists, it will be called when shunit2 is sourced. If it +# does not exist, shunit2 will search the parent script for all functions +# beginning with the word 'test', and they will be added dynamically to the +# test suite. +# +# This function should be overridden by the user in their unit test suite. +# Note: see _shunit_mktempFunc() for actual implementation +# +# Args: +# None +#suite() { :; } # DO NOT UNCOMMENT THIS FUNCTION + +# Adds a function name to the list of tests scheduled for execution. +# +# This function should only be called from within the suite() function. +# +# Args: +# function: string: name of a function to add to current unit test suite +suite_addTest() +{ + shunit_func_=${1:-} + + __shunit_suite="${__shunit_suite:+${__shunit_suite} }${shunit_func_}" + __shunit_testsTotal=`expr ${__shunit_testsTotal} + 1` + + unset shunit_func_ +} + +# Stub. This function will be called once before any tests are run. +# +# Common one-time environment preparation tasks shared by all tests can be +# defined here. +# +# This function should be overridden by the user in their unit test suite. +# Note: see _shunit_mktempFunc() for actual implementation +# +# Args: +# None +#oneTimeSetUp() { :; } # DO NOT UNCOMMENT THIS FUNCTION + +# Stub. This function will be called once after all tests are finished. +# +# Common one-time environment cleanup tasks shared by all tests can be defined +# here. +# +# This function should be overridden by the user in their unit test suite. +# Note: see _shunit_mktempFunc() for actual implementation +# +# Args: +# None +#oneTimeTearDown() { :; } # DO NOT UNCOMMENT THIS FUNCTION + +# Stub. This function will be called before each test is run. +# +# Common environment preparation tasks shared by all tests can be defined here. +# +# This function should be overridden by the user in their unit test suite. +# Note: see _shunit_mktempFunc() for actual implementation +# +# Args: +# None +#setUp() { :; } + +# Stub. This function will be called after each test is run. +# +# Common environment cleanup tasks shared by all tests can be defined here. +# +# This function should be overridden by the user in their unit test suite. +# Note: see _shunit_mktempFunc() for actual implementation +# +# Args: +# None +#tearDown() { :; } # DO NOT UNCOMMENT THIS FUNCTION + +#------------------------------------------------------------------------------ +# internal shUnit2 functions +# + +# Create a temporary directory to store various run-time files in. +# +# This function is a cross-platform temporary directory creation tool. Not all +# OSes have the mktemp function, so one is included here. +# +# Args: +# None +# Outputs: +# string: the temporary directory that was created +_shunit_mktempDir() +{ + # try the standard mktemp function + ( exec mktemp -dqt shunit.XXXXXX 2>/dev/null ) && return + + # the standard mktemp didn't work. doing our own. + if [ -r '/dev/urandom' -a -x '/usr/bin/od' ]; then + _shunit_random_=`/usr/bin/od -vAn -N4 -tx4 </dev/urandom |sed 's/^[^0-9a-f]*//'` + elif [ -n "${RANDOM:-}" ]; then + # $RANDOM works + _shunit_random_=${RANDOM}${RANDOM}${RANDOM}$$ + else + # $RANDOM doesn't work + _shunit_date_=`date '+%Y%m%d%H%M%S'` + _shunit_random_=`expr ${_shunit_date_} / $$` + fi + + _shunit_tmpDir_="${TMPDIR:-/tmp}/shunit.${_shunit_random_}" + ( umask 077 && mkdir "${_shunit_tmpDir_}" ) || _shunit_fatal 'could not create temporary directory! exiting' + + echo ${_shunit_tmpDir_} + unset _shunit_date_ _shunit_random_ _shunit_tmpDir_ +} + +# Create a phantom function (one that just returns success) for each of the +# overridable hook functions and for the noexec sentinel used below. +# +# Args: +# None +_shunit_mktempFunc() +{ + for _shunit_func_ in oneTimeSetUp oneTimeTearDown setUp tearDown suite noexec; do + _shunit_file_="${__shunit_tmpDir}/${_shunit_func_}" + cat <<EOF >"${_shunit_file_}" +#! /bin/sh +exit ${SHUNIT_TRUE} +EOF + chmod +x "${_shunit_file_}" + done + + unset _shunit_file_ +} + +# Final cleanup function to leave things as we found them. +# +# Besides removing the temporary directory, this function is in charge of the +# final exit code of the unit test. The exit code is based on how the script +# was ended (e.g. normal exit, or via Ctrl-C). +# +# Args: +# name: string: name of the trap called (specified when trap defined) +_shunit_cleanup() +{ + _shunit_name_=$1 + + case ${_shunit_name_} in + EXIT) _shunit_signal_=0 ;; + INT) _shunit_signal_=2 ;; + TERM) _shunit_signal_=15 ;; + *) + _shunit_warn "unrecognized trap value (${_shunit_name_})" + _shunit_signal_=0 + ;; + esac + + # do our work + rm -fr "${__shunit_tmpDir}" + + # exit for all non-EXIT signals + if [ ${_shunit_name_} != 'EXIT' ]; then + _shunit_warn "trapped and now handling the (${_shunit_name_}) signal" + # disable EXIT trap + trap 0 + # add 128 to signal and exit + exit `expr ${_shunit_signal_} + 128` + elif [ ${__shunit_reportGenerated} -eq ${SHUNIT_FALSE} ] ; then + _shunit_assertFail 'Unknown failure encountered running a test' + _shunit_generateReport + exit ${SHUNIT_ERROR} + fi + + unset _shunit_name_ _shunit_signal_ +}
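+ +# Exit-code sketch (POSIX convention, not shunit2-specific): death by signal +# reports 128+N, so the INT trap above exits 130 (128+2) and the TERM trap +# exits 143 (128+15), while a normal EXIT falls through to report generation.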
+ +# The actual running of the tests happens here. +# +# Args: +# None +_shunit_execSuite() +{ + for _shunit_test_ in ${__shunit_suite}; do + __shunit_testSuccess=${SHUNIT_TRUE} + + # disable skipping + endSkipping + + # execute the per-test setup function + setUp + + # execute the test + echo "${_shunit_test_}" + eval ${_shunit_test_} + + # execute the per-test tear-down function + tearDown + + # update stats + if [ ${__shunit_testSuccess} -eq ${SHUNIT_TRUE} ]; then + __shunit_testsPassed=`expr ${__shunit_testsPassed} + 1` + else + __shunit_testsFailed=`expr ${__shunit_testsFailed} + 1` + fi + done + + unset _shunit_test_ +} + +# Generates the user-friendly report with appropriate OK/FAILED message. +# +# Args: +# None +# Output: +# string: the report of successful and failed tests, as well as totals. +_shunit_generateReport() +{ + _shunit_ok_=${SHUNIT_TRUE} + + # if no exit code was provided, determine an appropriate one + [ ${__shunit_testsFailed} -gt 0 \ + -o ${__shunit_testSuccess} -eq ${SHUNIT_FALSE} ] \ + && _shunit_ok_=${SHUNIT_FALSE} + + echo + if [ ${__shunit_testsTotal} -eq 1 ]; then + echo "Ran ${__shunit_testsTotal} test." + else + echo "Ran ${__shunit_testsTotal} tests." + fi + + _shunit_failures_='' + _shunit_skipped_='' + [ ${__shunit_assertsFailed} -gt 0 ] \ + && _shunit_failures_="failures=${__shunit_assertsFailed}" + [ ${__shunit_assertsSkipped} -gt 0 ] \ + && _shunit_skipped_="skipped=${__shunit_assertsSkipped}" + + if [ ${_shunit_ok_} -eq ${SHUNIT_TRUE} ]; then + _shunit_msg_='OK' + [ -n "${_shunit_skipped_}" ] \ + && _shunit_msg_="${_shunit_msg_} (${_shunit_skipped_})" + else + _shunit_msg_="FAILED (${_shunit_failures_}" + [ -n "${_shunit_skipped_}" ] \ + && _shunit_msg_="${_shunit_msg_},${_shunit_skipped_}" + _shunit_msg_="${_shunit_msg_})" + fi + + echo + echo ${_shunit_msg_} + __shunit_reportGenerated=${SHUNIT_TRUE} + + unset _shunit_failures_ _shunit_msg_ _shunit_ok_ _shunit_skipped_ +} + +# Test for whether a function should be skipped. +# +# Args: +# None +# Returns: +# boolean: whether the test should be skipped (TRUE/FALSE constant) +_shunit_shouldSkip() +{ + [ ${__shunit_skip} -eq ${SHUNIT_FALSE} ] && return ${SHUNIT_FALSE} + _shunit_assertSkip +} + +# Records a successful test. +# +# Args: +# None +_shunit_assertPass() +{ + __shunit_assertsPassed=`expr ${__shunit_assertsPassed} + 1` + __shunit_assertsTotal=`expr ${__shunit_assertsTotal} + 1` +} + +# Records a test failure. +# +# Args: +# message: string: failure message to provide user +_shunit_assertFail() +{ + _shunit_msg_=$1 + + __shunit_testSuccess=${SHUNIT_FALSE} + __shunit_assertsFailed=`expr ${__shunit_assertsFailed} + 1` + __shunit_assertsTotal=`expr ${__shunit_assertsTotal} + 1` + echo "${__SHUNIT_ASSERT_MSG_PREFIX}${_shunit_msg_}" + + unset _shunit_msg_ +} + +# Records a skipped test. +# +# Args: +# None +_shunit_assertSkip() +{ + __shunit_assertsSkipped=`expr ${__shunit_assertsSkipped} + 1` + __shunit_assertsTotal=`expr ${__shunit_assertsTotal} + 1` +} + +# Prepare a script filename for sourcing. +# +# Args: +# script: string: path to a script to source +# Returns: +# string: filename prefixed with ./ (if necessary) +_shunit_prepForSourcing() +{ + _shunit_script_=$1 + case "${_shunit_script_}" in + /*|./*) echo "${_shunit_script_}" ;; + *) echo "./${_shunit_script_}" ;; + esac + unset _shunit_script_ +}
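+ +# Behavior sketch: _shunit_prepForSourcing leaves absolute and ./-relative +# paths alone and prefixes bare names, e.g. 'mytest.sh' becomes './mytest.sh', +# so the later '.' (source) call never searches $PATH for the script.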
+ +# Escape a character in a string. +# +# Args: +# c: string: unescaped character +# s: string: to escape character in +# Returns: +# string: with escaped character(s) +_shunit_escapeCharInStr() +{ + [ -n "$2" ] || return # no point in doing work on an empty string + + # Note: using shorter variable names to prevent conflicts with + # _shunit_escapeCharactersInString(). + _shunit_c_=$1 + _shunit_s_=$2 + + + # escape the character + echo ''${_shunit_s_}'' |sed 's/\'${_shunit_c_}'/\\\'${_shunit_c_}'/g' + + unset _shunit_c_ _shunit_s_ +} + +# Escape special characters in a string. +# +# Args: +# str: string: to escape characters in +# Returns: +# string: with escaped character(s) +_shunit_escapeCharactersInString() +{ + [ -n "$1" ] || return # no point in doing work on an empty string + + _shunit_str_=$1 + + # Note: using longer variable names to prevent conflicts with + # _shunit_escapeCharInStr(). + for _shunit_char_ in '"' '$' "'" '`'; do + _shunit_str_=`_shunit_escapeCharInStr "${_shunit_char_}" "${_shunit_str_}"` + done + + echo "${_shunit_str_}" + unset _shunit_char_ _shunit_str_ +} + +# Extract list of functions to run tests against. +# +# Args: +# script: string: name of script to extract functions from +# Returns: +# string: of function names +_shunit_extractTestFunctions() +{ + _shunit_script_=$1 + + # extract the lines with test function names, strip off anything besides the + # function name, and output everything on a single line. + _shunit_regex_='^[ ]*(function )*test[A-Za-z0-9_]* *\(\)' + egrep "${_shunit_regex_}" "${_shunit_script_}" \ + |sed 's/^[^A-Za-z0-9_]*//;s/^function //;s/\([A-Za-z0-9_]*\).*/\1/g' \ + |xargs + + unset _shunit_regex_ _shunit_script_ +} + +#------------------------------------------------------------------------------ +# main +# + +# determine the operating mode +if [ $# -eq 0 ]; then + __shunit_script=${__SHUNIT_PARENT} + __shunit_mode=${__SHUNIT_MODE_SOURCED} +else + __shunit_script=$1 + [ -r "${__shunit_script}" ] || \ + _shunit_fatal "unable to read from ${__shunit_script}" + __shunit_mode=${__SHUNIT_MODE_STANDALONE} +fi + +# create a temporary storage location +__shunit_tmpDir=`_shunit_mktempDir` + +# provide a public temporary directory for unit test scripts +# TODO(kward): document this +SHUNIT_TMPDIR="${__shunit_tmpDir}/tmp" +mkdir "${SHUNIT_TMPDIR}" + +# setup traps to clean up after ourselves +trap '_shunit_cleanup EXIT' 0 +trap '_shunit_cleanup INT' 2 +trap '_shunit_cleanup TERM' 15 + +# create phantom functions to work around issues with Cygwin +_shunit_mktempFunc +PATH="${__shunit_tmpDir}:${PATH}" + +# make sure phantom functions are executable. this will bite if /tmp (or the +# current $TMPDIR) points to a path on a partition that was mounted with the +# 'noexec' option. the noexec command was created with _shunit_mktempFunc(). +noexec 2>/dev/null || _shunit_fatal \ + 'please declare TMPDIR with path on partition with exec permission' + +# we must manually source the tests in standalone mode +if [ "${__shunit_mode}" = "${__SHUNIT_MODE_STANDALONE}" ]; then + . 
"`_shunit_prepForSourcing \"${__shunit_script}\"`" +fi + +# execute the oneTimeSetUp function (if it exists) +oneTimeSetUp + +# execute the suite function defined in the parent test script +# deprecated as of 2.1.0 +suite + +# if no suite function was defined, dynamically build a list of functions +if [ -z "${__shunit_suite}" ]; then + shunit_funcs_=`_shunit_extractTestFunctions "${__shunit_script}"` + for shunit_func_ in ${shunit_funcs_}; do + suite_addTest ${shunit_func_} + done +fi +unset shunit_func_ shunit_funcs_ + +# execute the tests +_shunit_execSuite + +# execute the oneTimeTearDown function (if it exists) +oneTimeTearDown + +# generate the report +_shunit_generateReport + +# that's it folks +[ ${__shunit_testsFailed} -eq 0 ] +exit $? diff --git a/extra/lukeshu-xbs/test/packages/pkg-any-a/PKGBUILD b/extra/lukeshu-xbs/test/packages/pkg-any-a/PKGBUILD new file mode 100644 index 0000000..8749a35 --- /dev/null +++ b/extra/lukeshu-xbs/test/packages/pkg-any-a/PKGBUILD @@ -0,0 +1,12 @@ +pkgname=pkg-any-a +pkgver=1 +pkgrel=1 +pkgdesc="A package called ${pkgname}" +arch=('any') +url='http://www.archlinux.org/' +license=('GPL') + +package() { + install -d -m755 ${pkgdir}/usr/share/${pkgname} + echo 'test' > ${pkgdir}/usr/share/${pkgname}/test +} diff --git a/extra/lukeshu-xbs/test/packages/pkg-any-b/PKGBUILD b/extra/lukeshu-xbs/test/packages/pkg-any-b/PKGBUILD new file mode 100644 index 0000000..e6a0498 --- /dev/null +++ b/extra/lukeshu-xbs/test/packages/pkg-any-b/PKGBUILD @@ -0,0 +1,12 @@ +pkgname=pkg-any-b +pkgver=1 +pkgrel=1 +pkgdesc="A package called ${pkgname}" +arch=('any') +url='http://www.archlinux.org/' +license=('GPL') + +package() { + install -d -m755 ${pkgdir}/usr/share/${pkgname} + echo 'test' > ${pkgdir}/usr/share/${pkgname}/test +} diff --git a/extra/lukeshu-xbs/test/packages/pkg-simple-a/Makefile b/extra/lukeshu-xbs/test/packages/pkg-simple-a/Makefile new file mode 120000 index 0000000..50be211 --- /dev/null +++ b/extra/lukeshu-xbs/test/packages/pkg-simple-a/Makefile @@ -0,0 +1 @@ +../../src/Makefile \ No newline at end of file diff --git a/extra/lukeshu-xbs/test/packages/pkg-simple-a/PKGBUILD b/extra/lukeshu-xbs/test/packages/pkg-simple-a/PKGBUILD new file mode 100644 index 0000000..953ecfa --- /dev/null +++ b/extra/lukeshu-xbs/test/packages/pkg-simple-a/PKGBUILD @@ -0,0 +1,22 @@ +pkgname=pkg-simple-a +pkgver=1 +pkgrel=1 +pkgdesc="A package called ${pkgname}" +arch=('i686' 'x86_64') +url='http://www.archlinux.org/' +license=('GPL') +depends=('glibc') +makedepends=('gcc') +source=('Makefile' 'test.c') +md5sums=('c6cb8dcc86253355fed559416d0c8dcf' + '3c1e4279feb678fd9cabaccdb28e40d0') + +build() { + cd ${srcdir} + make +} + +package() { + cd ${srcdir} + make install DESTDIR=${pkgdir} DESTBIN=${pkgname} +} diff --git a/extra/lukeshu-xbs/test/packages/pkg-simple-a/test.c b/extra/lukeshu-xbs/test/packages/pkg-simple-a/test.c new file mode 120000 index 0000000..ed5b5ac --- /dev/null +++ b/extra/lukeshu-xbs/test/packages/pkg-simple-a/test.c @@ -0,0 +1 @@ +../../src/test.c \ No newline at end of file diff --git a/extra/lukeshu-xbs/test/packages/pkg-simple-b/Makefile b/extra/lukeshu-xbs/test/packages/pkg-simple-b/Makefile new file mode 120000 index 0000000..50be211 --- /dev/null +++ b/extra/lukeshu-xbs/test/packages/pkg-simple-b/Makefile @@ -0,0 +1 @@ +../../src/Makefile \ No newline at end of file diff --git a/extra/lukeshu-xbs/test/packages/pkg-simple-b/PKGBUILD b/extra/lukeshu-xbs/test/packages/pkg-simple-b/PKGBUILD new file mode 100644 index 0000000..95ffd09 --- /dev/null +++ 
b/extra/lukeshu-xbs/test/packages/pkg-simple-b/PKGBUILD @@ -0,0 +1,22 @@ +pkgname=pkg-simple-b +pkgver=1 +pkgrel=1 +pkgdesc="A package called ${pkgname}" +arch=('i686' 'x86_64') +url='http://www.archlinux.org/' +license=('GPL') +depends=('glibc') +makedepends=('gcc') +source=('Makefile' 'test.c') +md5sums=('c6cb8dcc86253355fed559416d0c8dcf' + '3c1e4279feb678fd9cabaccdb28e40d0') + +build() { + cd ${srcdir} + make +} + +package() { + cd ${srcdir} + make install DESTDIR=${pkgdir} DESTBIN=${pkgname} +} diff --git a/extra/lukeshu-xbs/test/packages/pkg-simple-b/test.c b/extra/lukeshu-xbs/test/packages/pkg-simple-b/test.c new file mode 120000 index 0000000..ed5b5ac --- /dev/null +++ b/extra/lukeshu-xbs/test/packages/pkg-simple-b/test.c @@ -0,0 +1 @@ +../../src/test.c \ No newline at end of file diff --git a/extra/lukeshu-xbs/test/packages/pkg-simple-epoch/Makefile b/extra/lukeshu-xbs/test/packages/pkg-simple-epoch/Makefile new file mode 120000 index 0000000..50be211 --- /dev/null +++ b/extra/lukeshu-xbs/test/packages/pkg-simple-epoch/Makefile @@ -0,0 +1 @@ +../../src/Makefile \ No newline at end of file diff --git a/extra/lukeshu-xbs/test/packages/pkg-simple-epoch/PKGBUILD b/extra/lukeshu-xbs/test/packages/pkg-simple-epoch/PKGBUILD new file mode 100644 index 0000000..eebe2bd --- /dev/null +++ b/extra/lukeshu-xbs/test/packages/pkg-simple-epoch/PKGBUILD @@ -0,0 +1,23 @@ +pkgname=pkg-simple-epoch +pkgver=1 +pkgrel=1 +epoch=1 +pkgdesc="A package called ${pkgname}" +arch=('i686' 'x86_64') +url='http://www.archlinux.org/' +license=('GPL') +depends=('glibc') +makedepends=('gcc') +source=('Makefile' 'test.c') +md5sums=('c6cb8dcc86253355fed559416d0c8dcf' + '3c1e4279feb678fd9cabaccdb28e40d0') + +build() { + cd ${srcdir} + make +} + +package() { + cd ${srcdir} + make install DESTDIR=${pkgdir} DESTBIN=${pkgname} +} diff --git a/extra/lukeshu-xbs/test/packages/pkg-simple-epoch/test.c b/extra/lukeshu-xbs/test/packages/pkg-simple-epoch/test.c new file mode 120000 index 0000000..ed5b5ac --- /dev/null +++ b/extra/lukeshu-xbs/test/packages/pkg-simple-epoch/test.c @@ -0,0 +1 @@ +../../src/test.c \ No newline at end of file diff --git a/extra/lukeshu-xbs/test/packages/pkg-split-a/Makefile b/extra/lukeshu-xbs/test/packages/pkg-split-a/Makefile new file mode 120000 index 0000000..50be211 --- /dev/null +++ b/extra/lukeshu-xbs/test/packages/pkg-split-a/Makefile @@ -0,0 +1 @@ +../../src/Makefile \ No newline at end of file diff --git a/extra/lukeshu-xbs/test/packages/pkg-split-a/PKGBUILD b/extra/lukeshu-xbs/test/packages/pkg-split-a/PKGBUILD new file mode 100644 index 0000000..e941976 --- /dev/null +++ b/extra/lukeshu-xbs/test/packages/pkg-split-a/PKGBUILD @@ -0,0 +1,28 @@ +pkgbase=pkg-split-a +pkgname=('pkg-split-a1' 'pkg-split-a2') +pkgver=1 +pkgrel=1 +pkgdesc="A split package called ${pkgbase}" +arch=('i686' 'x86_64') +url='http://www.archlinux.org/' +license=('GPL') +depends=('glibc') +makedepends=('gcc') +source=('Makefile' 'test.c') +md5sums=('c6cb8dcc86253355fed559416d0c8dcf' + '3c1e4279feb678fd9cabaccdb28e40d0') + +build() { + cd ${srcdir} + make +} + +package_pkg-split-a1() { + cd ${srcdir} + make install DESTDIR=${pkgdir} DESTBIN=${pkgname[0]} +} + +package_pkg-split-a2() { + cd ${srcdir} + make install DESTDIR=${pkgdir} DESTBIN=${pkgname[1]} +} diff --git a/extra/lukeshu-xbs/test/packages/pkg-split-a/test.c b/extra/lukeshu-xbs/test/packages/pkg-split-a/test.c new file mode 120000 index 0000000..ed5b5ac --- /dev/null +++ b/extra/lukeshu-xbs/test/packages/pkg-split-a/test.c @@ -0,0 +1 @@ +../../src/test.c \ No 
newline at end of file diff --git a/extra/lukeshu-xbs/test/packages/pkg-split-b/Makefile b/extra/lukeshu-xbs/test/packages/pkg-split-b/Makefile new file mode 120000 index 0000000..50be211 --- /dev/null +++ b/extra/lukeshu-xbs/test/packages/pkg-split-b/Makefile @@ -0,0 +1 @@ +../../src/Makefile \ No newline at end of file diff --git a/extra/lukeshu-xbs/test/packages/pkg-split-b/PKGBUILD b/extra/lukeshu-xbs/test/packages/pkg-split-b/PKGBUILD new file mode 100644 index 0000000..6ddbc45 --- /dev/null +++ b/extra/lukeshu-xbs/test/packages/pkg-split-b/PKGBUILD @@ -0,0 +1,29 @@ +pkgbase=pkg-split-b +pkgname=('pkg-split-b1' 'pkg-split-b2') +pkgver=1 +pkgrel=1 +pkgdesc="A split package called ${pkgbase}" +arch=('i686' 'x86_64') +url='http://www.archlinux.org/' +license=('GPL') + +depends=('glibc') +makedepends=('gcc') +source=('Makefile' 'test.c') +md5sums=('c6cb8dcc86253355fed559416d0c8dcf' + '3c1e4279feb678fd9cabaccdb28e40d0') + +build() { + cd ${srcdir} + make +} + +package_pkg-split-b1() { + cd ${srcdir} + make install DESTDIR=${pkgdir} DESTBIN=${pkgname[0]} +} + +package_pkg-split-b2() { + cd ${srcdir} + make install DESTDIR=${pkgdir} DESTBIN=${pkgname[1]} +} diff --git a/extra/lukeshu-xbs/test/packages/pkg-split-b/test.c b/extra/lukeshu-xbs/test/packages/pkg-split-b/test.c new file mode 120000 index 0000000..ed5b5ac --- /dev/null +++ b/extra/lukeshu-xbs/test/packages/pkg-split-b/test.c @@ -0,0 +1 @@ +../../src/test.c \ No newline at end of file diff --git a/extra/lukeshu-xbs/test/rsync_output_sample b/extra/lukeshu-xbs/test/rsync_output_sample new file mode 100644 index 0000000..72d9cd0 --- /dev/null +++ b/extra/lukeshu-xbs/test/rsync_output_sample @@ -0,0 +1,14 @@ +dr-xr-sr-x 4096 2010/09/11 11:37:10 . +-rw-r--r-- 11 2011/02/08 00:00:01 lastsync +drwxrwxr-x 15 2010/09/11 11:28:50 community-staging +drwxrwxr-x 30 2010/09/11 11:28:50 community-staging/os +drwxrwxr-x 8192 2011/02/07 17:00:01 community-staging/os/i686 +lrwxrwxrwx 52 2010/12/23 16:51:01 community-staging/os/i686/alex-2.3.4-1-i686.pkg.tar.xz -> ../../../pool/community/alex-2.3.4-1-i686.pkg.tar.xz +lrwxrwxrwx 27 2011/02/07 14:02:54 community-staging/os/i686/community-staging.db -> community-staging.db.tar.gz +-rw-rw-r-- 2237 2011/02/07 14:02:54 community-staging/os/i686/community-staging.db.tar.gz +-rw-rw-r-- 3209 2011/02/07 14:00:13 community-staging/os/i686/community-staging.db.tar.gz.old +drwxrwxr-x 15 2009/07/22 15:07:56 community +drwxrwxr-x 40 2009/08/04 15:57:42 community/os +drwxrwsr-x 36864 2011/02/03 05:00:01 community/os/any +-rw-rw-r-- 303336 2010/07/16 10:06:28 community/os/any/any2dvd-0.34-4-any.pkg.tar.xz +-rw-rw-r-- 221664 2010/03/28 15:55:48 community/os/x86_64/gmime22-2.2.26-1-x86_64.pkg.tar.xz diff --git a/extra/lukeshu-xbs/test/runTest b/extra/lukeshu-xbs/test/runTest new file mode 100755 index 0000000..b8713d8 --- /dev/null +++ b/extra/lukeshu-xbs/test/runTest @@ -0,0 +1,15 @@ +#!/bin/bash + +. "$(dirname ${BASH_SOURCE[0]})/lib/common.inc" + +for t in "$(dirname ${BASH_SOURCE[0]})/test.d/"*.sh; do + l=$(basename ${t} .sh) + if [ -x ${t} ]; then + msg "Running test '${l}'" + ${t} + [ $? 
-ne 0 ] && die "Test '${l}' failed" + echo -e "\n\n\n" + else + warning "Skipping test ${l}" + fi +done diff --git a/extra/lukeshu-xbs/test/src/Makefile b/extra/lukeshu-xbs/test/src/Makefile new file mode 100644 index 0000000..105b730 --- /dev/null +++ b/extra/lukeshu-xbs/test/src/Makefile @@ -0,0 +1,5 @@ +all: + gcc $(CFLAGS) -o test test.c + +install: + install -D -m755 test $(DESTDIR)/usr/bin/$(DESTBIN) diff --git a/extra/lukeshu-xbs/test/src/test.c b/extra/lukeshu-xbs/test/src/test.c new file mode 100644 index 0000000..a661689 --- /dev/null +++ b/extra/lukeshu-xbs/test/src/test.c @@ -0,0 +1,7 @@ +#include <stdio.h> +#include <stdlib.h> + +int main(void) { + printf("Arch is the best!\n"); + return EXIT_SUCCESS; +} diff --git a/extra/lukeshu-xbs/test/test.d/create-filelists.sh b/extra/lukeshu-xbs/test/test.d/create-filelists.sh new file mode 100755 index 0000000..b5ec5c8 --- /dev/null +++ b/extra/lukeshu-xbs/test/test.d/create-filelists.sh @@ -0,0 +1,105 @@ +#!/bin/bash + +curdir="$(readlink -e "$(dirname "$0")")" +. "${curdir}/../lib/common.inc" + +testCreateSimpleFileLists() { + local arches=(`arches`) + local pkgs=('pkg-simple-a' 'pkg-simple-b' 'pkg-simple-epoch') + local pkgbase + local arch + + for pkgbase in "${pkgs[@]}"; do + for arch in "${arches[@]}"; do + releasePackage extra "${pkgbase}" "${arch}" + done + done + ../db-update + + for pkgbase in "${pkgs[@]}"; do + for arch in "${arches[@]}"; do + if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep "usr/bin/${pkgbase}" &>/dev/null; then + fail "usr/bin/${pkgbase} not found in ${arch}/extra${FILESEXT}" + fi + done + done +} + +testCreateAnyFileLists() { + local arches=(`arches`) + local pkgs=('pkg-any-a' 'pkg-any-b') + local pkgbase + local arch + + for pkgbase in "${pkgs[@]}"; do + releasePackage extra "${pkgbase}" any + done + ../db-update + + for pkgbase in "${pkgs[@]}"; do + for arch in "${arches[@]}"; do + if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep "usr/share/${pkgbase}/test" &>/dev/null; then + fail "usr/share/${pkgbase}/test not found in ${arch}/extra${FILESEXT}" + fi + done + done +} + +testCreateSplitFileLists() { + local arches=(`arches`) + local pkgs=('pkg-split-a' 'pkg-split-b') + local pkg + local pkgbase + local pkgname + local pkgnames + local arch + + for pkgbase in "${pkgs[@]}"; do + for arch in "${arches[@]}"; do + releasePackage extra "${pkgbase}" "${arch}" + done + done + ../db-update + + for pkgbase in "${pkgs[@]}"; do + pkgnames=($(source "${TMP}/svn-packages-copy/${pkgbase}/trunk/PKGBUILD"; echo "${pkgname[@]}")) + for pkgname in "${pkgnames[@]}"; do + for arch in "${arches[@]}"; do + if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep "usr/bin/${pkgname}" &>/dev/null; then + fail "usr/bin/${pkgname} not found in ${arch}/extra${FILESEXT}" + fi + done + done + done +} + + +testCleanupFileLists() { + local arches=(`arches`) + local pkgs=('pkg-simple-a' 'pkg-simple-b') + local pkgbase + local arch + + for pkgbase in ${pkgs[@]}; do + for arch in ${arches[@]}; do + releasePackage extra ${pkgbase} ${arch} + done + done + ../db-update + + for arch in ${arches[@]}; do + ../db-remove extra ${arch} pkg-simple-a + done + + for arch in ${arches[@]}; do + if ! 
+			fail "usr/bin/pkg-simple-b not found in ${arch}/extra${FILESEXT}"
+		fi
+		if bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep "usr/bin/pkg-simple-a" &>/dev/null; then
+			fail "usr/bin/pkg-simple-a still found in ${arch}/extra${FILESEXT}"
+		fi
+	done
+
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/lukeshu-xbs/test/test.d/db-move.sh b/extra/lukeshu-xbs/test/test.d/db-move.sh
new file mode 100755
index 0000000..3cf355b
--- /dev/null
+++ b/extra/lukeshu-xbs/test/test.d/db-move.sh
@@ -0,0 +1,122 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testMoveSimplePackages() {
+	local arches=(`arches`)
+	local pkgs=('pkg-simple-a' 'pkg-simple-b')
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			releasePackage testing ${pkgbase} ${arch}
+		done
+	done
+
+	../db-update
+
+	../db-move testing extra pkg-simple-a
+
+	for arch in ${arches[@]}; do
+		checkPackage extra pkg-simple-a-1-1-${arch}.pkg.tar.xz ${arch}
+		checkRemovedPackage testing pkg-simple-a-1-1-${arch}.pkg.tar.xz ${arch}
+
+		checkPackage testing pkg-simple-b-1-1-${arch}.pkg.tar.xz ${arch}
+	done
+}
+
+testMoveMultiplePackages() {
+	local arches=(`arches`)
+	local pkgs=('pkg-simple-a' 'pkg-simple-b')
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			releasePackage testing ${pkgbase} ${arch}
+		done
+	done
+
+	../db-update
+
+	../db-move testing extra pkg-simple-a pkg-simple-b
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			checkPackage extra ${pkgbase}-1-1-${arch}.pkg.tar.xz ${arch}
+			checkRemovedPackage testing ${pkgbase}-1-1-${arch}.pkg.tar.xz ${arch}
+		done
+	done
+}
+
+testMoveEpochPackages() {
+	local arches=(`arches`)
+	local pkgs=('pkg-simple-epoch')
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			releasePackage testing ${pkgbase} ${arch}
+		done
+	done
+
+	../db-update
+
+	../db-move testing extra pkg-simple-epoch
+
+	for arch in ${arches[@]}; do
+		checkPackage extra pkg-simple-epoch-1:1-1-${arch}.pkg.tar.xz ${arch}
+		checkRemovedPackage testing pkg-simple-epoch-1:1-1-${arch}.pkg.tar.xz ${arch}
+	done
+}
+
+testMoveAnyPackages() {
+	local pkgs=('pkg-any-a' 'pkg-any-b')
+	local pkgbase
+
+	for pkgbase in ${pkgs[@]}; do
+		releasePackage testing ${pkgbase} any
+	done
+
+	../db-update
+	../db-move testing extra pkg-any-a
+
+	checkAnyPackage extra pkg-any-a-1-1-any.pkg.tar.xz
+	checkRemovedAnyPackage testing pkg-any-a
+	checkAnyPackage testing pkg-any-b-1-1-any.pkg.tar.xz
+}
+
+testMoveSplitPackages() {
+	local arches=(`arches`)
+	local pkgs=('pkg-split-a' 'pkg-split-b')
+	local pkg
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			releasePackage testing ${pkgbase} ${arch}
+		done
+	done
+
+	../db-update
+	../db-move testing extra pkg-split-a
+
+	for arch in ${arches[@]}; do
+		for pkg in "${pkgdir}/pkg-split-a"/*-${arch}${PKGEXT}; do
+			checkPackage extra ${pkg##*/} ${arch}
+		done
+	done
+	for arch in ${arches[@]}; do
+		for pkg in "${pkgdir}/pkg-split-b"/*-${arch}${PKGEXT}; do
+			checkPackage testing ${pkg##*/} ${arch}
+		done
+	done
+
+	checkRemovedAnyPackage testing pkg-split-a
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/lukeshu-xbs/test/test.d/db-remove.sh b/extra/lukeshu-xbs/test/test.d/db-remove.sh
new file mode 100755
index 0000000..d79605e
--- /dev/null
+++ b/extra/lukeshu-xbs/test/test.d/db-remove.sh
@@ -0,0 +1,77 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testRemovePackages() {
+	local arches=(`arches`)
+	local pkgs=('pkg-simple-a' 'pkg-simple-b' 'pkg-split-a' 'pkg-split-b' 'pkg-simple-epoch')
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			releasePackage extra ${pkgbase} ${arch}
+		done
+	done
+
+	../db-update
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			../db-remove extra ${arch} ${pkgbase}
+		done
+	done
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			checkRemovedPackage extra ${pkgbase} ${arch}
+		done
+	done
+}
+
+testRemoveMultiplePackages() {
+	local arches=(`arches`)
+	local pkgs=('pkg-simple-a' 'pkg-simple-b' 'pkg-split-a' 'pkg-split-b' 'pkg-simple-epoch')
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			releasePackage extra ${pkgbase} ${arch}
+		done
+	done
+
+	../db-update
+
+	for arch in ${arches[@]}; do
+		../db-remove extra ${arch} ${pkgs[@]}
+	done
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			checkRemovedPackage extra ${pkgbase} ${arch}
+		done
+	done
+}
+
+testRemoveAnyPackages() {
+	local pkgs=('pkg-any-a' 'pkg-any-b')
+	local pkgbase
+
+	for pkgbase in ${pkgs[@]}; do
+		releasePackage extra ${pkgbase} any
+	done
+
+	../db-update
+
+	for pkgbase in ${pkgs[@]}; do
+		../db-remove extra any ${pkgbase}
+	done
+
+	for pkgbase in ${pkgs[@]}; do
+		checkRemovedAnyPackage extra ${pkgbase}
+	done
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/lukeshu-xbs/test/test.d/db-repo-add.sh b/extra/lukeshu-xbs/test/test.d/db-repo-add.sh
new file mode 100755
index 0000000..09fc52f
--- /dev/null
+++ b/extra/lukeshu-xbs/test/test.d/db-repo-add.sh
@@ -0,0 +1,54 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testAddSimplePackages() {
+	local arches=(`arches`)
+	local pkgs=('pkg-simple-a' 'pkg-simple-b')
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			cp "${pkgdir}/${pkgbase}/${pkgbase}-1-1-${arch}.pkg.tar.xz" "${FTP_BASE}/${PKGPOOL}/"
+			touch "${FTP_BASE}/${PKGPOOL}/${pkgbase}-1-1-${arch}.pkg.tar.xz.sig"
+			ln -s "${FTP_BASE}/${PKGPOOL}/${pkgbase}-1-1-${arch}.pkg.tar.xz" "${FTP_BASE}/extra/os/${arch}/"
+			ln -s "${FTP_BASE}/${PKGPOOL}/${pkgbase}-1-1-${arch}.pkg.tar.xz.sig" "${FTP_BASE}/extra/os/${arch}/"
+			../db-repo-add extra ${arch} ${pkgbase}-1-1-${arch}.pkg.tar.xz
+		done
+	done
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			checkPackageDB extra ${pkgbase}-1-1-${arch}.pkg.tar.xz ${arch}
+		done
+	done
+}
+
+testAddMultiplePackages() {
+	local arches=(`arches`)
+	local pkgs=('pkg-simple-a' 'pkg-simple-b')
+	local pkgbase
+	local arch
+
+	for arch in ${arches[@]}; do
+		add_pkgs=()
+		for pkgbase in ${pkgs[@]}; do
+			cp "${pkgdir}/${pkgbase}/${pkgbase}-1-1-${arch}.pkg.tar.xz" "${FTP_BASE}/${PKGPOOL}/"
+			touch "${FTP_BASE}/${PKGPOOL}/${pkgbase}-1-1-${arch}.pkg.tar.xz.sig"
+			ln -s "${FTP_BASE}/${PKGPOOL}/${pkgbase}-1-1-${arch}.pkg.tar.xz" "${FTP_BASE}/extra/os/${arch}/"
+			ln -s "${FTP_BASE}/${PKGPOOL}/${pkgbase}-1-1-${arch}.pkg.tar.xz.sig" "${FTP_BASE}/extra/os/${arch}/"
+			add_pkgs[${#add_pkgs[*]}]=${pkgbase}-1-1-${arch}.pkg.tar.xz
+		done
+		../db-repo-add extra ${arch} ${add_pkgs[@]}
+	done
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			checkPackageDB extra ${pkgbase}-1-1-${arch}.pkg.tar.xz ${arch}
+		done
+	done
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/lukeshu-xbs/test/test.d/db-repo-remove.sh b/extra/lukeshu-xbs/test/test.d/db-repo-remove.sh
new file mode 100755
index 0000000..eec0109
--- /dev/null
+++ b/extra/lukeshu-xbs/test/test.d/db-repo-remove.sh
@@ -0,0 +1,58 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testRemovePackages() {
+	local arches=(`arches`)
+	local pkgs=('pkg-simple-a' 'pkg-simple-b' 'pkg-simple-epoch')
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			releasePackage extra ${pkgbase} ${arch}
+		done
+	done
+
+	../db-update
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			../db-repo-remove extra ${arch} ${pkgbase}
+		done
+	done
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			checkRemovedPackageDB extra ${pkgbase} ${arch}
+		done
+	done
+}
+
+testRemoveMultiplePackages() {
+	local arches=(`arches`)
+	local pkgs=('pkg-simple-a' 'pkg-simple-b' 'pkg-simple-epoch')
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			releasePackage extra ${pkgbase} ${arch}
+		done
+	done
+
+	../db-update
+
+	for arch in ${arches[@]}; do
+		../db-repo-remove extra ${arch} ${pkgs[@]}
+	done
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			checkRemovedPackageDB extra ${pkgbase} ${arch}
+		done
+	done
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/lukeshu-xbs/test/test.d/db-update.sh b/extra/lukeshu-xbs/test/test.d/db-update.sh
new file mode 100755
index 0000000..7f1874b
--- /dev/null
+++ b/extra/lukeshu-xbs/test/test.d/db-update.sh
@@ -0,0 +1,175 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testAddSimplePackages() {
+	local arches=(`arches`)
+	local pkgs=('pkg-simple-a' 'pkg-simple-b')
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			releasePackage extra ${pkgbase} ${arch}
+		done
+	done
+
+	../db-update
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			checkPackage extra ${pkgbase}-1-1-${arch}.pkg.tar.xz ${arch}
+		done
+	done
+}
+
+testAddSingleSimplePackage() {
+	releasePackage extra 'pkg-simple-a' 'i686'
+	../db-update
+	checkPackage extra 'pkg-simple-a-1-1-i686.pkg.tar.xz' 'i686'
+}
+
+testAddSingleEpochPackage() {
+	releasePackage extra 'pkg-simple-epoch' 'i686'
+	../db-update
+	checkPackage extra 'pkg-simple-epoch-1:1-1-i686.pkg.tar.xz' 'i686'
+}
+
+testAddAnyPackages() {
+	local pkgs=('pkg-any-a' 'pkg-any-b')
+	local pkgbase
+
+	for pkgbase in ${pkgs[@]}; do
+		releasePackage extra ${pkgbase} any
+	done
+
+	../db-update
+
+	for pkgbase in ${pkgs[@]}; do
+		checkAnyPackage extra ${pkgbase}-1-1-any.pkg.tar.xz
+	done
+}
+
+testAddSplitPackages() {
+	local arches=(`arches`)
+	local pkgs=('pkg-split-a' 'pkg-split-b')
+	local pkg
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			releasePackage extra ${pkgbase} ${arch}
+		done
+	done
+
+	../db-update
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			for pkg in "${pkgdir}/${pkgbase}"/*-${arch}${PKGEXT}; do
+				checkPackage extra ${pkg##*/} ${arch}
+			done
+		done
+	done
+}
+
+testUpdateAnyPackage() {
+	releasePackage extra pkg-any-a any
+	../db-update
+
+	pushd "${TMP}/svn-packages-copy/pkg-any-a/trunk/" >/dev/null
+	sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
+	arch_svn commit -q -m"update pkg to pkgrel=2" >/dev/null
+	sudo libremakepkg
+	mv pkg-any-a-1-2-any.pkg.tar.xz "${pkgdir}/pkg-any-a/"
+	popd >/dev/null
+
+	releasePackage extra pkg-any-a any
+	../db-update
+
+	checkAnyPackage extra pkg-any-a-1-2-any.pkg.tar.xz any
+
+	rm -f "${pkgdir}/pkg-any-a/pkg-any-a-1-2-any.pkg.tar.xz"
+}
+
+testUpdateAnyPackageToDifferentRepositoriesAtOnce() {
+	releasePackage extra pkg-any-a any
+
+	pushd "${TMP}/svn-packages-copy/pkg-any-a/trunk/" >/dev/null
+	sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
+	arch_svn commit -q -m"update pkg to pkgrel=2" >/dev/null
+	sudo libremakepkg
+	mv pkg-any-a-1-2-any.pkg.tar.xz "${pkgdir}/pkg-any-a/"
+	popd >/dev/null
+
+	releasePackage testing pkg-any-a any
+
+	../db-update
+
+	checkAnyPackage extra pkg-any-a-1-1-any.pkg.tar.xz any
+	checkAnyPackage testing pkg-any-a-1-2-any.pkg.tar.xz any
+
+	rm -f "${pkgdir}/pkg-any-a/pkg-any-a-1-2-any.pkg.tar.xz"
+}
+
+testUpdateSameAnyPackageToSameRepository() {
+	releasePackage extra pkg-any-a any
+	../db-update
+	checkAnyPackage extra pkg-any-a-1-1-any.pkg.tar.xz any
+
+	releasePackage extra pkg-any-a any
+	../db-update >/dev/null 2>&1 && (fail 'Adding an existing package to the same repository should fail'; return 1)
+}
+
+testUpdateSameAnyPackageToDifferentRepositories() {
+	releasePackage extra pkg-any-a any
+	../db-update
+	checkAnyPackage extra pkg-any-a-1-1-any.pkg.tar.xz any
+
+	releasePackage testing pkg-any-a any
+	../db-update >/dev/null 2>&1 && (fail 'Adding an existing package to another repository should fail'; return 1)
+
+	local arch
+	for arch in $(arches); do
+		( [ -r "${FTP_BASE}/testing/os/${arch}/testing${DBEXT%.tar.*}" ] \
+			&& bsdtar -xf "${FTP_BASE}/testing/os/${arch}/testing${DBEXT%.tar.*}" -O | grep pkg-any-a &>/dev/null) \
+			&& fail "pkg-any-a should not be in testing/os/${arch}/testing${DBEXT%.tar.*}"
+	done
+}
+
+
+testAddIncompleteSplitPackage() {
+	local arches=(`arches`)
+	local repo='extra'
+	local pkgbase='pkg-split-a'
+	local arch
+
+	for arch in ${arches[@]}; do
+		releasePackage ${repo} ${pkgbase} ${arch}
+	done
+
+	# remove a split package to make db-update fail
+	rm "${STAGING}"/extra/${pkgbase}1-*
+
+	../db-update >/dev/null 2>&1 && fail "db-update should fail when a split package is missing!"
+
+	for arch in ${arches[@]}; do
+		( [ -r "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" ] \
+			&& bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" -O | grep ${pkgbase} &>/dev/null) \
+			&& fail "${pkgbase} should not be in ${repo}/os/${arch}/${repo}${DBEXT%.tar.*}"
+	done
+}
+
+testUnknownRepo() {
+	mkdir "${STAGING}/unknown/"
+	releasePackage extra 'pkg-simple-a' 'i686'
+	releasePackage unknown 'pkg-simple-b' 'i686'
+	../db-update
+	checkPackage extra 'pkg-simple-a-1-1-i686.pkg.tar.xz' 'i686'
+	[ -e "${FTP_BASE}/unknown" ] && fail "db-update pushed a package into an unknown repository"
+	rm -rf "${STAGING}/unknown/"
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/lukeshu-xbs/test/test.d/ftpdir-cleanup.sh b/extra/lukeshu-xbs/test/test.d/ftpdir-cleanup.sh
new file mode 100755
index 0000000..630b88f
--- /dev/null
+++ b/extra/lukeshu-xbs/test/test.d/ftpdir-cleanup.sh
@@ -0,0 +1,121 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testCleanupSimplePackages() {
+	local arches=(`arches`)
+	local pkgs=('pkg-simple-a' 'pkg-simple-b')
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			releasePackage extra ${pkgbase} ${arch}
+		done
+	done
+
+	../db-update
+
+	for arch in ${arches[@]}; do
+		../db-remove extra ${arch} pkg-simple-a
+	done
+
+	../cron-jobs/ftpdir-cleanup >/dev/null
+
+	for arch in ${arches[@]}; do
+		local pkg1="pkg-simple-a-1-1-${arch}.pkg.tar.xz"
+		checkRemovedPackage extra 'pkg-simple-a' ${arch}
+		[ -f "${FTP_BASE}/${PKGPOOL}/${pkg1}" ] && fail "${PKGPOOL}/${pkg1} found"
+		[ -f "${FTP_BASE}/extra/os/${arch}/${pkg1}" ] && fail "extra/os/${arch}/${pkg1} found"
+
+		local pkg2="pkg-simple-b-1-1-${arch}.pkg.tar.xz"
+		checkPackage extra ${pkg2} ${arch}
+	done
+}
+
+testCleanupEpochPackages() {
+	local arches=(`arches`)
+	local pkgs=('pkg-simple-epoch')
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			releasePackage extra ${pkgbase} ${arch}
+		done
+	done
+
+	../db-update
+
+	for arch in ${arches[@]}; do
+		../db-remove extra ${arch} pkg-simple-epoch
+	done
+
+	../cron-jobs/ftpdir-cleanup >/dev/null
+
+	for arch in ${arches[@]}; do
+		local pkg1="pkg-simple-epoch-1:1-1-${arch}.pkg.tar.xz"
+		checkRemovedPackage extra 'pkg-simple-epoch' ${arch}
+		[ -f "${FTP_BASE}/${PKGPOOL}/${pkg1}" ] && fail "${PKGPOOL}/${pkg1} found"
+		[ -f "${FTP_BASE}/extra/os/${arch}/${pkg1}" ] && fail "extra/os/${arch}/${pkg1} found"
+	done
+}
+
+testCleanupAnyPackages() {
+	local pkgs=('pkg-any-a' 'pkg-any-b')
+	local pkgbase
+	local arch='any'
+
+	for pkgbase in ${pkgs[@]}; do
+		releasePackage extra ${pkgbase} any
+	done
+
+	../db-update
+	../db-remove extra any pkg-any-a
+	../cron-jobs/ftpdir-cleanup >/dev/null
+
+	local pkg1='pkg-any-a-1-1-any.pkg.tar.xz'
+	checkRemovedAnyPackage extra 'pkg-any-a'
+	[ -f "${FTP_BASE}/${PKGPOOL}/${pkg1}" ] && fail "${PKGPOOL}/${pkg1} found"
+	[ -f "${FTP_BASE}/extra/os/${arch}/${pkg1}" ] && fail "extra/os/${arch}/${pkg1} found"
+
+	local pkg2="pkg-any-b-1-1-${arch}.pkg.tar.xz"
+	checkAnyPackage extra ${pkg2}
+}
+
+testCleanupSplitPackages() {
+	local arches=(`arches`)
+	local pkgs=('pkg-split-a' 'pkg-split-b')
+	local pkg
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			releasePackage extra ${pkgbase} ${arch}
+		done
+	done
+
+	../db-update
+
+	for arch in ${arches[@]}; do
+		../db-remove extra ${arch} ${pkgs[0]}
+	done
+
+	../cron-jobs/ftpdir-cleanup >/dev/null
+
+	for arch in ${arches[@]}; do
+		for pkg in "${pkgdir}/${pkgs[0]}"/*-${arch}${PKGEXT}; do
+			checkRemovedPackage extra ${pkgs[0]} ${arch}
+			[ -f "${FTP_BASE}/${PKGPOOL}/${pkg##*/}" ] && fail "${PKGPOOL}/${pkg##*/} found"
+			[ -f "${FTP_BASE}/extra/os/${arch}/${pkg##*/}" ] && fail "extra/os/${arch}/${pkg##*/} found"
+		done
+
+		for pkg in "${pkgdir}/${pkgs[1]}"/*-${arch}${PKGEXT}; do
+			checkPackage extra ${pkg##*/} ${arch}
+		done
+	done
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/lukeshu-xbs/test/test.d/packages.sh b/extra/lukeshu-xbs/test/test.d/packages.sh
new file mode 100755
index 0000000..488cb15
--- /dev/null
+++ b/extra/lukeshu-xbs/test/test.d/packages.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testPackages() {
+	# TODO: namcap -r sodepends fails with i686 packages
+	find "${pkgdir}" -name "*${PKGEXT}" -exec namcap -e sodepends,pkgnameindesc {} + || fail 'namcap failed'
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/lukeshu-xbs/test/test.d/signed-packages.sh b/extra/lukeshu-xbs/test/test.d/signed-packages.sh
new file mode 100755
index 0000000..03566ef
--- /dev/null
+++ b/extra/lukeshu-xbs/test/test.d/signed-packages.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testAddSignedPackage() {
+	releasePackage extra 'pkg-simple-a' 'i686'
+	../db-update || fail "db-update failed!"
+}
+
+testAddUnsignedPackage() {
+	releasePackage extra 'pkg-simple-a' 'i686'
+	rm "${STAGING}"/extra/*.sig
+	../db-update >/dev/null 2>&1 && fail "db-update should fail when a signature is missing!"
+}
+
+testAddInvalidSignedPackage() {
+	local p
+	releasePackage extra 'pkg-simple-a' 'i686'
+	for p in "${STAGING}"/extra/*${PKGEXT}; do
+		unxz $p
+		xz -0 ${p%%.xz}
+	done
+	../db-update >/dev/null 2>&1 && fail "db-update should fail when a signature is invalid!"
+}
+
+testAddBrokenSignature() {
+	local s
+	releasePackage extra 'pkg-simple-a' 'i686'
+	for s in "${STAGING}"/extra/*.sig; do
+		echo 0 > $s
+	done
+	../db-update >/dev/null 2>&1 && fail "db-update should fail when a signature is broken!"
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/lukeshu-xbs/test/test.d/sourceballs.sh b/extra/lukeshu-xbs/test/test.d/sourceballs.sh
new file mode 100755
index 0000000..472cb30
--- /dev/null
+++ b/extra/lukeshu-xbs/test/test.d/sourceballs.sh
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testSourceballs() {
+	local arches=(`arches`)
+	local pkgs=('pkg-simple-a' 'pkg-simple-b' 'pkg-simple-epoch')
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			releasePackage extra ${pkgbase} ${arch}
+		done
+	done
+	../db-update
+
+	../cron-jobs/sourceballs
+	for pkgbase in ${pkgs[@]}; do
+		[ ! -r ${FTP_BASE}/${SRCPOOL}/${pkgbase}-*${SRCEXT} ] && fail "source package not found!"
+	done
+}
+
+testAnySourceballs() {
+	local pkgs=('pkg-any-a' 'pkg-any-b')
+	local pkgbase
+
+	for pkgbase in ${pkgs[@]}; do
+		releasePackage extra ${pkgbase} any
+	done
+	../db-update
+
+	../cron-jobs/sourceballs
+	for pkgbase in ${pkgs[@]}; do
+		[ ! -r ${FTP_BASE}/${SRCPOOL}/${pkgbase}-*${SRCEXT} ] && fail "source package not found!"
+	done
+}
+
+testSplitSourceballs() {
+	local arches=(`arches`)
+	local pkgs=('pkg-split-a' 'pkg-split-b')
+	local pkg
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			releasePackage extra ${pkgbase} ${arch}
+		done
+	done
+
+	../db-update
+
+	../cron-jobs/sourceballs
+	for pkgbase in ${pkgs[@]}; do
+		[ ! -r ${FTP_BASE}/${SRCPOOL}/${pkgbase}-*${SRCEXT} ] && fail "source package not found!"
+	done
+}
+
+testSourceballsCleanup() {
+	local arches=(`arches`)
+	local pkgs=('pkg-simple-a' 'pkg-simple-b')
+	local pkgbase
+	local arch
+
+	for pkgbase in ${pkgs[@]}; do
+		for arch in ${arches[@]}; do
+			releasePackage extra ${pkgbase} ${arch}
+		done
+	done
+	../db-update
+	../cron-jobs/sourceballs
+
+	for arch in ${arches[@]}; do
+		../db-remove extra ${arch} pkg-simple-a
+	done
+
+	../cron-jobs/sourceballs
+	[ -r ${FTP_BASE}/${SRCPOOL}/pkg-simple-a-*${SRCEXT} ] && fail "source package was not removed!"
+	[ ! -r ${FTP_BASE}/${SRCPOOL}/pkg-simple-b-*${SRCEXT} ] && fail "source package not found!"
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/lukeshu-xbs/test/test.d/testing2x.sh b/extra/lukeshu-xbs/test/test.d/testing2x.sh
new file mode 100755
index 0000000..0c2fa83
--- /dev/null
+++ b/extra/lukeshu-xbs/test/test.d/testing2x.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testTesting2xAnyPackage() {
+	releasePackage core pkg-any-a any
+	../db-update
+
+	pushd "${TMP}/svn-packages-copy/pkg-any-a/trunk/" >/dev/null
+	sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
+	arch_svn commit -q -m"update pkg to pkgrel=2" >/dev/null
+	sudo libremakepkg
+	mv pkg-any-a-1-2-any.pkg.tar.xz "${pkgdir}/pkg-any-a/"
+	popd >/dev/null
+
+	releasePackage testing pkg-any-a any
+	../db-update
+	rm -f "${pkgdir}/pkg-any-a/pkg-any-a-1-2-any.pkg.tar.xz"
+
+	../testing2x pkg-any-a
+
+	checkAnyPackage core pkg-any-a-1-2-any.pkg.tar.xz any
+	checkRemovedAnyPackage testing pkg-any-a
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/lukeshu-xbs/test/test_filter.py b/extra/lukeshu-xbs/test/test_filter.py
new file mode 100644
index 0000000..d8006f9
--- /dev/null
+++ b/extra/lukeshu-xbs/test/test_filter.py
@@ -0,0 +1,196 @@
+# -*- encoding: utf-8 -*-
+""" """
+
+__author__ = "Joshua Ismael Haase Hernández "
+__version__ = "$Revision: 1.1 $"
+__date__ = "$Date: 2011/02/08 $"
+__copyright__ = "Copyright (c) 2011 Joshua Ismael Haase Hernández"
+__license__ = "GPL3+"
+
+from repm.config import *
+from repm.filter import *
+import unittest
+
+class pkginfo_from_file_KnownValues(unittest.TestCase):
+    # (filename, name, version, release, arch)
+    # filename is location
+    known=(
+        ("community-testing/os/i686/inputattach-1.24-3-i686.pkg.tar.xz","inputattach","1.24","3","i686"),
+        ("community-testing/os/i686/ngspice-22-1-i686.pkg.tar.xz","ngspice","22","1","i686"),
+        ("community-testing/os/i686/tmux-1.4-2-i686.pkg.tar.xz","tmux","1.4","2","i686"),
+        ("community-testing/os/i686/tor-0.2.1.29-2-i686.pkg.tar.xz","tor","0.2.1.29","2","i686"),
+        ("../../../pool/community/tor-0.2.1.29-2-i686.pkg.tar.xz","tor","0.2.1.29","2","i686"),
+        ("community-testing/os/x86_64/inputattach-1.24-3-x86_64.pkg.tar.xz","inputattach","1.24","3","x86_64"),
+        ("../../../pool/community/inputattach-1.24-3-x86_64.pkg.tar.xz","inputattach","1.24","3","x86_64"),
+        ("tor-0.2.1.29-2-x86_64.pkg.tar.xz","tor","0.2.1.29","2","x86_64"),
+        )
+
+    def generate_results(self, example_tuple, attr):
+        location, name, version, release, arch = example_tuple
+        return pkginfo_from_filename(location)[attr], locals()[attr]
+
+    def testReturnPackageObject(self):
+        for i in self.known:
+            location, name, version, release, arch = i
+            self.assertIsInstance(pkginfo_from_filename(location),Package)
+
+    def testNames(self):
+        for i in self.known:
+            k,v = self.generate_results(example_tuple=i,attr="name")
+            self.assertEqual(k, v)
+
+    def testVersions(self):
+        for i in self.known:
+            k,v = self.generate_results(example_tuple=i,attr="version")
+            self.assertEqual(k, v)
+
+    def testArchs(self):
+        for i in self.known:
+            k,v = self.generate_results(example_tuple=i,attr="arch")
+            self.assertEqual(k, v)
+
+    def testReleases(self):
+        for i in self.known:
+            k,v = self.generate_results(example_tuple=i,attr="release")
+            self.assertEqual(k, v)
+
+    def testLocations(self):
+        for i in self.known:
+            k,v = self.generate_results(example_tuple=i,attr="location")
+            self.assertEqual(k, v)
+
+class pkginfo_from_file_BadInput(unittest.TestCase):
+    bad=("community-testing/os/i686/community-testing.db",
+         "community-testing/os/i686/community-testing.db.tar.gz",
+         "community-testing/os/i686/community-testing.db.tar.gz.old",
+         "community-testing/os/i686/community-testing.files",
+         "community-testing/os/i686/community-testing.files.tar.gz",
+         "community-testing/os/x86_64")
+
+    def testBadInput(self):
+        for i in self.bad:
+            self.assertRaises(NonValidFile,pkginfo_from_filename,i)
+
+class pkginfoFromRsyncOutput(unittest.TestCase):
+    example_package_list=(Package(),Package(),Package())
+    example_package_list[0].package_info={ "name" : "alex",
+                                           "version" : "2.3.4",
+                                           "release" : "1",
+                                           "arch" : "i686",
+                                           "license" : False,
+                                           "location": "community-staging/os/i686/alex-2.3.4-1-i686.pkg.tar.xz",
+                                           "depends" : False,}
+    example_package_list[1].package_info={ "name" : "any2dvd",
+                                           "version" : "0.34",
+                                           "release" : "4",
+                                           "arch" : "any",
+                                           "license" : False,
+                                           "location": "community/os/any/any2dvd-0.34-4-any.pkg.tar.xz",
+                                           "depends" : False,}
+    example_package_list[2].package_info={ "name" : "gmime22",
+                                           "version" : "2.2.26",
+                                           "release" : "1",
+                                           "arch" : "x86_64",
+                                           "license" : False,
+                                           "location": "community/os/x86_64/gmime22-2.2.26-1-x86_64.pkg.tar.xz",
+                                           "depends" : False,}
+
+    try:
+        output_file = open("rsync_output_sample")
+        rsync_out= output_file.read()
+        output_file.close()
+    except IOError: print("There is no rsync_output_sample file")
+
+    pkglist = pkginfo_from_rsync_output(rsync_out)
+
+    def testOutputArePackages(self):
+        if not self.pkglist:
+            self.fail("not pkglist:" + str(self.pkglist))
+        for pkg in self.pkglist:
+            self.assertIsInstance(pkg,Package)
+
+    def testPackageInfo(self):
+        if not self.pkglist:
+            self.fail("Pkglist doesn't exist: " + str(self.pkglist))
+        self.assertEqual(self.pkglist,self.example_package_list)
+
+class generateRsyncBlacklist(unittest.TestCase):
+    example_package_list=(Package(),Package(),Package())
+    example_package_list[0].package_info={ "name" : "alex",
+                                           "version" : "2.3.4",
+                                           "release" : "1",
+                                           "arch" : "i686",
+                                           "license" : False,
+                                           "location": "community-staging/os/i686/alex-2.3.4-1-i686.pkg.tar.xz",
+                                           "depends" : False,}
+    example_package_list[1].package_info={ "name" : "any2dvd",
+                                           "version" : "0.34",
+                                           "release" : "4",
+                                           "arch" : "any",
+                                           "license" : False,
+                                           "location": "community/os/any/any2dvd-0.34-4-any.pkg.tar.xz",
+                                           "depends" : False,}
+    example_package_list[2].package_info={ "name" : "gmime22",
+                                           "version" : "2.2.26",
+                                           "release" : "1",
+                                           "arch" : "x86_64",
+                                           "license" : False,
+                                           "location": "community/os/x86_64/gmime22-2.2.26-1-x86_64.pkg.tar.xz",
+                                           "depends" : False,}
+
+    def testListado(self):
+        self.assertEqual(listado("blacklist_sample"),["alex","gmime22"])
+
+    def testExcludeFiles(self):
+        a=rsyncBlacklist_from_blacklist(self.example_package_list,
+                                        listado("blacklist_sample"),
+                                        False)
+        b=[self.example_package_list[0]["location"],self.example_package_list[2]["location"]]
+        self.assertEqual(a,b)
+
+class pkginfo_from_descKnownValues(unittest.TestCase):
+    pkgsample=Package()
+    pkgsample.package_info={"name" : "binutils",
+                            "version" : "2.21",
+                            "release" : "4",
+                            "arch" : "x86_64",
+                            "license" : "GPL",
+                            "location": "binutils-2.21-4-x86_64.pkg.tar.xz",
+                            "depends" : False,}
+    fsock=open("desc")
+    pkggen=pkginfo_from_desc(fsock.read())
+    fsock.close()
+    def testPkginfoFromDesc(self):
+        if self.pkggen is None:
+            self.fail("return value is None")
+        self.assertEqual(self.pkgsample,self.pkggen)
+
+class pkginfo_from_db(unittest.TestCase):
+    archdb = os.path.join("./workdir")
+    example_package_list=(Package(),Package(),Package())
+    example_package_list[0].package_info={ "name" : "acl",
+                                           "version" : "2.2.49",
+                                           "release" : "2",
+                                           "arch" : "x86_64",
+                                           "license" : ("LGPL",),
+                                           "location": "acl-2.2.49-2-x86_64.pkg.tar.xz",
+                                           "depends" : ("attr>=2.4.41",),}
+    example_package_list[1].package_info={ "name" : "glibc",
+                                           "version" : "2.13",
+                                           "release" : "4",
+                                           "arch" : "x86_64",
+                                           "license" : ("GPL","LGPL"),
+                                           "location": "glibc-2.13-4-x86_64.pkg.tar.xz",
+                                           "depends" : ("linux-api-headers>=2.6.37","tzdata",),}
+    example_package_list[2].package_info={ "name" : "",
+                                           "version" : "2.2.26",
+                                           "release" : "1",
+                                           "arch" : "x86_64",
+                                           "license" : False,
+                                           "location": "",
+                                           "depends" : False,}
+
+
+if __name__ == "__main__":
+    unittest.main()
+
-- 
cgit v1.2.3