author:    coadde [Márcio Alexandre Silva Delgado] <coadde@parabola.nu>  2015-08-26 03:40:56 -0300
committer: coadde [Márcio Alexandre Silva Delgado] <coadde@parabola.nu>  2015-08-26 03:40:56 -0300
commit:    50d2db5c271537d78ea0037fe285d081df38fa02
tree:      bcb8911f516a5561b3ec37cb0a010e603a494586
parent:    8eafe04ec5eaf917d9e2d58993604c454f1a26d1
add copy from the master repo
-rw-r--r--  extra/legacy/COPYING | 674
-rw-r--r--  extra/legacy/TODO | 10
-rwxr-xr-x  extra/legacy/abslibre | 129
-rwxr-xr-x  extra/legacy/any-to-ours | 71
-rwxr-xr-x  extra/legacy/check-package-libraries.py | 193
-rw-r--r--  extra/legacy/config | 57
-rwxr-xr-x  extra/legacy/create-repo | 21
-rwxr-xr-x  extra/legacy/createrepos | 8
-rwxr-xr-x  extra/legacy/cron-jobs/check_archlinux/check_packages.py | 508
-rwxr-xr-x  extra/legacy/cron-jobs/check_archlinux/parse_pkgbuilds.sh | 153
-rwxr-xr-x  extra/legacy/cron-jobs/devlist-mailer | 28
-rwxr-xr-x  extra/legacy/cron-jobs/ftpdir-cleanup | 88
-rwxr-xr-x  extra/legacy/cron-jobs/integrity-check | 32
-rwxr-xr-x  extra/legacy/cron-jobs/make_repo_torrents | 70
-rwxr-xr-x  extra/legacy/cron-jobs/repo-sanity-check | 57
-rwxr-xr-x  extra/legacy/cron-jobs/sourceballs | 151
-rw-r--r--  extra/legacy/cron-jobs/sourceballs.force | 4
-rw-r--r--  extra/legacy/cron-jobs/sourceballs.skip | 29
-rwxr-xr-x  extra/legacy/db-check-nonfree | 46
-rwxr-xr-x  extra/legacy/db-cleanup | 69
-rw-r--r--  extra/legacy/db-functions | 524
-rwxr-xr-x  extra/legacy/db-libremessages | 83
-rwxr-xr-x  extra/legacy/db-list-unsigned-packages | 38
-rwxr-xr-x  extra/legacy/db-list-unsigned-packages.py | 96
-rwxr-xr-x  extra/legacy/db-move | 106
-rwxr-xr-x  extra/legacy/db-remove | 49
-rwxr-xr-x  extra/legacy/db-repo-add | 41
-rwxr-xr-x  extra/legacy/db-repo-remove | 37
-rwxr-xr-x  extra/legacy/db-sync | 208
-rw-r--r--  extra/legacy/db-sync.conf | 11
-rwxr-xr-x  extra/legacy/db-update | 118
-rwxr-xr-x  extra/legacy/list_nonfree_in_db.py | 28
-rwxr-xr-x  extra/legacy/make_individual_torrent | 52
-rwxr-xr-x  extra/legacy/mkrepo | 15
-rw-r--r--  extra/legacy/test/__init__.py | 0
-rw-r--r--  extra/legacy/test/blacklist_sample | 2
-rw-r--r--  extra/legacy/test/core.db.tar.gz | bin 0 -> 1345 bytes
-rw-r--r--  extra/legacy/test/depends | 4
-rw-r--r--  extra/legacy/test/desc | 39
-rw-r--r--  extra/legacy/test/lib/common.inc | 266
-rwxr-xr-x  extra/legacy/test/lib/shunit2 | 1048
-rw-r--r--  extra/legacy/test/packages/pkg-any-a/PKGBUILD | 12
-rw-r--r--  extra/legacy/test/packages/pkg-any-b/PKGBUILD | 12
l---------  extra/legacy/test/packages/pkg-simple-a/Makefile | 1
-rw-r--r--  extra/legacy/test/packages/pkg-simple-a/PKGBUILD | 22
l---------  extra/legacy/test/packages/pkg-simple-a/test.c | 1
l---------  extra/legacy/test/packages/pkg-simple-b/Makefile | 1
-rw-r--r--  extra/legacy/test/packages/pkg-simple-b/PKGBUILD | 22
l---------  extra/legacy/test/packages/pkg-simple-b/test.c | 1
l---------  extra/legacy/test/packages/pkg-simple-epoch/Makefile | 1
-rw-r--r--  extra/legacy/test/packages/pkg-simple-epoch/PKGBUILD | 23
l---------  extra/legacy/test/packages/pkg-simple-epoch/test.c | 1
l---------  extra/legacy/test/packages/pkg-split-a/Makefile | 1
-rw-r--r--  extra/legacy/test/packages/pkg-split-a/PKGBUILD | 28
l---------  extra/legacy/test/packages/pkg-split-a/test.c | 1
l---------  extra/legacy/test/packages/pkg-split-b/Makefile | 1
-rw-r--r--  extra/legacy/test/packages/pkg-split-b/PKGBUILD | 29
l---------  extra/legacy/test/packages/pkg-split-b/test.c | 1
-rw-r--r--  extra/legacy/test/rsync_output_sample | 14
-rwxr-xr-x  extra/legacy/test/runTest | 15
-rw-r--r--  extra/legacy/test/src/Makefile | 5
-rw-r--r--  extra/legacy/test/src/test.c | 7
-rwxr-xr-x  extra/legacy/test/test.d/create-filelists.sh | 105
-rwxr-xr-x  extra/legacy/test/test.d/db-move.sh | 122
-rwxr-xr-x  extra/legacy/test/test.d/db-remove.sh | 77
-rwxr-xr-x  extra/legacy/test/test.d/db-repo-add.sh | 54
-rwxr-xr-x  extra/legacy/test/test.d/db-repo-remove.sh | 58
-rwxr-xr-x  extra/legacy/test/test.d/db-update.sh | 165
-rwxr-xr-x  extra/legacy/test/test.d/ftpdir-cleanup.sh | 121
-rwxr-xr-x  extra/legacy/test/test.d/packages.sh | 11
-rwxr-xr-x  extra/legacy/test/test.d/pool-transition.sh | 152
-rwxr-xr-x  extra/legacy/test/test.d/signed-packages.sh | 13
-rwxr-xr-x  extra/legacy/test/test.d/sourceballs.sh | 84
-rwxr-xr-x  extra/legacy/test/test.d/testing2x.sh | 27
-rw-r--r--  extra/legacy/test/test_filter.py | 196
75 files changed, 6547 insertions, 0 deletions
diff --git a/extra/legacy/COPYING b/extra/legacy/COPYING
new file mode 100644
index 0000000..94a9ed0
--- /dev/null
+++ b/extra/legacy/COPYING
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+ <program> Copyright (C) <year> <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/extra/legacy/TODO b/extra/legacy/TODO
new file mode 100644
index 0000000..9dd4b52
--- /dev/null
+++ b/extra/legacy/TODO
@@ -0,0 +1,10 @@
+* Test Suite for clean_repo.py
+
+ - Review all repos
+ - Remove all blacklisted packages
+ - Get the pending list right
+ - Extract all licenses correctly
+
+* Fix db-move
+
+ - Make it use abslibre
diff --git a/extra/legacy/abslibre b/extra/legacy/abslibre
new file mode 100755
index 0000000..6b21d24
--- /dev/null
+++ b/extra/legacy/abslibre
@@ -0,0 +1,129 @@
+#!/bin/bash
+
+set -e
+
+FTP_BASE=/srv/repo/main
+ABSLIBRE=/srv/abslibre
+ABSGIT=/srv/git/abslibre/abslibre.git
+# Remote
+# ABSGIT=http://projects.parabolagnulinux.org/abslibre.git
+BLACKLIST=/home/repo/blacklist/blacklist.txt
+SYNCARGS='-mrtv --no-motd --delete-after --no-p --no-o --no-g --quiet'
+BLFILE=/tmp/blacklist.txt
+
+# Variables from abs.conf
+ABSROOT="/srv/abs/"
+# DON'T CHANGE. WE NEED IT FOR ABSLIBRE
+SYNCSERVER="rsync.archlinux.org"
+ARCH="i686"
+MIRRORLIST="/etc/pacman.d/mirrorlist"
+REPOS=(core extra community testing community-testing !staging !community-staging)
+
+# Steps
+# * Sync abs
+# * Download blacklist.txt
+# * Sync abslibre from abs excluding from blacklist
+# * Create repo.abs.tar.gz tarballs
+
+function sync_abs() {
+ for ARCH in any i686 x86_64; do
+ rsync ${SYNCARGS} ${SYNCSERVER}::abs/${ARCH}/ ${ABSROOT}/${ARCH} || return $?
+ done
+
+ # fix some permissions
+ find "${ABSROOT}" -type d -print0 | xargs -0 chmod 755
+ find "${ABSROOT}" -type f -print0 | xargs -0 chmod 644
+}
+
+function get_blacklist() {
+ printf ":: Updating blacklist...\t"
+ cut -d':' -f1 "${BLACKLIST}" | sort -u | \
+ sed "s|^|**/|" > "${BLFILE}" || {
+ printf "[FAILED]\n"
+ return 1
+ }
+
+ # Prevent using an empty blacklist
+ [ "$(wc -l < "${BLFILE}")" -eq 0 ] && return 1
+
+ printf "[OK]\n"
+}
+
+function sync_abs_libre() {
+
+ # Clone ABSLibre git repo
+ if [ -d /tmp/abslibre/.git ]; then
+ pushd /tmp/abslibre >/dev/null 2>&1
+ git pull
+ popd >/dev/null 2>&1
+ else
+ git clone "$ABSGIT" /tmp/abslibre
+ fi
+
+ # Sync from ABS and then sync from ABSLibre
+ printf ":: Syncing ABSLibre...\t"
+ (rsync ${SYNCARGS} --delete-excluded \
+ --exclude-from=${BLFILE} \
+ ${ABSROOT} \
+ ${ABSLIBRE} \
+ &&
+ for ARCH in i686 x86_64; do rsync -v -mrtq --no-motd --no-p --no-o --no-g --quiet --exclude=.git/ /tmp/abslibre/ ${ABSLIBRE}/${ARCH}/; done) || {
+ printf "[FAILED]\n"
+ return 1
+ }
+
+ # fix some permissions
+ find "${ABSLIBRE}" -type d -print0 | xargs -0 chmod 755
+ find "${ABSLIBRE}" -type f -print0 | xargs -0 chmod 644
+
+ printf "[OK]\n"
+}
+
+# This part is very hacky and particular to the current setup :P
+sync_pre_mips64el() {
+ pushd /home/fauno/Repos/abslibre-pre-mips64el >/dev/null
+
+ sudo -u fauno sh -c "
+ rsync ${SYNCARGS} \
+ --exclude=.git* \
+ --exclude=community-staging \
+ --exclude=community-testing \
+ --exclude=gnome-unstable \
+ --exclude=kde-unstable \
+ --exclude=multilib \
+ --exclude=multilib-testing \
+ --exclude=multilib-staging \
+ --exclude=staging \
+ --exclude=testing \
+ ${ABSLIBRE}/x86_64/ \
+ /home/fauno/Repos/abslibre-pre-mips64el/ &&
+ git add . &&
+ git commit -m \"$(date)\" -a
+ git push origin master
+ git gc
+ "
+}
+
+# Create .abs.tar.gz tarballs
+create_tarballs() {
+ for repo in ${ABSLIBRE}/{i686,x86_64}/*; do
+ baserepo=${repo##*/}
+ arch=$(basename $(dirname $repo))
+
+ # Remove the old one
+ mkdir -p $FTP_BASE/$baserepo/os/$arch/
+ rm -fv $FTP_BASE/$baserepo/os/$arch/$baserepo.abs.tar.gz
+ # Create a new one, joining the arch-specific and 'any' trees
+ # Strip the path prefix (a literal $repo pattern would miss the 'any' tree)
+ bsdtar -czf $FTP_BASE/$baserepo/os/$arch/$baserepo.abs.tar.gz \
+ -s ":${ABSLIBRE}/[a-z0-9_]\+/[a-z]\+::" \
+ $repo/* ${ABSLIBRE}/any/${baserepo}/*
+
+ done
+}
+
+sync_abs
+get_blacklist
+sync_abs_libre
+#sync_pre_mips64el
+create_tarballs
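
The get_blacklist() step above boils Parabola's blacklist.txt down to rsync
exclude patterns. A minimal standalone sketch of that transformation in bash
(the sample entries and the /tmp path are assumptions; the real blacklist uses
colon-separated fields with the blacklisted package name first):

    # Two sample entries, format assumed: "package:replacement:...".
    printf '%s\n' 'skype:' 'flashplugin:epiphany:' > /tmp/blacklist-sample.txt

    # Field 1 is the package name; de-duplicate, then prefix each name with
    # '**/' so rsync will exclude it at any depth, mirroring get_blacklist().
    cut -d':' -f1 /tmp/blacklist-sample.txt | sort -u | sed 's|^|**/|'
    # prints:
    #   **/flashplugin
    #   **/skype

sync_abs_libre() then feeds the generated file to rsync via --exclude-from,
and --delete-excluded drops any blacklisted trees synced on earlier runs.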
diff --git a/extra/legacy/any-to-ours b/extra/legacy/any-to-ours
new file mode 100755
index 0000000..a901d54
--- /dev/null
+++ b/extra/legacy/any-to-ours
@@ -0,0 +1,71 @@
+#!/bin/bash
+# Releases 'any' packages from Arch arches to ours
+
+trap_exit() {
+ echo
+ error "$@"
+ exit 1
+}
+
+source "$(dirname "$(readlink -e "$0")")/config"
+source "$(dirname "$(readlink -e "$0")")/db-libremessages"
+
+# From makepkg
+set -E
+
+trap 'trap_exit "$(gettext "TERM signal caught. Exiting...")"' TERM HUP QUIT
+trap 'trap_exit "$(gettext "Aborted by user! Exiting...")"' INT
+trap 'trap_exit "$(gettext "An unknown error has occurred. Exiting...")"' ERR
+
+# The architecture to compare with
+BASEARCH='x86_64'
+
+# Traverse all Arch repos
+for _repo in "${ARCHREPOS[@]}"; do
+ msg "Processing %s..." "${_repo}"
+
+ # Find 'any' packages
+ # The base arch is hardcoded, but other arches could be released the same way...
+ PKGS=($(find "${FTP_BASE}/${_repo}/os/${BASEARCH}/" \
+ -iname '*-any.pkg.tar.?z' \
+ -printf "%f "))
+
+ if [ ${#PKGS[@]} -eq 0 ]; then
+ msg2 "No '%s' packages here" any
+ continue
+ fi
+
+ for _arch in "${OURARCHES[@]}"; do
+ msg2 "Syncing %s..." "${_arch}"
+
+ # Sync 'any' only and extract the synced packages
+ SYNCED=($(
+ rsync -av \
+ --include='*-any.pkg.tar.?z' \
+ --include='*-any.pkg.tar.?z.sig' \
+ --exclude='*' \
+ "${FTP_BASE}/${_repo}/os/${BASEARCH}/" \
+ "${FTP_BASE}/${_repo}/os/${_arch}/" 2>&1 | \
+ grep 'any\.pkg\.tar\..z$' | \
+ cut -d ' ' -f 1 ))
+
+ if [ ${#SYNCED[@]} -eq 0 ]; then
+ msg2 "Already synced (or error happened)"
+ continue
+ fi
+
+ msg2 "Synced %d packages: %s" "${#SYNCED[@]}" "${SYNCED[*]}"
+
+ msg2 "Adding to db..."
+
+ pushd "${FTP_BASE}/${_repo}/os/${_arch}/" >/dev/null
+
+ # Add the packages to the db
+ repo-add "${_repo}${DBEXT}" "${SYNCED[@]}"
+
+ popd >/dev/null
+
+ # Avoid mixups
+ unset SYNCED PKGS
+ done
+done
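
The copy in any-to-ours relies on rsync's filter ordering: rules are checked
in the order given and the first match wins, so the two --include patterns
admit the architecture-independent packages (and their signatures) before the
catch-all --exclude='*' rejects everything else. A trimmed sketch with
concrete paths (the repo and arch names here are assumptions):

    rsync -av \
        --include='*-any.pkg.tar.?z' \
        --include='*-any.pkg.tar.?z.sig' \
        --exclude='*' \
        /srv/repo/main/libre/os/x86_64/ \
        /srv/repo/main/libre/os/armv7h/

The loop then runs repo-add in the destination directory so the copied
packages are registered in that arch's database.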
diff --git a/extra/legacy/check-package-libraries.py b/extra/legacy/check-package-libraries.py
new file mode 100755
index 0000000..bc2349b
--- /dev/null
+++ b/extra/legacy/check-package-libraries.py
@@ -0,0 +1,193 @@
+#!/usr/bin/env python3
+# Copyright (C) 2012 Michał Masłowski <mtjm@mtjm.eu>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+
+"""
+Check which libraries are provided or required by a package, store
+this in a database, update and list broken packages.
+
+Dependencies:
+
+- Python 3.2 or later with SQLite 3 support
+
+- ``bsdtar``
+
+- ``readelf``
+"""
+
+
+import os.path
+import re
+import sqlite3
+import subprocess
+import tempfile
+
+
+#: Regexp matching an interesting dynamic entry.
+_DYNAMIC = re.compile(r"^\s*[0-9a-fx]+"
+ r"\s*\((NEEDED|SONAME)\)[^:]*:\s*\[(.+)\]$")
+
+
+def make_db(path):
+ """Make a new, empty, library database at *path*."""
+ con = sqlite3.connect(path)
+ con.executescript("""
+create table provided(
+ library varchar not null,
+ package varchar not null
+);
+create table used(
+ library varchar not null,
+ package varchar not null
+);
+""")
+ con.close()
+
+
+def begin(database):
+ """Connect to *database* and start a transaction."""
+ con = sqlite3.connect(database)
+ con.execute("begin exclusive")
+ return con
+
+
+def add_provided(con, package, libraries):
+ """Write that *package* provides *libraries*."""
+ for library in libraries:
+ con.execute("insert into provided (package, library) values (?,?)",
+ (package, library))
+
+
+def add_used(con, package, libraries):
+ """Write that *package* uses *libraries*."""
+ for library in libraries:
+ con.execute("insert into used (package, library) values (?,?)",
+ (package, library))
+
+
+def remove_package(con, package):
+ """Remove all entries for a package."""
+ con.execute("delete from provided where package=?", (package,))
+ con.execute("delete from used where package=?", (package,))
+
+
+def add_package(con, package):
+ """Add entries from a named *package*."""
+ # Extract to a temporary directory. This could be done more
+ # efficiently, since there is no need to store more than one file
+ # at once.
+ with tempfile.TemporaryDirectory() as temp:
+ tar = subprocess.Popen(("bsdtar", "xf", package, "-C", temp))
+ tar.communicate()
+ with open(os.path.join(temp, ".PKGINFO")) as pkginfo:
+ for line in pkginfo:
+ if line.startswith("pkgname ="):
+ pkgname = line[len("pkgname ="):].strip()
+ break
+ # Don't list previously removed libraries.
+ remove_package(con, pkgname)
+ provided = set()
+ used = set()
+ # Search for ELFs.
+ for dirname, dirnames, filenames in os.walk(temp):
+ assert dirnames is not None # unused, avoid pylint warning
+ for file_name in filenames:
+ path = os.path.join(dirname, file_name)
+ with open(path, "rb") as file_object:
+ if file_object.read(4) != b"\177ELF":
+ continue
+ readelf = subprocess.Popen(("readelf", "-d", path),
+ stdout=subprocess.PIPE)
+ for line in readelf.communicate()[0].split(b"\n"):
+ match = _DYNAMIC.match(line.decode("ascii"))
+ if match:
+ if match.group(1) == "SONAME":
+ provided.add(match.group(2))
+ elif match.group(1) == "NEEDED":
+ used.add(match.group(2))
+ else:
+ raise AssertionError("unknown entry type "
+ + match.group(1))
+ add_provided(con, pkgname, provided)
+ add_used(con, pkgname, used)
+
+
+def init(arguments):
+ """Initialize."""
+ make_db(arguments.database)
+
+
+def add(arguments):
+ """Add packages."""
+ con = begin(arguments.database)
+ for package in arguments.packages:
+ add_package(con, package)
+ con.commit()
+ con.close()
+
+
+def remove(arguments):
+ """Remove packages."""
+ con = begin(arguments.database)
+ for package in arguments.packages:
+ remove_package(con, package)
+ con.commit()
+ con.close()
+
+
+def check(arguments):
+ """List broken packages."""
+ con = begin(arguments.database)
+ available = set(row[0] for row
+ in con.execute("select library from provided"))
+ for package, library in con.execute("select package, library from used"):
+ if library not in available:
+ print(package, "needs", library)
+ con.close()
+
+
+def main():
+ """Get arguments and run the command."""
+ from argparse import ArgumentParser
+ parser = ArgumentParser(prog="check-package-libraries.py",
+ description="Check packages for "
+ "provided/needed libraries")
+ parser.add_argument("-d", "--database", type=str,
+ help="Database file to use",
+ default="package-libraries.sqlite")
+ subparsers = parser.add_subparsers()
+ subparser = subparsers.add_parser(name="init",
+ help="initialize the database")
+ subparser.set_defaults(command=init)
+ subparser = subparsers.add_parser(name="add",
+ help="add packages to database")
+ subparser.add_argument("packages", nargs="+", type=str,
+ help="package files to add")
+ subparser.set_defaults(command=add)
+ subparser = subparsers.add_parser(name="remove",
+ help="remove packages from database")
+ subparser.add_argument("packages", nargs="+", type=str,
+ help="package names to remove")
+ subparser.set_defaults(command=remove)
+ subparser = subparsers.add_parser(name="check",
+ help="list broken packages")
+ subparser.set_defaults(command=check)
+ arguments = parser.parse_args()
+ arguments.command(arguments)
+
+
+if __name__ == "__main__":
+ main()
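
check-package-libraries.py recognizes an ELF object by its first four bytes
(\177ELF) and then parses readelf -d output: SONAME entries are recorded as
provided libraries, NEEDED entries as used ones. A hedged usage sketch of the
subcommands defined above (the database and package file names are made up):

    ./check-package-libraries.py -d libs.sqlite init
    ./check-package-libraries.py -d libs.sqlite add foo-1.0-1-x86_64.pkg.tar.xz
    ./check-package-libraries.py -d libs.sqlite check
    # check prints one "<package> needs <library>" line per unresolved NEEDED

    # The dynamic-section lines the _DYNAMIC regexp matches look like:
    #  0x0000000000000001 (NEEDED)   Shared library: [libc.so.6]
    #  0x000000000000000e (SONAME)   Library soname: [libfoo.so.1]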
diff --git a/extra/legacy/config b/extra/legacy/config
new file mode 100644
index 0000000..be502cb
--- /dev/null
+++ b/extra/legacy/config
@@ -0,0 +1,57 @@
+#!/hint/bash
+
+FTP_BASE="/srv/repo/main"
+
+# Repos from Arch
+ARCHREPOS=('core' 'testing' 'extra' 'community' 'multilib' 'multilib-testing')
+# Official Parabola repos
+OURREPOS=('libre' 'libre-testing' 'libre-multilib' 'libre-multilib-testing')
+# User repos
+USERREPOS=('~smv' '~xihh' '~brendan' '~lukeshu' '~emulatorman' '~aurelien' '~jorginho' '~coadde' '~drtan')
+# Community project repos
+PROJREPOS=('nonsystemd' 'nonsystemd-testing' 'nonprism' 'nonprism-testing' 'pcr' 'kernels' 'cross' 'java')
+# Remote repos
+PKGREPOS=("${ARCHREPOS[@]}" "${OURREPOS[@]}" "${USERREPOS[@]}" "${PROJREPOS[@]}")
+PKGPOOL='pool/parabola'
+SRCPOOL='sources/parabola'
+
+# Directories where packages are shared between repos
+# *relative to FTP_BASE*
+ARCHPKGPOOLS=(pool/{packages,community})
+OURPKGPOOLS=(pool/parabola)
+PKGPOOLS=("${OURPKGPOOLS[@]}" "${ARCHPKGPOOLS[@]}")
+# Directories where sources are stored
+ARCHSRCPOOLS=(sources/{packages,community})
+OURSRCPOOLS=(sources/parabola)
+SRCPOOLS=("${OURSRCPOOLS[@]}" "${ARCHSRCPOOLS[@]}")
+
+CLEANUP_DESTDIR="$FTP_BASE/old/packages"
+CLEANUP_DRYRUN=false
+# Time in days to keep moved packages
+CLEANUP_KEEP=30
+
+SOURCE_CLEANUP_DESTDIR="$FTP_BASE/old/sources"
+SOURCE_CLEANUP_DRYRUN=true
+# Time in days to keep moved sourcepackages
+SOURCE_CLEANUP_KEEP=30
+
+REQUIRE_SIGNATURE=true
+
+LOCK_DELAY=10
+LOCK_TIMEOUT=300
+
+[ -n "${STAGING:-}" ] || STAGING="$HOME/staging/unknown/staging"
+TMPDIR="/tmp"
+ARCHARCHES=(i686 x86_64)
+OURARCHES=(armv7h)
+ARCHES=("${ARCHARCHES[@]}" "${OURARCHES[@]}")
+DBEXT=".db.tar.gz"
+FILESEXT=".files.tar.gz"
+PKGEXT=".pkg.tar.?z"
+SRCEXT=".src.tar.gz"
+
+MAKEPKGCONF="$HOME/.makepkg.conf"
+BLACKLIST_FILE="$HOME/blacklist/blacklist.txt"
+
+# parabolaweb root
+WEB_DIR=/srv/http/parabolagnulinux.org/web
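
One detail in config worth noting: the STAGING line uses the ${VAR:-}
expansion so that a value exported by the caller survives, and the default is
only applied otherwise. A small bash sketch of that behavior (the /srv
override path is an assumption):

    unset STAGING
    [ -n "${STAGING:-}" ] || STAGING="$HOME/staging/unknown/staging"
    echo "$STAGING"        # falls back to the default under $HOME

    STAGING=/srv/staging/libre
    [ -n "${STAGING:-}" ] || STAGING="$HOME/staging/unknown/staging"
    echo "$STAGING"        # keeps /srv/staging/libre

With set -u in a calling script, the :- form also avoids an unbound-variable
error when STAGING was never set at all.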
diff --git a/extra/legacy/create-repo b/extra/legacy/create-repo
new file mode 100755
index 0000000..3feb098
--- /dev/null
+++ b/extra/legacy/create-repo
@@ -0,0 +1,21 @@
+#!/bin/bash
+# Creates repository structure
+
+. "$(dirname "$(readlink -e "$0")")/config"
+. "$(dirname "$(readlink -e "$0")")/db-functions"
+
+if [ $# -eq 0 ]; then
+ msg "Usage: %s repo1 [repo2 ... repoX]" "${0##*/}"
+ exit 1
+fi
+
+msg "Creating repos..."
+for _repo in "$@"; do
+ msg2 "Creating [%s]" "${_repo}"
+ for _arch in "${ARCHES[@]}"; do
+ mkdir -p "${FTP_BASE}/${_repo}/os/${_arch}" || \
+ error "Failed creating %s dir" "${_arch}"
+ done
+done
+
+msg "Don't forget to add them to the PKGREPOS array on %s" "$(dirname "$(readlink -e "$0")")/config"
diff --git a/extra/legacy/createrepos b/extra/legacy/createrepos
new file mode 100755
index 0000000..8da2455
--- /dev/null
+++ b/extra/legacy/createrepos
@@ -0,0 +1,8 @@
+#!/bin/bash
+# Creates the repo structure defined in config
+
+source "$(dirname "$(readlink -e "$0")")/config"
+
+mkdir -p -- "${FTP_BASE}"/{"${PKGPOOL}","${SRCPOOL}"} "${CLEANUP_DESTDIR}" "${SOURCE_CLEANUP_DESTDIR}" "${STAGING}"
+
+"$(dirname "$(readlink -e "$0")")/create-repo" "${PKGREPOS[@]}"
diff --git a/extra/legacy/cron-jobs/check_archlinux/check_packages.py b/extra/legacy/cron-jobs/check_archlinux/check_packages.py
new file mode 100755
index 0000000..ac0194f
--- /dev/null
+++ b/extra/legacy/cron-jobs/check_archlinux/check_packages.py
@@ -0,0 +1,508 @@
+#!/usr/bin/env python2
+#
+# check_archlinux.py
+#
+# Original script by Scott Horowitz <stonecrest@gmail.com>
+# Rewritten by Xavier Chantry <shiningxc@gmail.com>
+#
+# This script currently checks for a number of issues in your ABS tree:
+# 1. Directories with missing PKGBUILDS
+# 2. Invalid PKGBUILDs (bash syntax error for instance)
+# 3. PKGBUILD names that don't match their directory
+# 4. Duplicate PKGBUILDs
+# 5. Valid arch's in PKGBUILDS
+# 6. Missing (make-)dependencies
+# 7. Hierarchy of repos (e.g., that a core package doesn't depend on
+# a non-core package)
+# 8. Circular dependencies
+
+import os,re,commands,getopt,sys,tarfile
+import pdb
+
+import ctypes
+_alpm = ctypes.cdll.LoadLibrary("libalpm.so")
+
+DBEXT='.db.tar.gz'
+
+packages = {} # pkgname : PacmanPackage
+repopkgs = {} # pkgname : PacmanPackage
+provisions = {} # provision : PacmanPackage
+pkgdeps,makepkgdeps = {},{} # PacmanPackage : list of the PacmanPackage dependencies
+invalid_pkgbuilds = []
+missing_pkgbuilds = []
+dups = []
+
+dbonly = []
+absonly = []
+
+mismatches = []
+missing_deps = []
+missing_makedeps = []
+invalid_archs = []
+dep_hierarchy = []
+makedep_hierarchy = []
+circular_deps = [] # pkgname>dep1>dep2>...>pkgname
+checked_deps = []
+
+class PacmanPackage:
+ def __init__(self):
+ self.name,self.version = "",""
+ self.base = ""
+ self.path,self.repo = "",""
+ self.deps,self.makedeps = [],[]
+ self.provides,self.conflicts = [],[]
+ self.archs = []
+
+class Depend:
+ def __init__(self,name,version,mod):
+ self.name = name
+ self.version = version
+ self.mod = mod
+
+def parse_pkgbuilds(repos,arch):
+ for absroot in absroots:
+ for repo in repos:
+ cmd = os.path.dirname(os.path.realpath(sys.argv[0])) + '/parse_pkgbuilds.sh '
+ cmd += arch + ' ' + absroot + '/' + repo
+ (status,output) = commands.getstatusoutput(cmd)
+ if status != 0:
+ print "Error : failed to run '%s'" % cmd
+ sys.exit()
+ parse_data(repo,output)
+
+def parse_data(repo,data):
+ attrname = None
+
+ for line in data.split('\n'):
+ if line.startswith('%'):
+ attrname = line.strip('%').lower()
+ elif line.strip() == '':
+ attrname = None
+ elif attrname == "invalid":
+ if repo in repos:
+ invalid_pkgbuilds.append(line)
+ elif attrname == "missing":
+ if repo in repos:
+ missing_pkgbuilds.append(line)
+ elif attrname == "name":
+ pkg = PacmanPackage()
+ pkg.name = line
+ pkg.repo = repo
+ dup = None
+ if pkg.name in packages:
+ dup = packages[pkg.name]
+ else:
+ packages[pkg.name] = pkg
+ elif attrname == "base":
+ pkg.base = line
+ elif attrname == "version":
+ pkg.version = line
+ elif attrname == "path":
+ pkg.path = line
+ if dup != None and (pkg.repo in repos or dup.repo in repos):
+ dups.append(pkg.path + " vs. " + dup.path)
+ elif attrname == "arch":
+ pkg.archs.append(line)
+ elif attrname == "depends":
+ pkg.deps.append(line)
+ elif attrname == "makedepends":
+ pkg.makedeps.append(line)
+ elif attrname == "conflicts":
+ pkg.conflicts.append(line)
+ elif attrname == "provides":
+ pkg.provides.append(line)
+
+def parse_dbs(repos,arch):
+ dbpkgs = {}
+ for repo in repos:
+ pkgs = set([])
+ db = tarfile.open(os.path.join(repodir,repo,'os',arch,repo + DBEXT))
+ for line in db.getnames():
+ if not '/' in line:
+ pkgs.add(line.rsplit('-',2)[0])
+ dbpkgs[repo] = pkgs
+ return(dbpkgs)
+
+def splitdep(dep):
+ name = dep
+ version = ""
+ mod = ""
+ for char in (">=", "<=", "=", ">", "<"):
+ pos = dep.find(char)
+ if pos > -1:
+ name = dep[:pos]
+ version = dep[pos:].replace(char, "")
+ mod = char
+ break
+ return Depend(name,version,mod)
+
+def splitprov(prov):
+ name = prov
+ version = ""
+ pos = prov.find("=")
+ if pos > -1:
+ name = prov[:pos]
+ version = prov[pos:].replace("=", "")
+ return (name,version)
+
+def vercmp(v1,mod,v2):
+ """
+ >>> vercmp("1.0", "<=", "2.0")
+ True
+ >>> vercmp("1:1.0", ">", "2.0")
+ True
+ >>> vercmp("1.0.2", ">=", "2.1.0")
+ False
+ """
+ s1 = ctypes.c_char_p(v1)
+ s2 = ctypes.c_char_p(v2)
+ res = _alpm.alpm_pkg_vercmp(s1,s2)
+ if res == 0:
+ return (mod.find("=") > -1)
+ elif res < 0:
+ return (mod.find("<") > -1)
+ elif res > 0:
+ return (mod.find(">") > -1)
+ return False
+
+
+def depcmp(name,version,dep):
+ if name != dep.name:
+ return False
+ if dep.version == "" or dep.mod == "":
+ return True
+ if version == "":
+ return False
+ return vercmp(version,dep.mod,dep.version)
+
+def provcmp(pkg,dep):
+ for prov in pkg.provides:
+ (provname,provver) = splitprov(prov)
+ if depcmp(provname,provver,dep):
+ return True
+ return False
+
+def verify_dep(dep):
+ dep = splitdep(dep)
+ if dep.name in packages:
+ pkg = packages[dep.name]
+ if depcmp(pkg.name,pkg.version,dep):
+ return [pkg]
+ if dep.name in provisions:
+ provlist = provisions[dep.name]
+ results = []
+ for prov in provlist:
+ if provcmp(prov,dep):
+ results.append(prov)
+ return results
+ return []
+
+def verify_deps(name,repo,deps):
+ pkg_deps = []
+ missdeps = []
+ hierarchy = []
+ for dep in deps:
+ pkglist = verify_dep(dep)
+ if pkglist == []:
+ missdeps.append(repo + "/" + name + " --> '" + dep + "'")
+ else:
+ valid_repos = get_repo_hierarchy(repo)
+ pkgdep = None
+ for pkg in pkglist:
+ if pkg.repo in valid_repos:
+ pkgdep = pkg
+ break
+ if not pkgdep:
+ pkgdep = pkglist[0]
+ hierarchy.append((repo,name,pkgdep))
+
+ pkg_deps.append(pkgdep)
+
+ return (pkg_deps,missdeps,hierarchy)
+
+def compute_deplist(pkg):
+	deplist = []
+	stack = [pkg]
+	while stack != []:
+		dep = stack.pop()
+		if dep in pkgdeps:
+			for dep2 in pkgdeps[dep]:
+				if dep2 not in deplist:
+					deplist.append(dep2)
+					stack.append(dep2)
+		if dep in makepkgdeps:
+			for dep2 in makepkgdeps[dep]:
+				if dep2 not in deplist:
+					deplist.append(dep2)
+					stack.append(dep2)
+	return deplist
+
+def check_hierarchy(deph):
+ hierarchy = []
+ for (repo,name,pkgdep) in deph:
+ deplist = compute_deplist(pkgdep)
+ valid_repos = get_repo_hierarchy(repo)
+ extdeps = []
+ for dep in deplist:
+ if dep.repo not in valid_repos:
+ extdeps.append(dep.name)
+ string = repo + "/" + name + " depends on " + pkgdep.repo + "/" + pkgdep.name + " ("
+ string += "%s extra (make)deps to pull" % len(extdeps)
+ if 0 < len(extdeps) < 10:
+ string += " : " + ' '.join(extdeps)
+ string += ")"
+ hierarchy.append(string)
+ return hierarchy
+
+def get_repo_hierarchy(repo):
+ repo_hierarchy = {'core': ['core'], \
+ 'extra': ['core', 'extra'], \
+ 'community': ['core', 'extra', 'community'], \
+ 'multilib': ['core', 'extra', 'community', 'multilib'] }
+ if repo in repo_hierarchy:
+ return repo_hierarchy[repo]
+ else:
+ return ['core','extra','community']
+
+def verify_archs(name,repo,archs):
+ valid_archs = ['any', 'i686', 'x86_64']
+ invalid_archs = []
+ for arch in archs:
+ if arch not in valid_archs:
+ invalid_archs.append(repo + "/" + name + " --> " + arch)
+ return invalid_archs
+
+def find_scc(packages):
+ # reset all variables
+ global index,S,pkgindex,pkglowlink
+ index = 0
+ S = []
+ pkgindex = {}
+ pkglowlink = {}
+ cycles = []
+ for pkg in packages:
+ tarjan(pkg)
+
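+# Tarjan's strongly-connected-components algorithm: any SCC containing
+# more than one package corresponds to a dependency cycle, which is
+# recorded in circular_deps.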
+def tarjan(pkg):
+ global index,S,pkgindex,pkglowlink,cycles
+ pkgindex[pkg] = index
+ pkglowlink[pkg] = index
+ index += 1
+ checked_deps.append(pkg)
+ S.append(pkg)
+ deps = []
+ if pkg in pkgdeps:
+ deps = pkgdeps[pkg]
+ for dep in deps:
+ if dep not in pkgindex:
+ tarjan(dep)
+ pkglowlink[pkg] = min(pkglowlink[pkg],pkglowlink[dep])
+ elif dep in S:
+ pkglowlink[pkg] = min(pkglowlink[pkg],pkgindex[dep])
+ if pkglowlink[pkg] == pkgindex[pkg]:
+ dep = S.pop()
+ if pkg == dep:
+ return
+ path = pkg.name
+ while pkg != dep:
+ path = dep.repo + "/" + dep.name + ">" + path
+ dep = S.pop()
+ path = dep.name + ">" + path
+ if pkg.repo in repos:
+ circular_deps.append(path)
+
+def print_heading(heading):
+ print ""
+ print "=" * (len(heading) + 4)
+ print "= " + heading + " ="
+ print "=" * (len(heading) + 4)
+
+def print_subheading(subheading):
+ print ""
+ print subheading
+ print "-" * (len(subheading) + 2)
+
+def print_missdeps(pkgname,missdeps):
+ for d in missdeps:
+ print pkgname + " : " + d
+
+def print_result(items, subheading):
+	if len(items) > 0:
+		items.sort()
+		print_subheading(subheading)
+		for item in items:
+			print item
+
+def print_results():
+ print_result(missing_pkgbuilds, "Missing PKGBUILDs")
+ print_result(invalid_pkgbuilds, "Invalid PKGBUILDs")
+ print_result(mismatches, "Mismatched Pkgnames")
+ print_result(dups, "Duplicate PKGBUILDs")
+ print_result(invalid_archs, "Invalid Archs")
+ print_result(missing_deps, "Missing Dependencies")
+ print_result(missing_makedeps, "Missing Makedepends")
+ print_result(dep_hierarchy, "Repo Hierarchy for Dependencies")
+ print_result(makedep_hierarchy, "Repo Hierarchy for Makedepends")
+ print_result(circular_deps, "Circular Dependencies")
+ print_result(dbonly, "Packages found in db, but not in tree")
+ print_result(absonly,"Packages found in tree, but not in db")
+ print_subheading("Summary")
+ print "Missing PKGBUILDs: ", len(missing_pkgbuilds)
+ print "Invalid PKGBUILDs: ", len(invalid_pkgbuilds)
+ print "Mismatching PKGBUILD names: ", len(mismatches)
+ print "Duplicate PKGBUILDs: ", len(dups)
+ print "Invalid archs: ", len(invalid_archs)
+ print "Missing (make)dependencies: ", len(missing_deps)+len(missing_makedeps)
+ print "Repo hierarchy problems: ", len(dep_hierarchy)+len(makedep_hierarchy)
+ print "Circular dependencies: ", len(circular_deps)
+ print "In db, but not in tree: ", len(dbonly)
+ print "In tree, but not in db: ", len(absonly)
+ print ""
+
+def print_usage():
+ print ""
+ print "Usage: ./check_packages.py [OPTION]"
+ print ""
+ print "Options:"
+ print " --abs-tree=<path[,path]> Check the specified tree(s) (default : /var/abs)"
+ print " --repos=<r1,r2,...> Check the specified repos (default : core,extra)"
+ print " --arch=<i686|x86_64> Check the specified arch (default : i686)"
+ print " --repo-dir=<path> Check the dbs at the specified path (default : /srv/ftp)"
+ print " -h, --help Show this help and exit"
+ print ""
+ print "Examples:"
+ print "\n Check core and extra in existing abs tree:"
+ print " ./check_packages.py --abs-tree=/var/abs --repos=core,extra --arch=i686"
+ print "\n Check community:"
+ print " ./check_packages.py --abs-tree=/var/abs --repos=community --arch=i686"
+ print ""
+
+if __name__ == "__main__":
+ ## Default path to the abs root directory
+ absroots = ["/var/abs"]
+ ## Default list of repos to check
+ repos = ['core', 'extra']
+ ## Default arch
+ arch = "i686"
+ ## Default repodir
+ repodir = "/srv/ftp"
+
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], "", ["abs-tree=", "repos=",
+ "arch=", "repo-dir="])
+ except getopt.GetoptError:
+ print_usage()
+ sys.exit()
+ if opts != []:
+ for o, a in opts:
+			if o == "--abs-tree":
+				absroots = a.split(',')
+			elif o == "--repos":
+				repos = a.split(",")
+			elif o == "--arch":
+				arch = a
+			elif o == "--repo-dir":
+				repodir = a
+ else:
+ print_usage()
+ sys.exit()
+ if args != []:
+ print_usage()
+ sys.exit()
+
+ for absroot in absroots:
+ if not os.path.isdir(absroot):
+ print "Error : the abs tree " + absroot + " does not exist"
+ sys.exit()
+ for repo in repos:
+ repopath = absroot + "/" + repo
+ if not os.path.isdir(repopath):
+ print("Warning : the repository " + repo + " does not exist in " + absroot)
+
+ if not os.path.isdir(repodir):
+ print "Error: the repository directory %s does not exist" % repodir
+ sys.exit()
+ for repo in repos:
+ path = os.path.join(repodir,repo,'os',arch,repo + DBEXT)
+ if not os.path.isfile(path):
+ print "Error : repo DB %s : File not found" % path
+ sys.exit()
+ if not tarfile.is_tarfile(path):
+			print "Error : Can't open repo DB %s, not a valid tar file" % path
+ sys.exit()
+ # repos which need to be loaded
+ loadrepos = set([])
+ for repo in repos:
+ loadrepos = loadrepos | set(get_repo_hierarchy(repo))
+
+ print_heading("Integrity Check " + arch + " of " + ",".join(repos))
+ print("\nPerforming integrity checks...")
+
+ print("==> parsing pkgbuilds")
+ parse_pkgbuilds(loadrepos,arch)
+
+ # fill provisions
+ for name,pkg in packages.iteritems():
+ for prov in pkg.provides:
+ provname=prov.split("=")[0]
+ if provname not in provisions:
+ provisions[provname] = []
+ provisions[provname].append(pkg)
+
+ # fill repopkgs
+ for name,pkg in packages.iteritems():
+ if pkg.repo in repos:
+ repopkgs[name] = pkg
+
+ print("==> parsing db files")
+ dbpkgs = parse_dbs(repos,arch)
+
+ print("==> checking mismatches")
+ for name,pkg in repopkgs.iteritems():
+ pkgdirname = pkg.path.split("/")[-1]
+ if name != pkgdirname and pkg.base != pkgdirname:
+ mismatches.append(name + " vs. " + pkg.path)
+
+ print("==> checking archs")
+ for name,pkg in repopkgs.iteritems():
+ archs = verify_archs(name,pkg.repo,pkg.archs)
+ invalid_archs.extend(archs)
+
+ deph,makedeph = [],[]
+
+ print("==> checking dependencies")
+ for name,pkg in repopkgs.iteritems():
+ (deps,missdeps,hierarchy) = verify_deps(name,pkg.repo,pkg.deps)
+ pkgdeps[pkg] = deps
+ missing_deps.extend(missdeps)
+ deph.extend(hierarchy)
+
+ print("==> checking makedepends")
+ for name,pkg in repopkgs.iteritems():
+ (makedeps,missdeps,hierarchy) = verify_deps(name,pkg.repo,pkg.makedeps)
+ makepkgdeps[pkg] = makedeps
+ missing_makedeps.extend(missdeps)
+ makedeph.extend(hierarchy)
+
+ print("==> checking hierarchy")
+ dep_hierarchy = check_hierarchy(deph)
+ makedep_hierarchy = check_hierarchy(makedeph)
+
+ print("==> checking for circular dependencies")
+ # make sure pkgdeps is filled for every package
+ for name,pkg in packages.iteritems():
+ if pkg not in pkgdeps:
+ (deps,missdeps,_) = verify_deps(name,pkg.repo,pkg.deps)
+ pkgdeps[pkg] = deps
+ find_scc(repopkgs.values())
+
+ print("==> checking for differences between db files and pkgbuilds")
+ for repo in repos:
+ for pkg in dbpkgs[repo]:
+ if not (pkg in repopkgs and repopkgs[pkg].repo == repo):
+ dbonly.append("%s/%s" % (repo,pkg))
+ for name,pkg in repopkgs.iteritems():
+ if not name in dbpkgs[pkg.repo]:
+ absonly.append("%s/%s" % (pkg.repo,name))
+
+ print_results()
diff --git a/extra/legacy/cron-jobs/check_archlinux/parse_pkgbuilds.sh b/extra/legacy/cron-jobs/check_archlinux/parse_pkgbuilds.sh
new file mode 100755
index 0000000..b857ac8
--- /dev/null
+++ b/extra/legacy/cron-jobs/check_archlinux/parse_pkgbuilds.sh
@@ -0,0 +1,153 @@
+#!/bin/bash
+
+# Usage : parse_pkgbuilds.sh arch <pkgbuilds_dir1,dir2,...>
+# Example : parse_pkgbuilds.sh i686 /var/abs/core /var/abs/extra
+
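+# PKGBUILDs are sourced below; override exit so a stray "exit" inside a
+# PKGBUILD cannot kill this parser ("builtin exit" bypasses the override
+# where a real exit is wanted).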
+exit() { return; }
+
+splitpkg_overrides=('depends' 'optdepends' 'provides' 'conflicts')
+variables=('pkgname' 'pkgbase' 'epoch' 'pkgver' 'pkgrel' 'makedepends' 'arch' "${splitpkg_overrides[@]}")
+readonly -a variables splitpkg_overrides
+
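+# Split packages may override some variables per package; save and
+# restore them around each package function using indirect (eval-based)
+# array assignment.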
+backup_package_variables() {
+ for var in "${splitpkg_overrides[@]}"; do
+ indirect="${var}_backup"
+ eval "${indirect}=(\"\${$var[@]}\")"
+ done
+}
+
+restore_package_variables() {
+ for var in "${splitpkg_overrides[@]}"; do
+ indirect="${var}_backup"
+ if [ -n "${!indirect}" ]; then
+ eval "${var}=(\"\${$indirect[@]}\")"
+ else
+ unset "${var}"
+ fi
+ done
+}
+
+print_info() {
+ echo -e "%NAME%\n$pkgname\n"
+ if [ -n "$epoch" ]; then
+ echo -e "%VERSION%\n$epoch:$pkgver-$pkgrel\n"
+ else
+ echo -e "%VERSION%\n$pkgver-$pkgrel\n"
+ fi
+ echo -e "%PATH%\n$dir\n"
+
+ if [ -n "$pkgbase" ]; then
+ echo -e "%BASE%\n$pkgbase\n"
+ fi
+
+ if [ -n "$arch" ]; then
+ echo "%ARCH%"
+ for i in "${arch[@]}"; do echo "$i"; done
+ echo ""
+ fi
+ if [ -n "$depends" ]; then
+ echo "%DEPENDS%"
+ for i in "${depends[@]}"; do
+ echo "$i"
+ done
+ echo ""
+ fi
+ if [ -n "$makedepends" ]; then
+ echo "%MAKEDEPENDS%"
+ for i in "${makedepends[@]}"; do
+ echo "$i"
+ done
+ echo ""
+ fi
+ if [ -n "$conflicts" ]; then
+ echo "%CONFLICTS%"
+ for i in "${conflicts[@]}"; do echo "$i"; done
+ echo ""
+ fi
+ if [ -n "$provides" ]; then
+ echo "%PROVIDES%"
+ for i in "${provides[@]}"; do echo "$i"; done
+ echo ""
+ fi
+}
+
+source_pkgbuild() {
+ ret=0
+ dir=$1
+ pkgbuild=$dir/PKGBUILD
+ for var in "${variables[@]}"; do
+ unset "${var}"
+ done
+ source "$pkgbuild" &>/dev/null || ret=$?
+
+ # ensure $pkgname and $pkgver variables were found
+ if [ $ret -ne 0 -o -z "$pkgname" -o -z "$pkgver" ]; then
+ echo -e "%INVALID%\n$pkgbuild\n"
+ return 1
+ fi
+
+ if [ "${#pkgname[@]}" -gt "1" ]; then
+ pkgbase=${pkgbase:-${pkgname[0]}}
+ for pkg in "${pkgname[@]}"; do
+ if [ "$(type -t "package_${pkg}")" != "function" ]; then
+ echo -e "%INVALID%\n$pkgbuild\n"
+ return 1
+ else
+ backup_package_variables
+ pkgname=$pkg
+ while IFS= read -r line; do
+ var=${line%%=*}
+ var="${var#"${var%%[![:space:]]*}"}" # remove leading whitespace characters
+ for realvar in "${variables[@]}"; do
+ if [ "$var" == "$realvar" ]; then
+ eval $line
+ break
+ fi
+ done
+ done < <(type "package_${pkg}")
+ print_info
+ restore_package_variables
+ fi
+ done
+ else
+ echo
+ print_info
+ fi
+
+ return 0
+}
+
+find_pkgbuilds() {
+ #Skip over some dirs
+ local d="${1##*/}"
+ if [ "$d" = "CVS" -o "$d" = ".svn" ]; then
+ return
+ fi
+
+ if [ -f "$1/PKGBUILD" ]; then
+ source_pkgbuild "$1"
+ return
+ fi
+ empty=1
+ for dir in "$1"/*; do
+ if [ -d "$dir" ]; then
+ find_pkgbuilds "$dir"
+ unset empty
+ fi
+ done
+ if [ -n "$empty" ]; then
+ echo -e "%MISSING%\n$1\n"
+ fi
+}
+
+if [ -z "$1" -o -z "$2" ]; then
+	builtin exit 1
+fi
+
+CARCH=$1
+shift
+for dir in "$@"; do
+ find_pkgbuilds "$dir"
+done
+
+builtin exit 0
diff --git a/extra/legacy/cron-jobs/devlist-mailer b/extra/legacy/cron-jobs/devlist-mailer
new file mode 100755
index 0000000..1a05521
--- /dev/null
+++ b/extra/legacy/cron-jobs/devlist-mailer
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Dummy helper to send email to arch-dev.
+# It does nothing if there is no input on stdin.
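+# Usage: some-report | devlist-mailer [subject] [mailto]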
+
+LIST="arch-dev-public@archlinux.org"
+#LIST="aaronmgriffin@gmail.com"
+FROM="repomaint@archlinux.org"
+
+SUBJECT="Repository Maintenance $(date +"%d-%m-%Y")"
+if [ $# -ge 1 ]; then
+ SUBJECT="$1 $(date +"%d-%m-%Y")"
+fi
+
+if [ $# -ge 2 ]; then
+ LIST="$2"
+fi
+
+stdin="$(cat)"
+# echo is used (unquoted) to strip whitespace so we only send mail when
+# stdin contains actual data
+if [ -n "$(echo $stdin)" ]; then
+
+ echo "Subject: $SUBJECT
+To: $LIST
+From: $FROM
+
+$stdin" | /usr/sbin/sendmail -F"$FROM" "$LIST"
+
+fi
diff --git a/extra/legacy/cron-jobs/ftpdir-cleanup b/extra/legacy/cron-jobs/ftpdir-cleanup
new file mode 100755
index 0000000..4a2b418
--- /dev/null
+++ b/extra/legacy/cron-jobs/ftpdir-cleanup
@@ -0,0 +1,88 @@
+#!/bin/bash
+
+. "$(dirname "$(readlink -e "$0")")/../config"
+. "$(dirname "$(readlink -e "$0")")/../db-functions"
+
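+# Remove a package file: symlinks (repo entries pointing into the pool)
+# are deleted outright, while regular files are moved to CLEANUP_DESTDIR
+# and touched so that they expire after CLEANUP_KEEP days.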
+clean_pkg() {
+ local pkg
+ local target
+
+ if ! "${CLEANUP_DRYRUN}"; then
+ for pkg in "$@"; do
+ if [ -h "$pkg" ]; then
+ rm -f "$pkg" "$pkg.sig"
+ else
+ mv_acl "$pkg" "$CLEANUP_DESTDIR/${pkg##*/}"
+ if [ -e "$pkg.sig" ]; then
+ mv_acl "$pkg.sig" "$CLEANUP_DESTDIR/${pkg##*/}.sig"
+ fi
+ touch "${CLEANUP_DESTDIR}/${pkg##*/}"
+ fi
+ done
+ fi
+}
+
+"${CLEANUP_DRYRUN}" && warning 'dry run mode is active'
+
+for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
+ if [ ! -f "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" ]; then
+ continue
+ fi
+		# get a list of the package files actually available
+ find "${FTP_BASE}/${repo}/os/${arch}" -xtype f -name "*${PKGEXT}" -printf '%f\n' | sort > "${WORKDIR}/repo-${repo}-${arch}"
+ # get a list of package files defined in the repo db
+ bsdtar -xOf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" | awk '/^%FILENAME%/{getline;print}' | sort > "${WORKDIR}/db-${repo}-${arch}"
+
+ missing_pkgs=($(comm -13 "${WORKDIR}/repo-${repo}-${arch}" "${WORKDIR}/db-${repo}-${arch}"))
+ if [ ${#missing_pkgs[@]} -ge 1 ]; then
+ error "Missing packages in [%s] (%s)..." "${repo}" "${arch}"
+ for missing_pkg in "${missing_pkgs[@]}"; do
+ msg2 '%s' "${missing_pkg}"
+ done
+ fi
+
+ old_pkgs=($(comm -23 "${WORKDIR}/repo-${repo}-${arch}" "${WORKDIR}/db-${repo}-${arch}"))
+ if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old packages from [%s] (%s)..." "${repo}" "${arch}"
+ for old_pkg in "${old_pkgs[@]}"; do
+ msg2 '%s' "${old_pkg}"
+ clean_pkg "${FTP_BASE}/${repo}/os/${arch}/${old_pkg}"
+ done
+ fi
+ done
+done
+
+# get a list of all available packages in the package pool
+find "$FTP_BASE/${PKGPOOL}" -name "*${PKGEXT}" -printf '%f\n' | sort > "${WORKDIR}/pool"
+# create a list of packages in our db
+cat "${WORKDIR}/db-"* | sort -u > "${WORKDIR}/db"
+
+old_pkgs=($(comm -23 "${WORKDIR}/pool" "${WORKDIR}/db"))
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old packages from package pool..."
+ for old_pkg in "${old_pkgs[@]}"; do
+ msg2 '%s' "${old_pkg}"
+ clean_pkg "$FTP_BASE/${PKGPOOL}/${old_pkg}"
+ done
+fi
+
+old_pkgs=($(find "${CLEANUP_DESTDIR}" -type f -name "*${PKGEXT}" -mtime +"${CLEANUP_KEEP}" -printf '%f\n'))
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old packages from the cleanup directory..."
+ for old_pkg in "${old_pkgs[@]}"; do
+ msg2 '%s' "${old_pkg}"
+ if ! "${CLEANUP_DRYRUN}"; then
+ rm -f "${CLEANUP_DESTDIR}/${old_pkg}"
+ rm -f "${CLEANUP_DESTDIR}/${old_pkg}.sig"
+ fi
+ done
+fi
+
+for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
+ repo_unlock "${repo}" "${arch}"
+ done
+done
+
+script_unlock
diff --git a/extra/legacy/cron-jobs/integrity-check b/extra/legacy/cron-jobs/integrity-check
new file mode 100755
index 0000000..7459380
--- /dev/null
+++ b/extra/legacy/cron-jobs/integrity-check
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+dirname="$(dirname "$(readlink -e "$0")")"
+
+. "${dirname}/../config"
+. "${dirname}/../db-functions"
+
+script_lock
+
+if [ $# -ne 1 ]; then
+ die "usage: %s <mailto>" "${0##*/}"
+fi
+mailto=$1
+
+check() {
+ "${dirname}"/check_archlinux/check_packages.py \
+ --repos="${repos}" \
+ --abs-tree="/srv/abs/rsync/${arch},/srv/abs/rsync/any" \
+ --repo-dir="${FTP_BASE}" \
+ --arch="${arch}" \
+ 2>&1 | "${dirname}"/devlist-mailer "Integrity Check ${arch}: ${repos}" "${mailto}"
+}
+
+repos='core,extra,community'
+arch='i686'
+check
+
+repos='core,extra,community,multilib'
+arch='x86_64'
+check
+
+script_unlock
diff --git a/extra/legacy/cron-jobs/make_repo_torrents b/extra/legacy/cron-jobs/make_repo_torrents
new file mode 100755
index 0000000..2eb0978
--- /dev/null
+++ b/extra/legacy/cron-jobs/make_repo_torrents
@@ -0,0 +1,70 @@
+#!/bin/bash
+# Copyright (C) 2014 Joseph Graham <joseph@t67.eu>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+# This script finds any updated packages and calls
+# `make_individual_torrent' for each of them.
+
+username=$( id -un )
+
+case "${username}" in
+ repo | root )
+ true
+ ;;
+ * )
+ echo "This script must be run as repo user or root user."
+ echo "ByeBye!"
+ exit 1
+ ;;
+esac
+
+# pacman doesn't support multiple different packages of the same name,
+# so it's OK to just stuff all the torrents into a single directory.
+script_directory="$(dirname "$(readlink -e "$0")")/.."
+. "$(dirname "$(readlink -e "$0")")/../config"
+public_location="$FTP_BASE/"
+torrent_location="$FTP_BASE/torrents/"
+
+cd "${torrent_location}"
+
+# Find any directories that might have packages in them
+find "${public_location}" -name 'os' -type 'd' |
+while read dir
+do
+ # Find any packages
+ find "${dir}" -name '*\.pkg\.tar\.xz' |
+ while read pkg
+ do
+ pkg_name="${pkg##*/}"
+
+ if [[ -h "${pkg}" ]] # check if it's a symbolic link
+ then
+ # We get the target of the symlink
+ pkg=$( readlink -f "${pkg}" )
+ fi
+
+ # If a .torrent file does not already exist for this package, we call
+ # `make_individual_torrent' to make it.
+ if ! [[ -f "${torrent_location}${pkg_name}.torrent" ]]
+ then
+ "$script_directory/make_individual_torrent" "${pkg}" "${public_location}"
+ fi
+ done
+done
+
+if [[ "${username}" == root ]]
+then
+ chown repo *
+fi
diff --git a/extra/legacy/cron-jobs/repo-sanity-check b/extra/legacy/cron-jobs/repo-sanity-check
new file mode 100755
index 0000000..239f042
--- /dev/null
+++ b/extra/legacy/cron-jobs/repo-sanity-check
@@ -0,0 +1,57 @@
+#!/bin/bash
+# Solves issue165... on the old roundup install. From the database
+# backups, the title was "Older/deprecated packages never leave the
+# repo". I don't know how the body of the issue is stored in the DB,
+# but the title says enough, I think.
+
+. "$(dirname "$(readlink -e "$0")")/../config"
+. "$(dirname "$(readlink -e "$0")")/../db-functions"
+
+# Traverse all repos
+for _repo in "${PKGREPOS[@]}"; do
+ msg "Cleaning up [%s]" "${_repo}"
+
+ # Find all pkgnames on this repo's abs
+ on_abs=($(
+ find "${SVNREPO}/${_repo}" -name PKGBUILD | \
+ while read pkgbuild; do
+ source "${pkgbuild}" >/dev/null 2>&1
+ # cleanup to save memory
+ unset build package source md5sums pkgdesc pkgver pkgrel epoch \
+ url license arch depends makedepends optdepends options \
+ >/dev/null 2>&1
+
+ # also cleanup package functions
+ for _pkg in "${pkgname[@]}"; do
+			unset "package_${_pkg}" >/dev/null 2>&1
+ done
+
+ # this fills the on_abs array
+ echo "${pkgname[@]}"
+ done
+ ))
+
+	# skip this repo if its abs tree is empty
+	if [ ${#on_abs[*]} -eq 0 ]; then
+		warning "[%s]'s ABS tree is empty, skipping" "${_repo}"
+		continue
+ fi
+
+ # Find all pkgnames on repos
+ on_repo=($(
+ find "${FTP_BASE}/${_repo}" -name "*.pkg.tar.?z" \
+ -printf "%f\n" | sed "s/^\(.\+\)-[^-]\+-[^-]\+-[^-]\+$/\1/"
+ ))
+
+ # Compares them, whatever is on repos but not on abs should be removed
+ remove=($(comm -13 \
+ <(printf '%s\n' "${on_abs[@]}" | sort -u) \
+ <(printf '%s\n' "${on_repo[@]}" | sort -u) ))
+
+	# Remove them from databases, ftpdir-cleanup will take care of the rest
+	if [ ${#remove[@]} -ge 1 ]; then
+		find "${FTP_BASE}/${_repo}" -name "*.db.tar.?z" \
+			-exec repo-remove {} "${remove[@]}" \; >/dev/null 2>&1
+
+		msg2 "Removed the following packages:"
+		plain '%s' "${remove[@]}"
+	fi
+done
diff --git a/extra/legacy/cron-jobs/sourceballs b/extra/legacy/cron-jobs/sourceballs
new file mode 100755
index 0000000..c12a128
--- /dev/null
+++ b/extra/legacy/cron-jobs/sourceballs
@@ -0,0 +1,151 @@
+#!/bin/bash
+
+dirname="$(dirname "$(readlink -e "$0")")"
+. "${dirname}/../config"
+. "${dirname}/../db-functions"
+pushd "${WORKDIR}" >/dev/null
+
+script_lock
+
+for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
+ repo_lock "${repo}" "${arch}" || exit 1
+ done
+done
+
+#adjust the nice level to run at a lower priority
+renice +10 -p $$ > /dev/null
+
+# Create a readable file for each repo with the following format
+# <pkgbase|pkgname> <pkgver>-<pkgrel> <arch> <license>[ <license>]
+for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
+ # Repo does not exist; skip it
+ if [ ! -f "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" ]; then
+ continue
+ fi
+ bsdtar -xOf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" \
+ | awk '/^%NAME%/ { getline b };
+ /^%BASE%/ { getline b };
+ /^%VERSION%/ { getline v };
+ /^%LICENSE%/,/^$/ {
+ if ( !/^%LICENSE%/ ) { l=l" "$0 }
+ };
+ /^%ARCH%/ {
+ getline a;
+ printf "%s %s %s %s\n", b, v, a, l;
+ l="";
+ }'
+ done | sort -u > "${WORKDIR}/db-${repo}"
+done
+
+for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
+ repo_unlock "${repo}" "${arch}"
+ done
+done
+
+# Create a list of all available source package file names
+find "${FTP_BASE}/${SRCPOOL}" -xtype f -name "*${SRCEXT}" -printf '%f\n' | sort -u > "${WORKDIR}/available-src-pkgs"
+
+# Check for all packages if we need to build a source package
+for repo in "${PKGREPOS[@]}"; do
+ newpkgs=()
+ failedpkgs=()
+	while read -r -a pkginfo; do
+ pkgbase=${pkginfo[0]}
+ pkgver=${pkginfo[1]}
+ pkgarch=${pkginfo[2]}
+ pkglicense=("${pkginfo[@]:3}")
+
+ # Should this package be skipped?
+ if grep -Fqx "${pkgbase}" "${dirname}/sourceballs.skip"; then
+ continue
+ fi
+ # Commenting out, we'll sourceball everything
+ # Check if the license or .force file does not enforce creating a source package
+# if ! (chk_license ${pkglicense[@]} || grep -Fqx "${pkgbase}" "${dirname}/sourceballs.force"); then
+# continue
+# fi
+ # Store the expected file name of the source package
+ echo "${pkgbase}-${pkgver}${SRCEXT}" >> "${WORKDIR}/expected-src-pkgs"
+
+ # Build the source package if its not already there
+ if ! grep -Fqx "${pkgbase}-${pkgver}${SRCEXT}" "${WORKDIR}/available-src-pkgs"; then
+ # Check if we had failed before
+ if in_array "${pkgbase}-${pkgver}${SRCEXT}" "${failedpkgs[@]}"; then
+ continue
+ fi
+
+ # Get the sources from xbs
+ mkdir -p -m0770 "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}"
+ cp -a "$(xbs releasepath "${pkgbase}" "${repo}" "${pkgarch}")" \
+ "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null 2>&1
+ if [ $? -ge 1 ]; then
+ failedpkgs+=("${pkgbase}-${pkgver}${SRCEXT}")
+ continue
+ fi
+
+ # Build the actual source package
+ pushd "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null
+ SRCPKGDEST=. makepkg --nocolor --allsource --ignorearch --skippgpcheck >"${WORKDIR}/${pkgbase}.log" 2>&1
+ if [ $? -eq 0 ] && [ -f "${pkgbase}-${pkgver}${SRCEXT}" ]; then
+ mv_acl "${pkgbase}-${pkgver}${SRCEXT}" "${FTP_BASE}/${SRCPOOL}/${pkgbase}-${pkgver}${SRCEXT}"
+ # Avoid creating the same source package for every arch
+ echo "${pkgbase}-${pkgver}${SRCEXT}" >> "${WORKDIR}/available-src-pkgs"
+ newpkgs+=("${pkgbase}-${pkgver}${SRCEXT}")
+ else
+ failedpkgs+=("${pkgbase}-${pkgver}${SRCEXT}")
+ cat "${WORKDIR}/${pkgbase}.log" >> "${WORKDIR}/makepkg-fail.log"
+ fi
+ popd >/dev/null
+ fi
+ done < "${WORKDIR}/db-${repo}"
+
+ if [ ${#newpkgs[@]} -ge 1 ]; then
+ msg "Adding source packages for [%s]..." "${repo}"
+ for new_pkg in "${newpkgs[@]}"; do
+ msg2 '%s' "${new_pkg}"
+ done
+ fi
+ if [ ${#failedpkgs[@]} -ge 1 ]; then
+ msg "Failed to create source packages for [%s]..." "${repo}"
+ for failed_pkg in "${failedpkgs[@]}"; do
+ msg2 '%s' "${failed_pkg}"
+ done
+ fi
+done
+
+# Cleanup old source packages
+cat "${WORKDIR}/expected-src-pkgs" | sort -u > "${WORKDIR}/expected-src-pkgs.sort"
+cat "${WORKDIR}/available-src-pkgs" | sort -u > "${WORKDIR}/available-src-pkgs.sort"
+old_pkgs=($(comm -23 "${WORKDIR}/available-src-pkgs.sort" "${WORKDIR}/expected-src-pkgs.sort"))
+
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old source packages..."
+ "${SOURCE_CLEANUP_DRYRUN}" && warning 'dry run mode is active'
+ for old_pkg in "${old_pkgs[@]}"; do
+ msg2 '%s' "${old_pkg}"
+ if ! "${SOURCE_CLEANUP_DRYRUN}"; then
+ mv_acl "$FTP_BASE/${SRCPOOL}/${old_pkg}" "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
+ touch "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
+ fi
+ done
+fi
+
+old_pkgs=($(find "${SOURCE_CLEANUP_DESTDIR}" -type f -name "*${SRCEXT}" -mtime +"${SOURCE_CLEANUP_KEEP}" -printf '%f\n'))
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old source packages from the cleanup directory..."
+ for old_pkg in "${old_pkgs[@]}"; do
+ msg2 '%s' "${old_pkg}"
+ "${SOURCE_CLEANUP_DRYRUN}" || rm -f "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
+ done
+fi
+
+if [ -f "${WORKDIR}/makepkg-fail.log" ]; then
+ msg "Log of failed packages"
+ cat "${WORKDIR}/makepkg-fail.log"
+fi
+
+script_unlock
diff --git a/extra/legacy/cron-jobs/sourceballs.force b/extra/legacy/cron-jobs/sourceballs.force
new file mode 100644
index 0000000..badf15d
--- /dev/null
+++ b/extra/legacy/cron-jobs/sourceballs.force
@@ -0,0 +1,4 @@
+faad2
+wxgtk
+wxpython
+glhack
diff --git a/extra/legacy/cron-jobs/sourceballs.skip b/extra/legacy/cron-jobs/sourceballs.skip
new file mode 100644
index 0000000..14d6f4b
--- /dev/null
+++ b/extra/legacy/cron-jobs/sourceballs.skip
@@ -0,0 +1,29 @@
+nexuiz-data
+torcs-data
+tremulous-data
+ufoai-data
+frogatto-data
+vdrift-data
+naev-data
+btanks-data
+wesnoth-data
+texlive-bin
+texlive-bibtexextra
+texlive-core
+texlive-fontsextra
+texlive-formatsextra
+texlive-games
+texlive-genericextra
+texlive-htmlxml
+texlive-humanities
+texlive-langcjk
+texlive-langcyrillic
+texlive-langextra
+texlive-langgreek
+texlive-latexextra
+texlive-music
+texlive-pictures
+texlive-plainextra
+texlive-pstricks
+texlive-publishers
+texlive-science
diff --git a/extra/legacy/db-check-nonfree b/extra/legacy/db-check-nonfree
new file mode 100755
index 0000000..37b7cf6
--- /dev/null
+++ b/extra/legacy/db-check-nonfree
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+. "$(dirname "$(readlink -e "$0")")/config"
+. "$(dirname "$(readlink -e "$0")")/db-functions"
+
+if [ $# -ge 1 ]; then
+ error "Calling %s with a specific repository is not supported" "${0##*/}"
+ exit 1
+fi
+
+# TODO: this might lock too much (architectures)
+for repo in "${ARCHREPOS[@]}"; do
+ for pkgarch in "${ARCHES[@]}"; do
+ repo_lock "${repo}" "${pkgarch}" || exit 1
+ done
+done
+
+msg "Check nonfree in repo:"
+nonfree=($(cut -d: -f1 "${BLACKLIST_FILE}" | sort -u))
+for repo in "${ARCHREPOS[@]}"; do
+ for pkgarch in "${ARCHES[@]}"; do
+ msg2 "%s %s" "$repo" "$pkgarch"
+ if [ ! -f "${FTP_BASE}/${repo}/os/${pkgarch}/${repo}${DBEXT}" ]; then
+ continue
+ fi
+ unset dbpkgs
+ unset cleanpkgs
+ cleanpkgs=()
+ dbpkgs=($(bsdtar -xOf "${FTP_BASE}/${repo}/os/${pkgarch}/${repo}${DBEXT}" | awk '/^%NAME%/{getline;print}' | sort -u ))
+ for pkgname in "${dbpkgs[@]}"; do
+ if in_array "${pkgname}" "${nonfree[@]}"; then
+ cleanpkgs+=("${pkgname}")
+ fi
+ done
+ if [ ${#cleanpkgs[@]} -ge 1 ]; then
+ msg2 "Nonfree: %s" "${cleanpkgs[*]}"
+ arch_repo_remove "${repo}" "${pkgarch}" "${cleanpkgs[@]}"
+ fi
+ done
+done
+
+for repo in "${ARCHREPOS[@]}"; do
+ for pkgarch in "${ARCHES[@]}"; do
+ repo_unlock "${repo}" "${pkgarch}"
+ done
+done
diff --git a/extra/legacy/db-cleanup b/extra/legacy/db-cleanup
new file mode 100755
index 0000000..ffa2601
--- /dev/null
+++ b/extra/legacy/db-cleanup
@@ -0,0 +1,69 @@
+#!/bin/bash
+# Syncs pools against themselves, using database contents as a filter to
+# clean them up
+# License: GPLv3
+
+# Principles
+# * Get repos dbs contents
+# * Make them an include list
+# * Rsync pools against themselves removing excluded files
+# * Instant cleanup!
+
+trap_exit() {
+ echo
+ error "$@"
+ exit 1
+}
+
+source "$(dirname "$(readlink -e "$0")")/config"
+source "$(dirname "$(readlink -e "$0")")/db-libremessages"
+
+# From makepkg
+set -E
+
+trap 'trap_exit "$(gettext "TERM signal caught. Exiting...")"' TERM HUP QUIT
+trap 'trap_exit "$(gettext "Aborted by user! Exiting...")"' INT
+trap 'trap_exit "$(gettext "An unknown error has occurred. Exiting...")"' ERR
+
+EXTRAFLAGS=()
+"${CLEANUP_DRYRUN}" && EXTRAFLAGS+=(--dry-run)
+
+filter=$(mktemp -t "${0##*/}.XXXXXXXXXX")
+trap "rm -f -- $(printf %q "$filter")" EXIT
+
+for _repo in "${PKGREPOS[@]}"; do
+ for _arch in "${ARCHES[@]}"; do
+ msg "Getting %s-%s database" "${_repo}" "${_arch}"
+
+ dbfile="${FTP_BASE}/${_repo}/os/${_arch}/${_repo}${DBEXT}"
+
+ if [ ! -r "${dbfile}" ]; then
+ warning "Not found"
+ continue
+ fi
+
+ # Echo the contents into a filter file
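+		# e.g. the db entry "pkg-1.0-1/" becomes the include pattern "pkg-1.0-1*"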
+ bsdtar tf "${dbfile}" | \
+ cut -d'/' -f1 | \
+ sort -u | \
+ sed "s|$|*|" >> "$filter"
+
+ done
+done
+
+msg "Removing old files:"
+
+for POOL in "${PKGPOOLS[@]}" "${SRCPOOLS[@]}"; do
+ msg2 '%s' "${POOL}"
+
+ rsync "${EXTRAFLAGS[@]}" -va --delete-excluded \
+ --include-from="$filter" \
+ --exclude="*" \
+ "${FTP_BASE}/${POOL}/" \
+ "${FTP_BASE}/${POOL}/"
+done
+
+msg "Removing dead symlinks:"
+actions=(-print)
+"${CLEANUP_DRYRUN}" || actions+=(-delete)
+find -L "${FTP_BASE}/" -type l "${actions[@]}"
diff --git a/extra/legacy/db-functions b/extra/legacy/db-functions
new file mode 100644
index 0000000..d76aa41
--- /dev/null
+++ b/extra/legacy/db-functions
@@ -0,0 +1,524 @@
+#!/hint/bash
+
+# Some PKGBUILDs need CARCH to be set
+CARCH="x86_64"
+
+# Useful functions
+UMASK=""
+set_umask () {
+ [ "$UMASK" == "" ] && UMASK="$(umask)"
+ export UMASK
+ umask 002
+}
+
+restore_umask () {
+ umask "$UMASK" >/dev/null
+}
+
+# just like mv -f, but we touch the file and then copy the content so
+# default ACLs in the target dir will be applied
+mv_acl() {
+ rm -f "$2"
+ touch "$2"
+ cat "$1" >"$2" || return 1
+ rm -f "$1"
+}
+
+# set up general environment
+WORKDIR=$(mktemp -dt "${0##*/}.XXXXXXXXXX")
+LOCKS=()
+REPO_MODIFIED=0
+
+# check if messages are to be printed using color
+unset ALL_OFF BOLD BLUE GREEN RED YELLOW
+if [[ -t 2 ]]; then
+ ALL_OFF="$(tput sgr0)"
+ BOLD="$(tput bold)"
+ BLUE="${BOLD}$(tput setaf 4)"
+ GREEN="${BOLD}$(tput setaf 2)"
+ RED="${BOLD}$(tput setaf 1)"
+ YELLOW="${BOLD}$(tput setaf 3)"
+fi
+readonly ALL_OFF BOLD BLUE GREEN RED YELLOW
+
+plain() {
+ local mesg=$1; shift
+ printf "${BOLD} ${mesg}${ALL_OFF}\n" "$@"
+}
+
+msg() {
+ local mesg=$1; shift
+ printf "${GREEN}==>${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@"
+}
+
+msg2() {
+ local mesg=$1; shift
+ printf "${BLUE} ->${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@"
+}
+
+warning() {
+ local mesg=$1; shift
+ printf "${YELLOW}==> WARNING:${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+error() {
+ local mesg=$1; shift
+	printf "${RED}==> ERROR:${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+##
+# usage : in_array( $needle, $haystack )
+# return : 0 - found
+# 1 - not found
+##
+in_array() {
+ local needle=$1; shift
+ [[ -z $1 ]] && return 1 # Not Found
+ local item
+ for item in "$@"; do
+ [[ $item = "$needle" ]] && return 0 # Found
+ done
+ return 1 # Not Found
+}
+
+##
+# usage : get_full_version( $epoch, $pkgver, $pkgrel )
+# return : full version spec, including epoch (if necessary), pkgver, pkgrel
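+# example : get_full_version 0 1.2.3 2 -> 1.2.3-2 ; get_full_version 1 1.2.3 2 -> 1:1.2.3-2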
+##
+get_full_version() {
+ if [[ $1 -eq 0 ]]; then
+ # zero epoch case, don't include it in version
+ echo "$2-$3"
+ else
+ echo "$1:$2-$3"
+ fi
+}
+
+script_lock() {
+ local LOCKDIR="$TMPDIR/.scriptlock.${0##*/}"
+ if ! mkdir "$LOCKDIR" >/dev/null 2>&1 ; then
+ local _owner="$(/usr/bin/stat -c %U "$LOCKDIR")"
+ error "Script %s is already locked by %s." "${0##*/}" "$_owner"
+ exit 1
+ else
+ set_umask
+ return 0
+ fi
+}
+
+script_unlock() {
+ local LOCKDIR="$TMPDIR/.scriptlock.${0##*/}"
+ if [ ! -d "$LOCKDIR" ]; then
+ warning "Script %s was not locked!" "${0##*/}"
+ restore_umask
+ return 1
+ else
+ rmdir "$LOCKDIR"
+ restore_umask
+ return 0
+ fi
+}
+
+cleanup() {
+ local l
+ local repo
+ local arch
+
+ trap - EXIT INT QUIT TERM
+ for l in "${LOCKS[@]}"; do
+ repo=${l%.*}
+ arch=${l#*.}
+ if [ -d "$TMPDIR/.repolock.$repo.$arch" ]; then
+ msg "Removing left over lock from [%s] (%s)" "${repo}" "${arch}"
+ repo_unlock "$repo" "$arch"
+ fi
+ done
+ if [ -d "$TMPDIR/.scriptlock.${0##*/}" ]; then
+ msg "Removing left over lock from %s" "${0##*/}"
+ script_unlock
+ fi
+ rm -rf "$WORKDIR"
+
+ if (( REPO_MODIFIED )); then
+ date +%s > "${FTP_BASE}/lastupdate"
+ date -u +%s > "${FTP_BASE}/lastsync"
+ fi
+
+ [ "$1" ] && exit "$1"
+}
+
+abort() {
+ msg 'Aborting...'
+ cleanup 0
+}
+
+die() {
+ error "$@"
+ cleanup 1
+}
+
+trap abort INT QUIT TERM HUP
+trap cleanup EXIT
+
+
+#repo_lock <repo-name> <arch> [timeout]
+repo_lock () {
+ local LOCKDIR="$TMPDIR/.repolock.$1.$2"
+ local DBLOCKFILE="${FTP_BASE}/${1}/os/${2}/${1}${DBEXT}.lck"
+ local FILESLOCKFILE="${FTP_BASE}/${1}/os/${2}/${1}${FILESEXT}.lck"
+ local _count
+ local _trial
+ local _timeout
+ local _lockblock
+ local _owner
+
+ # This is the lock file used by repo-add and repo-remove
+ if [ -f "${DBLOCKFILE}" ]; then
+ error "Repo [%s] (%s) is already locked by repo-{add,remove} process %s" "$1" "$2" "$(<"$DBLOCKFILE")"
+ return 1
+ fi
+ if [ -f "${FILESLOCKFILE}" ]; then
+ error "Repo [%s] (%s) is already locked by repo-{add,remove} process %s" "$1" "$2" "$(<"$FILESLOCKFILE")"
+ return 1
+ fi
+
+ if [ $# -eq 2 ]; then
+ _lockblock=true
+ _trial=0
+ elif [ $# -eq 3 ]; then
+ _lockblock=false
+ _timeout=$3
+ let _trial=$_timeout/$LOCK_DELAY
+ fi
+
+ _count=0
+ while [ "$_count" -le "$_trial" ] || "$_lockblock" ; do
+ if ! mkdir "$LOCKDIR" >/dev/null 2>&1 ; then
+ _owner="$(/usr/bin/stat -c %U "$LOCKDIR")"
+ warning "Repo [%s] (%s) is already locked by %s." "${1}" "${2}" "$_owner"
+ msg2 "Retrying in %d seconds..." "$LOCK_DELAY"
+ else
+ LOCKS+=("$1.$2")
+ set_umask
+ return 0
+ fi
+ sleep "$LOCK_DELAY"
+ let _count=$_count+1
+ done
+
+ error "Repo [%s] (%s) is already locked by %s. Giving up!" "${1}" "${2}" "$_owner"
+ return 1
+}
+
+repo_unlock () { #repo_unlock <repo-name> <arch>
+ local LOCKDIR="$TMPDIR/.repolock.$1.$2"
+ if [ ! -d "$LOCKDIR" ]; then
+ warning "Repo lock [%s] (%s) was not locked!" "${1}" "${2}"
+ restore_umask
+ return 1
+ else
+ rmdir "$LOCKDIR"
+ restore_umask
+ return 0
+ fi
+}
+
+# usage: _grep_pkginfo pkgfile pattern
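+# e.g. _grep_pkginfo foo-1.0-1-any.pkg.tar.xz pkgname prints the value of
+# the "pkgname = ..." line from the package's .PKGINFO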
+_grep_pkginfo() {
+ local _ret
+
+ _ret="$(/usr/bin/bsdtar -xOqf "$1" .PKGINFO | grep -m 1 "^${2} = ")"
+ echo "${_ret#${2} = }"
+}
+
+
+# Get the package base or name as fallback
+getpkgbase() {
+ local _base
+
+ _base="$(_grep_pkginfo "$1" "pkgbase")"
+ if [ -z "$_base" ]; then
+ getpkgname "$1"
+ else
+ echo "$_base"
+ fi
+}
+
+issplitpkg() {
+ local _base
+
+ _base="$(_grep_pkginfo "$1" "pkgbase")"
+ if [ -z "$_base" ]; then
+ return 1
+ else
+ return 0
+ fi
+}
+
+# Get the package name
+getpkgname() {
+ local _name
+
+ _name="$(_grep_pkginfo "$1" "pkgname")"
+ if [ -z "$_name" ]; then
+ error "Package '%s' has no pkgname in the PKGINFO. Fail!" "$1"
+ exit 1
+ fi
+
+ echo "$_name"
+}
+
+# Get the pkgver-pkgrel of this package
+getpkgver() {
+ local _ver
+
+ _ver="$(_grep_pkginfo "$1" "pkgver")"
+ if [ -z "$_ver" ]; then
+ error "Package '%s' has no pkgver in the PKGINFO. Fail!" "$1"
+ exit 1
+ fi
+
+ echo "$_ver"
+}
+
+getpkgarch() {
+ local _ver
+
+ _ver="$(_grep_pkginfo "$1" "arch")"
+ if [ -z "$_ver" ]; then
+ error "Package '%s' has no arch in the PKGINFO. Fail!" "$1"
+ exit 1
+ fi
+
+ echo "$_ver"
+}
+
+getpkgfile() {
+ if [[ ${#} -ne 1 ]]; then
+ error 'No canonical package found!'
+ exit 1
+ elif [ ! -f "${1}" ]; then
+ error "Package %s not found!" "${1}"
+ exit 1
+ elif "${REQUIRE_SIGNATURE}" && [ ! -f "${1}.sig" ]; then
+ error "Package signature %s not found!" "${1}.sig"
+ exit 1
+ fi
+
+ echo "${1}"
+}
+
+getpkgfiles() {
+ local f
+ if [ ! -z "$(printf '%s\n' "${@%\.*}" | sort | uniq -D)" ]; then
+ error 'Duplicate packages found!'
+ exit 1
+ fi
+
+ for f in "${@}"; do
+ if [ ! -f "${f}" ]; then
+ error "Package %s not found!" "${f}"
+ exit 1
+ elif "${REQUIRE_SIGNATURE}" && [ ! -f "${f}.sig" ]; then
+ error "Package signature %s not found!" "${f}.sig"
+ exit 1
+ fi
+ done
+
+ echo "${@}"
+}
+
+check_pkgfile() {
+ local pkgfile=$1
+
+	local pkgname pkgver pkgarch
+	pkgname="$(getpkgname "${pkgfile}")" || return 1
+	pkgver="$(getpkgver "${pkgfile}")" || return 1
+	pkgarch="$(getpkgarch "${pkgfile}")" || return 1
+
+ in_array "${pkgarch}" "${ARCHES[@]}" 'any' || return 1
+
+ if echo "${pkgfile##*/}" | grep -q "${pkgname}-${pkgver}-${pkgarch}"; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+check_pkgxbs() {
+ local pkgfile="${1}"
+	local _pkgbase _pkgname _pkgver _pkgarch
+	_pkgbase="$(getpkgbase "${pkgfile}")" || return 1
+	_pkgname="$(getpkgname "${pkgfile}")" || return 1
+	_pkgver="$(getpkgver "${pkgfile}")" || return 1
+	_pkgarch="$(getpkgarch "${pkgfile}")" || return 1
+ local repo="${2}"
+
+ in_array "${repo}" "${PKGREPOS[@]}" || return 1
+
+	local xbsver="$(. "$(xbs releasepath "${_pkgbase}" "${repo}" "${_pkgarch}")/PKGBUILD"; get_full_version "${epoch:-0}" "${pkgver}" "${pkgrel}")"
+ [ "${xbsver}" == "${_pkgver}" ] || return 1
+
+ local xbsnames=($(. "$(xbs releasepath "${_pkgbase}" "${repo}" "${_pkgarch}")/PKGBUILD"; echo "${pkgname[@]}"))
+ in_array "${_pkgname}" "${xbsnames[@]}" || return 1
+
+ return 0
+}
+
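+# Ensure all parts of a split package are staged together: compare the
+# pkgnames being staged against the pkgname array in the xbs PKGBUILD.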
+check_splitpkgs() {
+ local repo="${1}"
+ shift
+ local pkgfiles=("${@}")
+ local pkgfile
+ local pkgdir
+ local xbsname
+
+ mkdir -p "${WORKDIR}/check_splitpkgs/"
+ pushd "${WORKDIR}/check_splitpkgs" >/dev/null
+
+ for pkgfile in "${pkgfiles[@]}"; do
+ issplitpkg "${pkgfile}" || continue
+ local _pkgbase="$(getpkgbase "${pkgfile}")"
+ msg2 "Checking %s" "$_pkgbase"
+ local _pkgname="$(getpkgname "${pkgfile}")"
+ local _pkgarch="$(getpkgarch "${pkgfile}")"
+ mkdir -p "${repo}/${_pkgarch}/${_pkgbase}"
+ echo "${_pkgname}" >> "${repo}/${_pkgarch}/${_pkgbase}/staging"
+
+ local xbsnames=($(. "$(xbs releasepath "${_pkgbase}" "${repo}" "${_pkgarch}")/PKGBUILD"; echo "${pkgname[@]}"))
+ printf '%s\n' "${xbsnames[@]}" >> "${repo}/${_pkgarch}/${_pkgbase}/xbs"
+ done
+ popd >/dev/null
+
+ for pkgdir in "${WORKDIR}/check_splitpkgs/${repo}"/*/*; do
+ [ ! -d "${pkgdir}" ] && continue
+ sort -u "${pkgdir}/staging" -o "${pkgdir}/staging"
+ sort -u "${pkgdir}/xbs" -o "${pkgdir}/xbs"
+ if [ ! -z "$(comm -13 "${pkgdir}/staging" "${pkgdir}/xbs")" ]; then
+ return 1
+ fi
+ done
+
+ return 0
+}
+
+check_pkgrepos() {
+ local pkgfile=$1
+
+	local pkgname pkgver pkgarch
+	pkgname="$(getpkgname "${pkgfile}")" || return 1
+	pkgver="$(getpkgver "${pkgfile}")" || return 1
+	pkgarch="$(getpkgarch "${pkgfile}")" || return 1
+
+ [ -f "${FTP_BASE}/${PKGPOOL}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT} ] && return 1
+ [ -f "${FTP_BASE}/${PKGPOOL}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT}.sig ] && return 1
+ [ -f "${FTP_BASE}/${PKGPOOL}/${pkgfile##*/}" ] && return 1
+ [ -f "${FTP_BASE}/${PKGPOOL}/${pkgfile##*/}.sig" ] && return 1
+
+ local repo
+ local arch
+ for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
+ [ -f "${FTP_BASE}/${repo}/os/${arch}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT} ] && return 1
+ [ -f "${FTP_BASE}/${repo}/os/${arch}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT}.sig ] && return 1
+ [ -f "${FTP_BASE}/${repo}/os/${arch}/${pkgfile##*/}" ] && return 1
+ [ -f "${FTP_BASE}/${repo}/os/${arch}/${pkgfile##*/}.sig" ] && return 1
+ done
+ done
+
+ return 0
+}
+
+#usage: chk_license ${license[@]}"
+chk_license() {
+ local l
+ for l in "${@}"; do
+ in_array "${l}" "${ALLOWED_LICENSES[@]}" && return 0
+ done
+
+ return 1
+}
+
+check_repo_permission() {
+ local repo=$1
+
+ [ ${#PKGREPOS[@]} -eq 0 ] && return 1
+ [ -z "${PKGPOOL}" ] && return 1
+
+ in_array "${repo}" "${PKGREPOS[@]}" || return 1
+
+ [ -w "$FTP_BASE/${PKGPOOL}" ] || return 1
+
+ local arch
+ for arch in "${ARCHES[@]}"; do
+ local dir="${FTP_BASE}/${repo}/os/${arch}/"
+ [ -w "${dir}" ] || return 1
+ [ -f "${dir}${repo}"${DBEXT} -a ! -w "${dir}${repo}"${DBEXT} ] && return 1
+ [ -f "${dir}${repo}"${FILESEXT} -a ! -w "${dir}${repo}"${FILESEXT} ] && return 1
+ done
+
+ return 0
+}
+
+set_repo_permission() {
+ local repo=$1
+ local arch=$2
+ local dbfile="${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}"
+ local filesfile="${FTP_BASE}/${repo}/os/${arch}/${repo}${FILESEXT}"
+
+ if [ -w "${dbfile}" ]; then
+ local group=$(/usr/bin/stat --printf='%G' "$(dirname "${dbfile}")")
+ chgrp "$group" "${dbfile}" || error "Could not change group of %s to %s" "${dbfile}" "$group"
+ chgrp "$group" "${filesfile}" || error "Could not change group of %s to %s" "${filesfile}" "$group"
+ chmod g+w "${dbfile}" || error "Could not set write permission for group %s to %s" "$group" "${dbfile}"
+ chmod g+w "${filesfile}" || error "Could not set write permission for group %s to %s" "$group" "${filesfile}"
+ else
+ error "You don't have permission to change %s" "${dbfile}"
+ fi
+}
+
+arch_repo_add() {
+ local repo=$1
+ local arch=$2
+ local pkgs=("${@:3}")
+
+ printf -v pkgs_str -- '%q ' "${pkgs[@]}"
+ # package files might be relative to repo dir
+ pushd "${FTP_BASE}/${repo}/os/${arch}" >/dev/null
+ /usr/bin/repo-add -q "${repo}${DBEXT}" "${pkgs[@]}" >/dev/null \
+ || error 'repo-add %q %s' "${repo}${DBEXT}" "${pkgs_str% }"
+ /usr/bin/repo-add -f -q "${repo}${FILESEXT}" "${pkgs[@]}" \
+ || error 'repo-add -f %q %s' "${repo}${FILESEXT}" "${pkgs_str% }"
+ popd >/dev/null
+ set_repo_permission "${repo}" "${arch}"
+
+ REPO_MODIFIED=1
+}
+
+arch_repo_remove() {
+ local repo=$1
+ local arch=$2
+ local pkgs=("${@:3}")
+ local dbfile="${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}"
+ local filesfile="${FTP_BASE}/${repo}/os/${arch}/${repo}${FILESEXT}"
+
+ if [ ! -f "${dbfile}" ]; then
+ error "No database found at '%s'" "${dbfile}"
+ return 1
+ fi
+ printf -v pkgs_str -- '%q ' "${pkgs[@]}"
+ /usr/bin/repo-remove -q "${dbfile}" "${pkgs[@]}" >/dev/null \
+ || error 'repo-remove %q %s' "${dbfile}" "${pkgs_str% }"
+ /usr/bin/repo-remove -q "${filesfile}" "${pkgs[@]}" \
+ || error 'repo-remove %q %s' "${filesfile}" "${pkgs_str% }"
+ set_repo_permission "${repo}" "${arch}"
+
+ REPO_MODIFIED=1
+}
diff --git a/extra/legacy/db-libremessages b/extra/legacy/db-libremessages
new file mode 100755
index 0000000..37df149
--- /dev/null
+++ b/extra/legacy/db-libremessages
@@ -0,0 +1,83 @@
+# Copyright (c) 2006-2010 Pacman Development Team <pacman-dev@archlinux.org>
+# Copyright (c) 2002-2006 by Judd Vinet <jvinet@zeroflux.org>
+# Copyright (c) 2005 by Aurelien Foret <orelien@chez.com>
+# Copyright (c) 2006 by Miklos Vajna <vmiklos@frugalware.org>
+# Copyright (c) 2005 by Christian Hamar <krics@linuxforum.hu>
+# Copyright (c) 2006 by Alex Smith <alex@alex-smith.me.uk>
+# Copyright (c) 2006 by Andras Voroskoi <voroskoi@frugalware.org>
+# Copyright (c) 2011 by Joshua Haase <hahj87@gmail.com>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+# gettext initialization
+export TEXTDOMAIN='libretools'
+export TEXTDOMAINDIR='/usr/share/locale'
+
+# check if messages are to be printed using color
+unset ALL_OFF BOLD BLUE GREEN RED YELLOW
+
+if tput setaf 0 &>/dev/null; then
+ ALL_OFF="$(tput sgr0)"
+ BOLD="$(tput bold)"
+ BLUE="${BOLD}$(tput setaf 4)"
+ GREEN="${BOLD}$(tput setaf 2)"
+ RED="${BOLD}$(tput setaf 1)"
+ YELLOW="${BOLD}$(tput setaf 3)"
+ PURPLE="${ALL_OFF}$(tput setaf 5)"
+else
+ ALL_OFF="\033[1;0m"
+ BOLD="\033[1;1m"
+ BLUE="${BOLD}\033[1;34m"
+ GREEN="${BOLD}\033[1;32m"
+ RED="${BOLD}\033[1;31m"
+ YELLOW="${BOLD}\033[1;33m"
+ PURPLE="${BOLD}\033[1;30;40m"
+fi
+
+stdnull() {
+ local action=$1;
+ eval "${action} >/dev/null 2>&1"
+}
+
+plain() {
+ local mesg=$1; shift
+ printf "${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+msg() {
+ local mesg=$1; shift
+ printf "${GREEN}==>${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+msg2() {
+ local mesg=$1; shift
+ printf "${BLUE} ->${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+warning() {
+ local mesg=$1; shift
+ printf "${YELLOW}==> $(gettext "WARNING:")${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+error() {
+ local mesg=$1; shift
+ printf "${RED}==> $(gettext "ERROR:")${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+fatal_error() {
+ local mesg=$1; shift
+ error "$mesg" "$@"
+
+ exit 1
+}
diff --git a/extra/legacy/db-list-unsigned-packages b/extra/legacy/db-list-unsigned-packages
new file mode 100755
index 0000000..095e1e6
--- /dev/null
+++ b/extra/legacy/db-list-unsigned-packages
@@ -0,0 +1,38 @@
+#!/bin/bash
+# Copyright (C) 2012 Michał Masłowski <mtjm@mtjm.eu>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+set -e
+
+# Output a list of repo/package-name-and-version pairs representing
+# unsigned packages available for architecture $1 and specified for
+# architecture $2 (usually $1 or any, default is to list all).
+
+. "$(dirname "$(readlink -e "$0")")/config"
+. "$(dirname "$(readlink -e "$0")")/db-functions"
+
+if [ $# -lt 1 ]; then
+ msg "usage: %s <architecture>" "${0##*/}"
+ exit 1
+fi
+
+arch=$1
+shift
+
+for repo in "${PKGREPOS[@]}"
+do
+ db="${FTP_BASE}/${repo}/os/${arch}/${repo}.db"
+ [ -f "$db" ] && "$(dirname "$(readlink -e "$0")")/db-list-unsigned-packages.py" "$repo" "$@" < "$db"
+done
diff --git a/extra/legacy/db-list-unsigned-packages.py b/extra/legacy/db-list-unsigned-packages.py
new file mode 100755
index 0000000..80cff51
--- /dev/null
+++ b/extra/legacy/db-list-unsigned-packages.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python3
+# Copyright (C) 2012 Michał Masłowski <mtjm@mtjm.eu>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+
+"""
+Output a list of repo/package-name-and-version pairs representing
+unsigned packages in the database at standard input of repo named in
+the first argument and specified for architectures listed in the
+following arguments (usually the one of the database or any, default
+is to list all).
+
+If the --keyset argument is passed, print the key fingerprint of every
+signed package.
+"""
+
+
+import base64
+import subprocess
+import sys
+import tarfile
+
+
+def main():
+ """Do the job."""
+ check_keys = False
+ if "--keyset" in sys.argv:
+ sys.argv.remove("--keyset")
+ check_keys = True
+ repo = sys.argv[1]
+ pkgarches = frozenset(name.encode("utf-8") for name in sys.argv[2:])
+ packages = []
+ keys = []
+ with tarfile.open(fileobj=sys.stdin.buffer) as archive:
+ for entry in archive:
+ if entry.name.endswith("/desc"):
+ content = archive.extractfile(entry)
+ skip = False
+ is_arch = False
+ key = None
+ for line in content:
+ if is_arch:
+ is_arch = False
+ if pkgarches and line.strip() not in pkgarches:
+ skip = True # different architecture
+ break
+ if line == b"%PGPSIG%\n":
+ skip = True # signed
+ key = b""
+ if check_keys:
+ continue
+ else:
+ break
+ if line == b"%ARCH%\n":
+ is_arch = True
+ continue
+ if key is not None:
+ if line.strip():
+ key += line.strip()
+ else:
+ break
+ if check_keys and key:
+ key_binary = base64.b64decode(key)
+ keys.append(key_binary)
+ packages.append(repo + "/" + entry.name[:-5])
+ if skip:
+ continue
+ print(repo + "/" + entry.name[:-5])
+ if check_keys and keys:
+ # We have collected all signed package names in packages and
+ # all keys in keys. Let's now ask gpg to list all signatures
+ # and find which keys made them.
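+        # gpg prints one ":signature packet:" line per signature, in
+        # input order, so index i maps each packet back to packages[i].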
+ packets = subprocess.check_output(("gpg", "--list-packets"),
+ input=b"".join(keys))
+ i = 0
+ for line in packets.decode("latin1").split("\n"):
+ if line.startswith(":signature packet:"):
+ keyid = line[line.index("keyid ") + len("keyid "):]
+ print(packages[i], keyid)
+ i += 1
+
+
+if __name__ == "__main__":
+ main()
diff --git a/extra/legacy/db-move b/extra/legacy/db-move
new file mode 100755
index 0000000..275a11a
--- /dev/null
+++ b/extra/legacy/db-move
@@ -0,0 +1,106 @@
+#!/bin/bash
+
+. "$(dirname "$(readlink -e "$0")")/config"
+. "$(dirname "$(readlink -e "$0")")/db-functions"
+
+if [ $# -lt 3 ]; then
+ msg "usage: %s <repo-from> <repo-to> <pkgname|pkgbase> ..." "${0##*/}"
+ exit 1
+fi
+
+args=("${@}")
+repo_from="${args[0]}"
+repo_to="${args[1]}"
+ftppath_from="${FTP_BASE}/${repo_from}/os/"
+ftppath_to="${FTP_BASE}/${repo_to}/os/"
+
+if ! check_repo_permission "$repo_to" || ! check_repo_permission "$repo_from"; then
+ die "You don't have permission to move packages from %s to %s" "${repo_from}" "${repo_to}"
+fi
+
+# TODO: this might lock too much (architectures)
+for pkgarch in "${ARCHES[@]}"; do
+ repo_lock "${repo_to}" "${pkgarch}" || exit 1
+ repo_lock "${repo_from}" "${pkgarch}" || exit 1
+done
+
+# First loop is to check that all necessary files exist
+for pkgbase in "${args[@]:2}"; do
+ for pkgarch in "${ARCHES[@]}" 'any'; do
+ xbsrepo_from="$(xbs releasepath "${pkgbase}" "${repo_from}" "${pkgarch}")"
+ if [ -r "${xbsrepo_from}/PKGBUILD" ]; then
+ pkgnames=($(. "${xbsrepo_from}/PKGBUILD"; echo "${pkgname[@]}"))
+ if [ ${#pkgnames[@]} -lt 1 ]; then
+ die "Could not read pkgname"
+ fi
+
+ pkgver=$(. "${xbsrepo_from}/PKGBUILD"; get_full_version "${epoch:-0}" "${pkgver}" "${pkgrel}")
+ if [ -z "${pkgver}" ]; then
+ die "Could not read pkgver"
+ fi
+
+ if [ "${pkgarch}" == 'any' ]; then
+ tarches=("${ARCHES[@]}")
+ else
+ tarches=("${pkgarch}")
+ fi
+
+ for pkgname in "${pkgnames[@]}"; do
+ for tarch in "${tarches[@]}"; do
+ getpkgfile "${ftppath_from}/${tarch}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT} >/dev/null
+ done
+ done
+ continue 2
+ fi
+ done
+ die "%s not found in %s" "${pkgbase}" "${repo_from}"
+done
+
+msg "Moving packages from [%s] to [%s]..." "${repo_from}" "${repo_to}"
+
+declare -A add_pkgs
+declare -A remove_pkgs
+for pkgbase in "${args[@]:2}"; do
+ # move the package in xbs
+ arches=($(xbs move "${repo_from}" "${repo_to}" "${pkgbase}"))
+ # move the package in ftp
+ for pkgarch in "${arches[@]}"; do
+ xbsrepo_to="$(xbs releasepath "$pkgbase" "$repo_to" "$pkgarch")"
+ if true; then # to add an indent level to make merging easier
+ if [ "${pkgarch}" == 'any' ]; then
+ tarches=("${ARCHES[@]}")
+ else
+ tarches=("${pkgarch}")
+ fi
+ msg2 '%s (%s)' "${pkgbase}" "${tarches[*]}"
+ pkgnames=($(. "${xbsrepo_to}/PKGBUILD"; echo "${pkgname[@]}"))
+ pkgver=$(. "${xbsrepo_to}/PKGBUILD"; get_full_version "${epoch:-0}" "${pkgver}" "${pkgrel}")
+
+ for pkgname in "${pkgnames[@]}"; do
+ for tarch in "${tarches[@]}"; do
+ pkgpath=$(getpkgfile "${ftppath_from}/${tarch}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT})
+ pkgfile="${pkgpath##*/}"
+
+ ln -s "../../../${PKGPOOL}/${pkgfile}" "${ftppath_to}/${tarch}/"
+ if [ -f "${FTP_BASE}/${PKGPOOL}/${pkgfile}.sig" ]; then
+ ln -s "../../../${PKGPOOL}/${pkgfile}.sig" "${ftppath_to}/${tarch}/"
+ fi
+ add_pkgs[${tarch}]+="${FTP_BASE}/${PKGPOOL}/${pkgfile} "
+ remove_pkgs[${tarch}]+="${pkgname} "
+ done
+ done
+ fi
+ done
+done
+
+for tarch in "${ARCHES[@]}"; do
+ if [ -n "${add_pkgs[${tarch}]}" ]; then
+ arch_repo_add "${repo_to}" "${tarch}" ${add_pkgs[${tarch}]}
+ arch_repo_remove "${repo_from}" "${tarch}" ${remove_pkgs[${tarch}]}
+ fi
+done
+
+for pkgarch in "${ARCHES[@]}"; do
+ repo_unlock "${repo_from}" "${pkgarch}"
+ repo_unlock "${repo_to}" "${pkgarch}"
+done
diff --git a/extra/legacy/db-remove b/extra/legacy/db-remove
new file mode 100755
index 0000000..dcbe4b4
--- /dev/null
+++ b/extra/legacy/db-remove
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+. "$(dirname "$(readlink -e "$0")")/config"
+. "$(dirname "$(readlink -e "$0")")/db-functions"
+
+if [ $# -lt 3 ]; then
+ msg "usage: %s <repo> <arch> <pkgname|pkgbase> ..." "${0##*/}"
+ exit 1
+fi
+
+repo="$1"
+arch="$2"
+pkgbases=("${@:3}")
+
+if ! check_repo_permission "$repo"; then
+ die "You don't have permission to remove packages from %s" "${repo}"
+fi
+
+if [ "$arch" == "any" ]; then
+ tarches=("${ARCHES[@]}")
+else
+ tarches=("$arch")
+fi
+
+for tarch in "${tarches[@]}"; do
+ repo_lock "$repo" "$tarch" || exit 1
+done
+
+remove_pkgs=()
+for pkgbase in "${pkgbases[@]}"; do
+ msg "Removing %s from [%s]..." "$pkgbase" "$repo"
+
+ path="$(xbs releasepath "$pkgbase" "$repo" "$arch")"
+ if [ -d "$path" ]; then
+ remove_pkgs+=($(. "$path/PKGBUILD"; echo "${pkgname[@]}"))
+ xbs unrelease "$pkgbase" "$repo" "$arch"
+ else
+ warning "%s not found in %s for %s" \
+ "$pkgbase" "$(xbs name)" "$repo-$arch"
+ warning "Removing only %s from the repo" "$pkgbase"
+ warning "If it was a split package you have to remove the others yourself!"
+ remove_pkgs+=("$pkgbase")
+ fi
+done
+
+for tarch in "${tarches[@]}"; do
+ arch_repo_remove "${repo}" "${tarch}" "${remove_pkgs[@]}"
+ repo_unlock "$repo" "$tarch"
+done
diff --git a/extra/legacy/db-repo-add b/extra/legacy/db-repo-add
new file mode 100755
index 0000000..4611bdf
--- /dev/null
+++ b/extra/legacy/db-repo-add
@@ -0,0 +1,41 @@
+#!/bin/bash
+
+. "$(dirname "$(readlink -e "$0")")/config"
+. "$(dirname "$(readlink -e "$0")")/db-functions"
+
+if [ $# -lt 3 ]; then
+ msg "usage: %s <repo> <arch> <pkgfile> ..." "${0##*/}"
+ exit 1
+fi
+
+repo="$1"
+arch="$2"
+pkgfiles=("${@:3}")
+
+ftppath="$FTP_BASE/$repo/os"
+
+if ! check_repo_permission "$repo"; then
+ die "You don't have permission to add packages to %s" "${repo}"
+fi
+
+if [ "$arch" == "any" ]; then
+ tarches=("${ARCHES[@]}")
+else
+ tarches=("$arch")
+fi
+
+for tarch in "${tarches[@]}"; do
+ repo_lock "$repo" "$tarch" || exit 1
+done
+
+for tarch in "${tarches[@]}"; do
+ for pkgfile in "${pkgfiles[@]}"; do
+ if [[ ! -f "${FTP_BASE}/${repo}/os/${arch}/${pkgfile##*/}" ]]; then
+ die "Package file %s not found in %s" "${pkgfile##*/}" "${FTP_BASE}/${repo}/os/${arch}/"
+ else
+ msg "Adding %s to [%s]..." "$pkgfile" "$repo"
+ fi
+ done
+ arch_repo_add "${repo}" "${tarch}" "${pkgfiles[@]}"
+ repo_unlock "$repo" "$tarch"
+done
diff --git a/extra/legacy/db-repo-remove b/extra/legacy/db-repo-remove
new file mode 100755
index 0000000..aadc4ce
--- /dev/null
+++ b/extra/legacy/db-repo-remove
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+. "$(dirname "$(readlink -e "$0")")/config"
+. "$(dirname "$(readlink -e "$0")")/db-functions"
+
+if [ $# -lt 3 ]; then
+ msg "usage: %s <repo> <arch> <pkgname> ..." "${0##*/}"
+ exit 1
+fi
+
+repo="$1"
+arch="$2"
+pkgnames=("${@:3}")
+
+ftppath="$FTP_BASE/$repo/os"
+
+if ! check_repo_permission "$repo"; then
+ die "You don't have permission to remove packages from %s" "${repo}"
+fi
+
+if [ "$arch" == "any" ]; then
+ tarches=("${ARCHES[@]}")
+else
+ tarches=("$arch")
+fi
+
+for tarch in "${tarches[@]}"; do
+ repo_lock "$repo" "$tarch" || exit 1
+done
+
+for tarch in "${tarches[@]}"; do
+ for pkgname in "${pkgnames[@]}"; do
+ msg "Removing %s from [%s]..." "$pkgname" "$repo"
+ done
+ arch_repo_remove "${repo}" "${tarch}" "${pkgnames[@]}"
+ repo_unlock "$repo" "$tarch"
+done
diff --git a/extra/legacy/db-sync b/extra/legacy/db-sync
new file mode 100755
index 0000000..2194fe6
--- /dev/null
+++ b/extra/legacy/db-sync
@@ -0,0 +1,208 @@
+#!/bin/bash
+# Syncs Arch repos based on info contained in repo.db files
+# License: GPLv3
+
+# Principles
+# * Get repo.db from an Arch-like repo
+# * Generate a list of available packages
+# * Create sync whitelist (based on package blacklist)
+# * Get packages
+# * Check package signatures
+# * Check database signatures
+# * Sync repo => repo
+
+# TODO
+# * make a tarball of files used for forensics
+
+# Run as `V=true db-sync` to get verbose output
+VERBOSE=${V:-false}
+extra=()
+${VERBOSE} && extra+=(-v)
+
+WORKDIR=$(mktemp -dt "${0##*/}.XXXXXXXXXX")
+trap "rm -rf -- $(printf '%q' "${WORKDIR}")" EXIT
+
+# Fetches the repo databases from the mirror into WORKDIR
+get_repos() {
+ # Exclude everything but db files
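+ # (filter rules are first-match: "*/" lets rsync recurse into directories,
+ # the includes keep the db files, and the final --exclude="*" drops the
+ # rest; -m prunes directories left empty by the filter)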
+ rsync "${extra[@]}" --no-motd -mrtlH --no-p --include="*/" \
+ --include="*.db" \
+ --include="*${DBEXT}" \
+ --include="*.files" \
+ --include="*${FILESEXT}" \
+ --exclude="*" \
+ --delete-after \
+ "rsync://${mirror}/${mirrorpath}/" "$WORKDIR"
+}
+
+get_repo_content() {
+ # List the top-level entries (pkgname-pkgver directories) of a db tarball
+ bsdtar tf "${1}" | \
+ cut -d "/" -f 1 | \
+ sort -u
+}
+
+# Prints blacklisted packages
+get_blacklist() {
+ cut -d ':' -f 1 "${BLACKLIST_FILE}"
+}
+
+# Prints the path prefix of a repo database: $1 = repo, $2 = arch
+# e.g. get_repo_file core x86_64 -> ${WORKDIR}/core/os/x86_64/core
+get_repo_file() {
+ echo "${WORKDIR}/${1}/os/${2}/${1}"
+}
+
+# Process the databases and get the libre packages
+init() {
+
+ # Get the blacklisted packages
+ blacklist=($(get_blacklist))
+ # Store all the whitelist files
+ whitelists=()
+
+ msg "%d packages in blacklist" ${#blacklist[@]}
+
+ test ${#blacklist[@]} -eq 0 && fatal_error "Empty blacklist"
+
+ # Sync the repos databases
+ get_repos
+
+ # Traverse all repo-arch pairs
+ for _repo in "${ARCHREPOS[@]}"; do
+ for _arch in "${ARCHARCHES[@]}"; do
+ msg "Processing %s-%s" "${_repo}-${_arch}"
+
+ db_file=$(get_repo_file "${_repo}" "${_arch}")${DBEXT}
+ files_file=$(get_repo_file "${_repo}" "${_arch}")${FILESEXT}
+
+ if [ ! -f "${db_file}" ]; then
+ warning "%s doesn't exist, skipping this repo-arch" "${db_file}"
+ continue
+ fi
+ if [ ! -f "${files_file}" ]; then
+ warning "%s doesn't exist, skipping this repo-arch" "${files_file}"
+ continue
+ fi
+
+ # Remove blacklisted packages and count them
+ # TODO capture all removed packages for printing on debug mode
+ msg2 "Removing blacklisted packages from %s database..." .db
+ LC_ALL=C repo-remove "${db_file}" "${blacklist[@]}" \
+ |& sed -n 's/-> Removing/ &/p'
+ msg2 "Removing blacklisted packages from %s database..." .files
+ LC_ALL=C repo-remove "${files_file}" "${blacklist[@]}" \
+ |& sed -n 's/-> Removing/ &/p'
+ # Get db contents
+ db=($(get_repo_content "${db_file}"))
+
+ msg2 "Process clean db for syncing..."
+
+ # Create a whitelist, add * wildcard to end
+ # TODO due to lack of -arch suffix, the pool sync retrieves every arch even if
+ # we aren't syncing them
+ # IMPORTANT: the . in the sed command is needed because an empty
+ # whitelist would consist of a single * allowing any package to
+ # pass through
+ printf '%s\n' "${db[@]}" | sed "s|.$|&*|g" > "/tmp/${_repo}-${_arch}.whitelist"
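+ # e.g. the db entry "pkg-1.0-1" becomes the pattern "pkg-1.0-1*", which
+ # matches the package file and its detached .sig in the rsync below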
+
+ msg2 "%d packages in whitelist" "$(wc -l /tmp/${_repo}-${_arch}.whitelist | cut -d' ' -f1)"
+
+ # Sync excluding everything but whitelist
+ # We delete here for cleanup
+ rsync "${extra[@]}" --no-motd -rtlH \
+ --delete-after \
+ --delete-excluded \
+ --delay-updates \
+ --include-from="/tmp/${_repo}-${_arch}.whitelist" \
+ --exclude="*" \
+ "rsync://${mirror}/${mirrorpath}/${_repo}/os/${_arch}/" \
+ "${FTP_BASE}/${_repo}/os/${_arch}/"
+
+ # Add a new whitelist
+ whitelists+=(/tmp/${_repo}-${_arch}.whitelist)
+
+ msg "Putting databases back in place"
+ rsync "${extra[@]}" --no-motd -rtlH \
+ --delay-updates \
+ --safe-links \
+ "${WORKDIR}/${_repo}/os/${_arch}/" \
+ "${FTP_BASE}/${_repo}/os/${_arch}/"
+
+ # Cleanup
+ unset db
+ done
+ done
+
+
+ msg "Syncing package pool"
+ # Concatenate all whitelists, check for single *s just in case
+ cat "${whitelists[@]}" | grep -v "^\*$" | sort -u > /tmp/any.whitelist
+
+ msg2 "Retrieving %d packages from pool" "$(wc -l /tmp/any.whitelist | cut -d' ' -f1)"
+
+ # Sync
+ # *Don't delete-after*, this is the job of cleanup scripts. It will remove our
+ # packages too
+ local pkgpool
+ for pkgpool in "${ARCHPKGPOOLS[@]}"; do
+ rsync "${extra[@]}" --no-motd -rtlH \
+ --delay-updates \
+ --safe-links \
+ --include-from=/tmp/any.whitelist \
+ --exclude="*" \
+ "rsync://${mirror}/${mirrorpath}/${pkgpool}/" \
+ "${FTP_BASE}/${pkgpool}/"
+ done
+
+ # Sync sources
+ msg "Syncing source pool"
+ #sed "s|\.pkg\.tar\.|.src.tar.|" /tmp/any.whitelist > /tmp/any-src.whitelist
+ #msg2 "Retrieving %d sources from pool" $(wc -l < /tmp/any-src.whitelist)
+
+ # Sync
+ # *Don't delete-after*, this is the job of cleanup scripts. It will remove our
+ # packages too
+ local srcpool
+ for srcpool in "${ARCHSRCPOOLS[@]}"; do
+ rsync "${extra[@]}" --no-motd -rtlH \
+ --delay-updates \
+ --safe-links \
+ --include-from=/tmp/any.whitelist \
+ --exclude="*" \
+ "rsync://${mirror}/${mirrorpath}/${srcpool}/" \
+ "${FTP_BASE}/${srcpool}/"
+ done
+
+ date -u +%s > "${FTP_BASE}/lastsync"
+
+ # Cleanup
+ unset blacklist whitelists _arch _repo db_file files_file
+}
+
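+# Print the error, restore the default handler for the signal, and re-raise it
+# against this shell so callers see the conventional 128+signum exit status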
+trap_exit() {
+ local signal=$1; shift
+ echo
+ error "$@"
+ trap -- "$signal"
+ kill "-$signal" "$$"
+}
+
+source "$(dirname "$(readlink -e "$0")")/config"
+source "$(dirname "$(readlink -e "$0")")/db-sync.conf"
+source "$(dirname "$(readlink -e "$0")")/db-libremessages"
+
+# Check variables presence
+for var in DBEXT FILESEXT mirror mirrorpath WORKDIR BLACKLIST_FILE FTP_BASE ARCHSRCPOOLS ARCHPKGPOOLS; do
+ test -z "${!var}" && fatal_error "Empty %s" "${var}"
+done
+
+# From makepkg
+set -E
+for signal in TERM HUP QUIT; do
+ trap "trap_exit $signal '%s signal caught. Exiting...' $signal" "$signal"
+done
+trap 'trap_exit INT "Aborted by user! Exiting..."' INT
+trap 'trap_exit USR1 "An unknown error has occurred. Exiting..."' ERR
+
+init
diff --git a/extra/legacy/db-sync.conf b/extra/legacy/db-sync.conf
new file mode 100644
index 0000000..f7748c3
--- /dev/null
+++ b/extra/legacy/db-sync.conf
@@ -0,0 +1,11 @@
+#mirror="mirrors.kernel.org"
+mirror="mirrors.niyawe.de"
+
+## mirrors without sources folder
+#mirror="mirror.nl.leaseweb.net"
+#mirror="mirror.one.com"
+#mirror="mirror.us.leaseweb.net"
+#mirror="mirror.bytemark.co.uk"
+#mirror="mirror.de.leaseweb.net"
+
+mirrorpath="archlinux"
diff --git a/extra/legacy/db-update b/extra/legacy/db-update
new file mode 100755
index 0000000..4830791
--- /dev/null
+++ b/extra/legacy/db-update
@@ -0,0 +1,118 @@
+#!/bin/bash
+
+. "$(dirname "$(readlink -e "$0")")/config"
+. "$(dirname "$(readlink -e "$0")")/db-functions"
+
+if [ $# -ge 1 ]; then
+ warning "Calling %s with a specific repository is no longer supported" "${0##*/}"
+ exit 1
+fi
+
+# Find repos with packages to release
+staging_repos=($(find "${STAGING}" -mindepth 1 -type f -name "*${PKGEXT}" -printf '%h\n' | sort -u))
+if [ $? -ge 1 ]; then
+ die "Could not read %s" "${STAGING}"
+fi
+
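+# e.g. a staged file ${STAGING}/extra/foo-1-1-x86_64.pkg.tar.xz yields
+# "${STAGING}/extra" above; its basename "extra" is the candidate repo name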
+repos=()
+for staging_repo in "${staging_repos[@]##*/}"; do
+ if in_array "${staging_repo}" "${PKGREPOS[@]}"; then
+ repos+=("${staging_repo}")
+ fi
+done
+
+# TODO: this might lock too much (architectures)
+for repo in "${repos[@]}"; do
+ for pkgarch in "${ARCHES[@]}"; do
+ repo_lock "${repo}" "${pkgarch}" || exit 1
+ done
+done
+
+# check if packages are valid
+for repo in "${repos[@]}"; do
+ if ! check_repo_permission "${repo}"; then
+ die "You don't have permission to update packages in %s" "${repo}"
+ fi
+ pkgs=($(getpkgfiles "${STAGING}/${repo}/"*${PKGEXT}))
+ if [ $? -eq 0 ]; then
+ for pkg in "${pkgs[@]}"; do
+ if [ -h "${pkg}" ]; then
+ die "Package %s is a symbolic link" "${repo}/${pkg##*/}"
+ fi
+ if ! check_pkgfile "${pkg}"; then
+ die "Package %s is not consistent with its meta data" "${repo}/${pkg##*/}"
+ fi
+ if ! check_pkgrepos "${pkg}"; then
+ die "Package %s already exists in another repository" "${repo}/${pkg##*/}"
+ fi
+ done
+ # This is fucking obnoxious
+ #if ! check_splitpkgs ${repo} "${pkgs[@]}"; then
+ # die "Missing split packages for %s" "${repo}"
+ #fi
+ else
+ die "Could not read %s" "${STAGING}"
+ fi
+done
+
+dirs=()
+for repo in "${repos[@]}"; do
+ msg "Updating [%s]..." "${repo}"
+ any_pkgs=($(getpkgfiles "${STAGING}/${repo}/"*-any${PKGEXT} 2>/dev/null))
+ for pkgarch in "${ARCHES[@]}"; do
+ add_dirs=()
+ add_pkgs=()
+ arch_pkgs=($(getpkgfiles "${STAGING}/${repo}/"*-"${pkgarch}"${PKGEXT} 2>/dev/null))
+ for pkg in "${arch_pkgs[@]}" "${any_pkgs[@]}"; do
+ pkgfile="${pkg##*/}"
+ msg2 "%s (%s)" "${pkgfile}" "${pkgarch}"
+ # 'any' packages might have been moved already by a previous arch iteration
+ if [ -f "${pkg}" ]; then
+ mv "${pkg}" "$FTP_BASE/${PKGPOOL}"
+ fi
+ ln -s "../../../${PKGPOOL}/${pkgfile}" "$FTP_BASE/$repo/os/${pkgarch}"
+ # also move signatures
+ if [ -f "${pkg}.sig" ]; then
+ mv "${pkg}.sig" "$FTP_BASE/${PKGPOOL}"
+ fi
+ if [ -f "$FTP_BASE/${PKGPOOL}/${pkgfile}.sig" ]; then
+ ln -s "../../../${PKGPOOL}/${pkgfile}.sig" "$FTP_BASE/$repo/os/${pkgarch}"
+ fi
+ add_dirs+=("${STAGING}/abslibre/$(getpkgarch "$FTP_BASE/$PKGPOOL/$pkgfile")/$repo/$(getpkgbase "$FTP_BASE/$PKGPOOL/$pkgfile")")
+ add_pkgs+=("${pkgfile}")
+ done
+ for add_dir in "${add_dirs[@]}"; do
+ (cd "${add_dir}" && xbs release-server "${repo}" "${pkgarch}") ||
+ error 'cd %q && xbs release-server %q %q' "${add_dir}" "${repo}" "${pkgarch}"
+ done
+ if [ ${#add_pkgs[@]} -ge 1 ]; then
+ arch_repo_add "${repo}" "${pkgarch}" "${add_pkgs[@]}"
+ fi
+ dirs+=("${add_dirs[@]}")
+ done
+done
+
+for repo in "${repos[@]}"; do
+ for pkgarch in "${ARCHES[@]}"; do
+ repo_unlock "${repo}" "${pkgarch}"
+ done
+done
+
+cd "${STAGING}"
+
+# Remove left over XBS files
+rm -rf -- "${dirs[@]}"
+dirname -z -- "${dirs[@]}" |
+ xargs -0 realpath -zm --relative-to="${STAGING}" -- |
+ xargs -0 rmdir -p -- 2>/dev/null
+
+# Stage generated source files
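+# (paths are relative to ${STAGING}; e.g. sources/foo.tar.gz is published as
+# ${FTP_BASE}/sources/foo.tar.gz)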
+while read -r file; do
+ pub="${FTP_BASE}/${file}"
+ if [[ -f "$pub" ]]; then
+ warning "file already exists: %s" "${file}"
+ else
+ mkdir -p -- "${pub%/*}"
+ mv -vn "$file" "$pub"
+ fi
+done < <(find other sources -type f 2>/dev/null)
diff --git a/extra/legacy/list_nonfree_in_db.py b/extra/legacy/list_nonfree_in_db.py
new file mode 100755
index 0000000..a486fa5
--- /dev/null
+++ b/extra/legacy/list_nonfree_in_db.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python2
+# -*- coding: utf-8 -*-
+from filter import *
+import argparse
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ prog="nonfree_in_db",
+ description="Cleans nonfree files on repo",)
+
+ parser.add_argument("-k", "--blacklist-file", type=str,
+ help="File containing blacklisted names",
+ required=True,)
+
+ parser.add_argument("-b", "--database", type=str,
+ help="dabatase to clean",
+ required=True,)
+
+ args=parser.parse_args()
+
+ if not (args.blacklist_file and args.database):
+ parser.print_help()
+ exit(1)
+
+ blacklist=listado(args.blacklist_file)
+ pkgs=get_pkginfo_from_db(args.database)
+
+ print(" ".join([pkg["name"] for pkg in pkgs if pkg["name"] in blacklist]))
diff --git a/extra/legacy/make_individual_torrent b/extra/legacy/make_individual_torrent
new file mode 100755
index 0000000..0a7e778
--- /dev/null
+++ b/extra/legacy/make_individual_torrent
@@ -0,0 +1,52 @@
+#!/bin/bash
+# Copyright (C) 2014 Joseph Graham <joseph@t67.eu>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+# This script is called by `make_repo_torrents' to make a torrent. It
+# depends on `mktorrent'. It takes the following args:
+# $1 - path of package
+# $2 - public location
+
+# Comma separated list of trackers, no spaces
+# t67.eu is run by Xylon, hackcoop by fauno & friends
+trackers='http://t67.eu:6969/announce,http://tracker.hackcoop.com.ar/announce'
+
+# This mirror is put as a webseed. Which mirror we use for a webseed
+# doesn't really matter since it's re-written on the client machine by
+# pacman2pacman so it won't normally be used anyway.
+seed_url='http://repo.parabolagnulinux.org/'
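+
+# e.g. with pkg=/srv/ftp/pool/packages/foo-1-1-any.pkg.tar.xz (a hypothetical
+# path) and public_location=/srv/ftp/, the webseed computed below becomes
+# http://repo.parabolagnulinux.org/pool/packages/foo-1-1-any.pkg.tar.xz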
+
+if [[ -z "${1}" ]]
+then
+ echo "Error. First arg must be the path of the package."
+ exit 1
+fi
+
+if [[ -z "${2}" ]]
+then
+ echo "Error. Second arg must be the public location."
+ exit 1
+fi
+
+pkg="${1}"
+public_location="${2}"
+
+pkg_name="${pkg##*/}"
+
+# URL of the actual package for the webseed
+webseed="${seed_url}${pkg#${public_location}}"
+
+mktorrent -a "${trackers}" "${pkg}" -w "${webseed}" >/dev/null ||
+echo "Error making torrent for \"${pkg}\""
diff --git a/extra/legacy/mkrepo b/extra/legacy/mkrepo
new file mode 100755
index 0000000..b11dc0b
--- /dev/null
+++ b/extra/legacy/mkrepo
@@ -0,0 +1,15 @@
+#!/bin/bash
+# Author: Nicolás Reynolds <fauno@kiwwwi.com.ar>
+# License: GPLv3+
+# Description: A script to quickly create new [repos]
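+# Usage example: `mkrepo kernels` creates ${FTP_BASE}/kernels/os/<arch>/ for
+# every arch in ARCHES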
+
+source "$(dirname "$(readlink -e "$0")")/config"
+
+for repo in "$@"; do
+ echo ":: Creating [$repo]"
+ for arch in "${ARCHES[@]}"; do
+ mkdir -pv "${FTP_BASE}/${repo}/os/${arch}"
+ done
+done
+
+echo ":: All done. Add the repo to the ParabolaWeb admin page."
diff --git a/extra/legacy/test/__init__.py b/extra/legacy/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/extra/legacy/test/__init__.py
diff --git a/extra/legacy/test/blacklist_sample b/extra/legacy/test/blacklist_sample
new file mode 100644
index 0000000..2a02af6
--- /dev/null
+++ b/extra/legacy/test/blacklist_sample
@@ -0,0 +1,2 @@
+alex:alex-libre: Aquí va un comentario
+gmime22 ::Non free dependencies
\ No newline at end of file
diff --git a/extra/legacy/test/core.db.tar.gz b/extra/legacy/test/core.db.tar.gz
new file mode 100644
index 0000000..5eb2081
--- /dev/null
+++ b/extra/legacy/test/core.db.tar.gz
Binary files differ
diff --git a/extra/legacy/test/depends b/extra/legacy/test/depends
new file mode 100644
index 0000000..7ff3ad4
--- /dev/null
+++ b/extra/legacy/test/depends
@@ -0,0 +1,4 @@
+%DEPENDS%
+glibc>=2.13
+zlib
+
diff --git a/extra/legacy/test/desc b/extra/legacy/test/desc
new file mode 100644
index 0000000..abba644
--- /dev/null
+++ b/extra/legacy/test/desc
@@ -0,0 +1,39 @@
+%FILENAME%
+binutils-2.21-4-x86_64.pkg.tar.xz
+
+%NAME%
+binutils
+
+%VERSION%
+2.21-4
+
+%DESC%
+A set of programs to assemble and manipulate binary and object files
+
+%GROUPS%
+base
+
+%CSIZE%
+3412892
+
+%ISIZE%
+17571840
+
+%MD5SUM%
+4e666f87c78998f4839f33dc06d2043a
+
+%URL%
+http://www.gnu.org/software/binutils/
+
+%LICENSE%
+GPL
+
+%ARCH%
+x86_64
+
+%BUILDDATE%
+1297240369
+
+%PACKAGER%
+Allan McRae <allan@archlinux.org>
+
diff --git a/extra/legacy/test/lib/common.inc b/extra/legacy/test/lib/common.inc
new file mode 100644
index 0000000..a2dee10
--- /dev/null
+++ b/extra/legacy/test/lib/common.inc
@@ -0,0 +1,266 @@
+set -E
+
+. "$(dirname ${BASH_SOURCE[0]})/../../config"
+. "$(dirname ${BASH_SOURCE[0]})/../../db-functions"
+
+oneTimeSetUp() {
+ local p
+ local d
+ local a
+ local pkgname
+ local pkgarch
+ local pkgversion
+ local build
+ pkgdir="$(mktemp -d /tmp/$(basename $0).XXXXXXXXXX)"
+ cp -Lr $(dirname ${BASH_SOURCE[0]})/../packages/* "${pkgdir}"
+ msg 'Building packages...'
+ for d in "${pkgdir}"/*; do
+ pushd $d >/dev/null
+ pkgname=($(. PKGBUILD; echo ${pkgname[@]}))
+ pkgarch=($(. PKGBUILD; echo ${arch[@]}))
+ pkgversion=$(. PKGBUILD; echo $(get_full_version ${epoch:-0} ${pkgver} ${pkgrel}))
+
+ build=true
+ for a in ${pkgarch[@]}; do
+ for p in ${pkgname[@]}; do
+ [ ! -f ${p}-${pkgversion}-${a}${PKGEXT} ] && build=false
+ done
+ done
+
+ if ! ${build}; then
+ if [ "${pkgarch[0]}" == 'any' ]; then
+ sudo extra-x86_64-build || die 'extra-x86_64-build failed'
+ else
+ for a in ${pkgarch[@]}; do
+ sudo extra-${a}-build || die "extra-${a}-build failed"
+ done
+ fi
+ for a in ${pkgarch[@]}; do
+ for p in ${pkgname[@]}; do
+ cp ${p}-${pkgversion}-${a}${PKGEXT} $(dirname ${BASH_SOURCE[0]})/../packages/$(basename ${d})
+ done
+ done
+ fi
+ popd >/dev/null
+ done
+}
+
+oneTimeTearDown() {
+ rm -rf "${pkgdir}"
+}
+
+setUp() {
+ local p
+ local pkg
+ local r
+ local a
+
+ [ -f "$(dirname ${BASH_SOURCE[0]})/../../config.local" ] && die "$(dirname ${BASH_SOURCE[0]})/../../config.local exists"
+ TMP="$(mktemp -d /dev/shm/$(basename $0).XXXXXXXXXX)"
+ #msg "Using ${TMP}"
+
+ PKGREPOS=('core' 'extra' 'testing')
+ PKGPOOL='pool/packages'
+ mkdir -p "${TMP}/"{ftp,tmp,staging,{package,source}-cleanup,svn-packages-{copy,repo}}
+
+ for r in ${PKGREPOS[@]}; do
+ mkdir -p "${TMP}/staging/${r}"
+ for a in ${ARCHES[@]} any; do
+ mkdir -p "${TMP}/ftp/${r}/os/${a}"
+ done
+ done
+ mkdir -p "${TMP}/ftp/${PKGPOOL}"
+ mkdir -p "${TMP}/ftp/${SRCPOOL}"
+
+ msg 'Creating svn repository...'
+ svnadmin create "${TMP}/svn-packages-repo"
+ svn checkout -q "file://${TMP}/svn-packages-repo" "${TMP}/svn-packages-copy"
+
+ for p in "${pkgdir}"/*; do
+ pkg=$(basename $p)
+ mkdir -p "${TMP}/svn-packages-copy/${pkg}"/{trunk,repos}
+ cp "${p}"/* "${TMP}/svn-packages-copy"/${pkg}/trunk/
+ svn add -q "${TMP}/svn-packages-copy"/${pkg}
+ svn commit -q -m"initial commit of ${pkg}" "${TMP}/svn-packages-copy"
+ done
+
+ cat <<eot > "$(dirname ${BASH_SOURCE[0]})/../../config.local"
+ FTP_BASE="${TMP}/ftp"
+ SVNREPO="file://${TMP}/svn-packages-repo"
+ PKGREPOS=(${PKGREPOS[@]})
+ PKGPOOL="${PKGPOOL}"
+ CLEANUP_DESTDIR="${TMP}/package-cleanup"
+ SOURCE_CLEANUP_DESTDIR="${TMP}/source-cleanup"
+ STAGING="${TMP}/staging"
+ TMPDIR="${TMP}/tmp"
+ CLEANUP_DRYRUN=false
+ SOURCE_CLEANUP_DRYRUN=false
+ REQUIRE_SIGNATURE=true
+eot
+ . "$(dirname ${BASH_SOURCE[0]})/../../config"
+}
+
+tearDown() {
+ rm -rf "${TMP}"
+ rm -f "$(dirname ${BASH_SOURCE[0]})/../../config.local"
+ echo
+}
+
+releasePackage() {
+ local repo=$1
+ local pkgbase=$2
+ local arch=$3
+
+ pushd "${TMP}/svn-packages-copy"/${pkgbase}/trunk/ >/dev/null
+ archrelease ${repo}-${arch} >/dev/null 2>&1
+ pkgver=$(. PKGBUILD; echo $(get_full_version ${epoch:-0} ${pkgver} ${pkgrel}))
+ popd >/dev/null
+ cp "${pkgdir}/${pkgbase}"/*-${pkgver}-${arch}${PKGEXT} "${STAGING}"/${repo}/
+
+ if ${REQUIRE_SIGNATURE}; then
+ # TODO: really sign the packages with a valid key
+ find "${STAGING}"/${repo}/ -type f \
+ -name "*-${pkgver}-${arch}${PKGEXT}" \
+ -exec touch {}.sig \;
+ fi
+}
+
+checkAnyPackageDB() {
+ local repo=$1
+ local pkg=$2
+ local arch
+ local db
+
+ [ -r "${FTP_BASE}/${PKGPOOL}/${pkg}" ] || fail "${PKGPOOL}/${pkg} not found"
+ if ${REQUIRE_SIGNATURE}; then
+ [ -r "${FTP_BASE}/${PKGPOOL}/${pkg}.sig" ] || fail "${PKGPOOL}/${pkg}.sig not found"
+ fi
+
+ for arch in i686 x86_64; do
+ [ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}" ] || fail "${repo}/os/${arch}/${pkg} is not a symlink"
+ [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}")" == "${FTP_BASE}/${PKGPOOL}/${pkg}" ] \
+ || fail "${repo}/os/${arch}/${pkg} does not link to ${PKGPOOL}/${pkg}"
+
+ if ${REQUIRE_SIGNATURE}; then
+ [ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig" ] || fail "${repo}/os/${arch}/${pkg}.sig is not a symlink"
+ [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig")" == "${FTP_BASE}/${PKGPOOL}/${pkg}.sig" ] \
+ || fail "${repo}/os/${arch}/${pkg}.sig does not link to ${PKGPOOL}/${pkg}.sig"
+ fi
+ done
+ [ -r "${STAGING}"/${repo}/${pkg} ] && fail "${repo}/${pkg} found in staging dir"
+ [ -r "${STAGING}"/${repo}/${pkg}.sig ] && fail "${repo}/${pkg}.sig found in staging dir"
+
+ for db in ${DBEXT} ${FILESEXT}; do
+ ( [ -r "${FTP_BASE}/${repo}/os/${arch}/${repo}${db%.tar.*}" ] \
+ && bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${db%.tar.*}" -O | grep -q ${pkg}) \
+ || fail "${pkg} not in ${repo}/os/${arch}/${repo}${db%.tar.*}"
+ done
+
+ [ -r "${FTP_BASE}/${repo}/os/any/${pkg}" ] && fail "${repo}/os/any/${pkg} should not exist"
+ [ -r "${FTP_BASE}/${repo}/os/any/${pkg}.sig" ] && fail "${repo}/os/any/${pkg}.sig should not exist"
+}
+
+checkAnyPackage() {
+ local repo=$1
+ local pkg=$2
+
+ checkAnyPackageDB $repo $pkg
+
+ local pkgbase=$(getpkgbase "${FTP_BASE}/${PKGPOOL}/${pkg}")
+ svn up -q "${TMP}/svn-packages-copy/${pkgbase}"
+ [ -d "${TMP}/svn-packages-copy/${pkgbase}/repos/${repo}-any" ] \
+ || fail "svn-packages-copy/${pkgbase}/repos/${repo}-any does not exist"
+}
+
+checkPackageDB() {
+ local repo=$1
+ local pkg=$2
+ local arch=$3
+ local db
+
+ [ -r "${FTP_BASE}/${PKGPOOL}/${pkg}" ] || fail "${PKGPOOL}/${pkg} not found"
+ [ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}" ] || fail "${repo}/os/${arch}/${pkg} not a symlink"
+ [ -r "${STAGING}"/${repo}/${pkg} ] && fail "${repo}/${pkg} found in staging dir"
+
+ [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}")" == "${FTP_BASE}/${PKGPOOL}/${pkg}" ] \
+ || fail "${repo}/os/${arch}/${pkg} does not link to ${PKGPOOL}/${pkg}"
+
+ if ${REQUIRE_SIGNATURE}; then
+ [ -r "${FTP_BASE}/${PKGPOOL}/${pkg}.sig" ] || fail "${PKGPOOL}/${pkg}.sig not found"
+ [ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig" ] || fail "${repo}/os/${arch}/${pkg}.sig is not a symlink"
+ [ -r "${STAGING}"/${repo}/${pkg}.sig ] && fail "${repo}/${pkg}.sig found in staging dir"
+
+ [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig")" == "${FTP_BASE}/${PKGPOOL}/${pkg}.sig" ] \
+ || fail "${repo}/os/${arch}/${pkg}.sig does not link to ${PKGPOOL}/${pkg}.sig"
+ fi
+
+ for db in ${DBEXT} ${FILESEXT}; do
+ ( [ -r "${FTP_BASE}/${repo}/os/${arch}/${repo}${db%.tar.*}" ] \
+ && bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${db%.tar.*}" -O | grep -q ${pkg}) \
+ || fail "${pkg} not in ${repo}/os/${arch}/${repo}${db%.tar.*}"
+ done
+}
+
+checkPackage() {
+ local repo=$1
+ local pkg=$2
+ local arch=$3
+
+ checkPackageDB $repo $pkg $arch
+
+ local pkgbase=$(getpkgbase "${FTP_BASE}/${PKGPOOL}/${pkg}")
+ svn up -q "${TMP}/svn-packages-copy/${pkgbase}"
+ [ -d "${TMP}/svn-packages-copy/${pkgbase}/repos/${repo}-${arch}" ] \
+ || fail "svn-packages-copy/${pkgbase}/repos/${repo}-${arch} does not exist"
+}
+
+checkRemovedPackageDB() {
+ local repo=$1
+ local pkgbase=$2
+ local arch=$3
+ local db
+
+ for db in ${DBEXT} ${FILESEXT}; do
+ ( [ -r "${FTP_BASE}/${repo}/os/${arch}/${repo}${db%.tar.*}" ] \
+ && bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${db%.tar.*}" -O | grep -q ${pkgbase}) \
+ && fail "${pkgbase} should not be in ${repo}/os/${arch}/${repo}${db%.tar.*}"
+ done
+}
+
+checkRemovedPackage() {
+ local repo=$1
+ local pkgbase=$2
+ local arch=$3
+
+ checkRemovedPackageDB $repo $pkgbase $arch
+
+ svn up -q "${TMP}/svn-packages-copy/${pkgbase}"
+ [ -d "${TMP}/svn-packages-copy/${pkgbase}/repos/${repo}-${arch}" ] \
+ && fail "svn-packages-copy/${pkgbase}/repos/${repo}-${arch} should not exist"
+}
+
+checkRemovedAnyPackageDB() {
+ local repo=$1
+ local pkgbase=$2
+ local arch
+ local db
+
+ for db in ${DBEXT} ${FILESEXT}; do
+ for arch in i686 x86_64; do
+ ( [ -r "${FTP_BASE}/${repo}/os/${arch}/${repo}${db%.tar.*}" ] \
+ && bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${db%.tar.*}" -O | grep -q ${pkgbase}) \
+ && fail "${pkgbase} should not be in ${repo}/os/${arch}/${repo}${db%.tar.*}"
+ done
+ done
+}
+
+checkRemovedAnyPackage() {
+ local repo=$1
+ local pkgbase=$2
+
+ checkRemovedAnyPackageDB $repo $pkgbase
+
+ svn up -q "${TMP}/svn-packages-copy/${pkgbase}"
+ [ -d "${TMP}/svn-packages-copy/${pkgbase}/repos/${repo}-any" ] \
+ && fail "svn-packages-copy/${pkgbase}/repos/${repo}-any should not exist"
+}
diff --git a/extra/legacy/test/lib/shunit2 b/extra/legacy/test/lib/shunit2
new file mode 100755
index 0000000..8862ffd
--- /dev/null
+++ b/extra/legacy/test/lib/shunit2
@@ -0,0 +1,1048 @@
+#! /bin/sh
+# $Id: shunit2 335 2011-05-01 20:10:33Z kate.ward@forestent.com $
+# vim:et:ft=sh:sts=2:sw=2
+#
+# Copyright 2008 Kate Ward. All Rights Reserved.
+# Released under the LGPL (GNU Lesser General Public License)
+#
+# shUnit2 -- Unit testing framework for Unix shell scripts.
+# http://code.google.com/p/shunit2/
+#
+# Author: kate.ward@forestent.com (Kate Ward)
+#
+# shUnit2 is an xUnit-based unit test framework for Bourne shell scripts. It is
+# based on the popular JUnit unit testing framework for Java.
+
+# return if shunit already loaded
+[ -n "${SHUNIT_VERSION:-}" ] && exit 0
+
+SHUNIT_VERSION='2.1.6'
+
+SHUNIT_TRUE=0
+SHUNIT_FALSE=1
+SHUNIT_ERROR=2
+
+# enable strict mode by default
+SHUNIT_STRICT=${SHUNIT_STRICT:-${SHUNIT_TRUE}}
+
+_shunit_warn() { echo "shunit2:WARN $@" >&2; }
+_shunit_error() { echo "shunit2:ERROR $@" >&2; }
+_shunit_fatal() { echo "shunit2:FATAL $@" >&2; exit ${SHUNIT_ERROR}; }
+
+# specific shell checks
+if [ -n "${ZSH_VERSION:-}" ]; then
+ setopt |grep "^shwordsplit$" >/dev/null
+ if [ $? -ne ${SHUNIT_TRUE} ]; then
+ _shunit_fatal 'zsh shwordsplit option is required for proper operation'
+ fi
+ if [ -z "${SHUNIT_PARENT:-}" ]; then
+ _shunit_fatal "zsh does not pass \$0 through properly. please declare \
+\"SHUNIT_PARENT=\$0\" before calling shUnit2"
+ fi
+fi
+
+#
+# constants
+#
+
+__SHUNIT_ASSERT_MSG_PREFIX='ASSERT:'
+__SHUNIT_MODE_SOURCED='sourced'
+__SHUNIT_MODE_STANDALONE='standalone'
+__SHUNIT_PARENT=${SHUNIT_PARENT:-$0}
+
+# set the constants readonly
+shunit_constants_=`set |grep '^__SHUNIT_' |cut -d= -f1`
+echo "${shunit_constants_}" |grep '^Binary file' >/dev/null && \
+ shunit_constants_=`set |grep -a '^__SHUNIT_' |cut -d= -f1`
+for shunit_constant_ in ${shunit_constants_}; do
+ shunit_ro_opts_=''
+ case ${ZSH_VERSION:-} in
+ '') ;; # this isn't zsh
+ [123].*) ;; # early versions (1.x, 2.x, 3.x)
+ *) shunit_ro_opts_='-g' ;; # all later versions. declare readonly globally
+ esac
+ readonly ${shunit_ro_opts_} ${shunit_constant_}
+done
+unset shunit_constant_ shunit_constants_ shunit_ro_opts_
+
+# variables
+__shunit_lineno='' # line number of executed test
+__shunit_mode=${__SHUNIT_MODE_SOURCED} # operating mode
+__shunit_reportGenerated=${SHUNIT_FALSE} # is report generated
+__shunit_script='' # filename of unittest script (standalone mode)
+__shunit_skip=${SHUNIT_FALSE} # is skipping enabled
+__shunit_suite='' # suite of tests to execute
+
+# counts of tests
+__shunit_testSuccess=${SHUNIT_TRUE}
+__shunit_testsTotal=0
+__shunit_testsPassed=0
+__shunit_testsFailed=0
+
+# counts of asserts
+__shunit_assertsTotal=0
+__shunit_assertsPassed=0
+__shunit_assertsFailed=0
+__shunit_assertsSkipped=0
+
+# macros
+_SHUNIT_LINENO_='eval __shunit_lineno=""; if [ "${1:-}" = "--lineno" ]; then [ -n "$2" ] && __shunit_lineno="[$2] "; shift 2; fi'
+
+#-----------------------------------------------------------------------------
+# assert functions
+#
+
+# Assert that two values are equal to one another.
+#
+# Args:
+# message: string: failure message [optional]
+# expected: string: expected value
+# actual: string: actual value
+# Returns:
+# integer: success (TRUE/FALSE/ERROR constant)
+assertEquals()
+{
+ ${_SHUNIT_LINENO_}
+ if [ $# -lt 2 -o $# -gt 3 ]; then
+ _shunit_error "assertEquals() requires two or three arguments; $# given"
+ _shunit_error "1: ${1:+$1} 2: ${2:+$2} 3: ${3:+$3}${4:+ 4: $4}"
+ return ${SHUNIT_ERROR}
+ fi
+ _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+ shunit_message_=${__shunit_lineno}
+ if [ $# -eq 3 ]; then
+ shunit_message_="${shunit_message_}$1"
+ shift
+ fi
+ shunit_expected_=$1
+ shunit_actual_=$2
+
+ shunit_return=${SHUNIT_TRUE}
+ if [ "${shunit_expected_}" = "${shunit_actual_}" ]; then
+ _shunit_assertPass
+ else
+ failNotEquals "${shunit_message_}" "${shunit_expected_}" "${shunit_actual_}"
+ shunit_return=${SHUNIT_FALSE}
+ fi
+
+ unset shunit_message_ shunit_expected_ shunit_actual_
+ return ${shunit_return}
+}
+_ASSERT_EQUALS_='eval assertEquals --lineno "${LINENO:-}"'
+
+# Assert that two values are not equal to one another.
+#
+# Args:
+# message: string: failure message [optional]
+# expected: string: expected value
+# actual: string: actual value
+# Returns:
+# integer: success (TRUE/FALSE/ERROR constant)
+assertNotEquals()
+{
+ ${_SHUNIT_LINENO_}
+ if [ $# -lt 2 -o $# -gt 3 ]; then
+ _shunit_error "assertNotEquals() requires two or three arguments; $# given"
+ return ${SHUNIT_ERROR}
+ fi
+ _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+ shunit_message_=${__shunit_lineno}
+ if [ $# -eq 3 ]; then
+ shunit_message_="${shunit_message_}$1"
+ shift
+ fi
+ shunit_expected_=$1
+ shunit_actual_=$2
+
+ shunit_return=${SHUNIT_TRUE}
+ if [ "${shunit_expected_}" != "${shunit_actual_}" ]; then
+ _shunit_assertPass
+ else
+ failSame "${shunit_message_}" "$@"
+ shunit_return=${SHUNIT_FALSE}
+ fi
+
+ unset shunit_message_ shunit_expected_ shunit_actual_
+ return ${shunit_return}
+}
+_ASSERT_NOT_EQUALS_='eval assertNotEquals --lineno "${LINENO:-}"'
+
+# Assert that a value is null (i.e. an empty string)
+#
+# Args:
+# message: string: failure message [optional]
+# actual: string: actual value
+# Returns:
+# integer: success (TRUE/FALSE/ERROR constant)
+assertNull()
+{
+ ${_SHUNIT_LINENO_}
+ if [ $# -lt 1 -o $# -gt 2 ]; then
+ _shunit_error "assertNull() requires one or two arguments; $# given"
+ return ${SHUNIT_ERROR}
+ fi
+ _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+ shunit_message_=${__shunit_lineno}
+ if [ $# -eq 2 ]; then
+ shunit_message_="${shunit_message_}$1"
+ shift
+ fi
+ assertTrue "${shunit_message_}" "[ -z '$1' ]"
+ shunit_return=$?
+
+ unset shunit_message_
+ return ${shunit_return}
+}
+_ASSERT_NULL_='eval assertNull --lineno "${LINENO:-}"'
+
+# Assert that a value is not null (i.e. a non-empty string)
+#
+# Args:
+# message: string: failure message [optional]
+# actual: string: actual value
+# Returns:
+# integer: success (TRUE/FALSE/ERROR constant)
+assertNotNull()
+{
+ ${_SHUNIT_LINENO_}
+ if [ $# -gt 2 ]; then # allowing 0 arguments as $1 might actually be null
+ _shunit_error "assertNotNull() requires one or two arguments; $# given"
+ return ${SHUNIT_ERROR}
+ fi
+ _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+ shunit_message_=${__shunit_lineno}
+ if [ $# -eq 2 ]; then
+ shunit_message_="${shunit_message_}$1"
+ shift
+ fi
+ shunit_actual_=`_shunit_escapeCharactersInString "${1:-}"`
+ test -n "${shunit_actual_}"
+ assertTrue "${shunit_message_}" $?
+ shunit_return=$?
+
+ unset shunit_actual_ shunit_message_
+ return ${shunit_return}
+}
+_ASSERT_NOT_NULL_='eval assertNotNull --lineno "${LINENO:-}"'
+
+# Assert that two values are the same (i.e. equal to one another).
+#
+# Args:
+# message: string: failure message [optional]
+# expected: string: expected value
+# actual: string: actual value
+# Returns:
+# integer: success (TRUE/FALSE/ERROR constant)
+assertSame()
+{
+ ${_SHUNIT_LINENO_}
+ if [ $# -lt 2 -o $# -gt 3 ]; then
+ _shunit_error "assertSame() requires two or three arguments; $# given"
+ return ${SHUNIT_ERROR}
+ fi
+ _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+ shunit_message_=${__shunit_lineno}
+ if [ $# -eq 3 ]; then
+ shunit_message_="${shunit_message_}$1"
+ shift
+ fi
+ assertEquals "${shunit_message_}" "$1" "$2"
+ shunit_return=$?
+
+ unset shunit_message_
+ return ${shunit_return}
+}
+_ASSERT_SAME_='eval assertSame --lineno "${LINENO:-}"'
+
+# Assert that two values are not the same (i.e. not equal to one another).
+#
+# Args:
+# message: string: failure message [optional]
+# expected: string: expected value
+# actual: string: actual value
+# Returns:
+# integer: success (TRUE/FALSE/ERROR constant)
+assertNotSame()
+{
+ ${_SHUNIT_LINENO_}
+ if [ $# -lt 2 -o $# -gt 3 ]; then
+ _shunit_error "assertNotSame() requires two or three arguments; $# given"
+ return ${SHUNIT_ERROR}
+ fi
+ _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+ shunit_message_=${__shunit_lineno}
+ if [ $# -eq 3 ]; then
+ shunit_message_="${shunit_message_:-}$1"
+ shift
+ fi
+ assertNotEquals "${shunit_message_}" "$1" "$2"
+ shunit_return=$?
+
+ unset shunit_message_
+ return ${shunit_return}
+}
+_ASSERT_NOT_SAME_='eval assertNotSame --lineno "${LINENO:-}"'
+
+# Assert that a value or shell test condition is true.
+#
+# In shell, a value of 0 is true and a non-zero value is false. Any integer
+# value passed can thereby be tested.
+#
+# Shell supports much more complicated tests though, and a means to support
+# them was needed. As such, this function tests that conditions are true or
+# false through evaluation rather than just looking for a true or false.
+#
+# The following test will succeed:
+# assertTrue 0
+# assertTrue "[ 34 -gt 23 ]"
+# The following test will fail with a message:
+# assertTrue 123
+# assertTrue "test failed" "[ -r '/non/existent/file' ]"
+#
+# Args:
+# message: string: failure message [optional]
+# condition: string: integer value or shell conditional statement
+# Returns:
+# integer: success (TRUE/FALSE/ERROR constant)
+assertTrue()
+{
+ ${_SHUNIT_LINENO_}
+ if [ $# -gt 2 ]; then
+ _shunit_error "assertTrue() takes one two arguments; $# given"
+ return ${SHUNIT_ERROR}
+ fi
+ _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+ shunit_message_=${__shunit_lineno}
+ if [ $# -eq 2 ]; then
+ shunit_message_="${shunit_message_}$1"
+ shift
+ fi
+ shunit_condition_=$1
+
+ # see if condition is an integer, i.e. a return value
+ shunit_match_=`expr "${shunit_condition_}" : '\([0-9]*\)'`
+ shunit_return=${SHUNIT_TRUE}
+ if [ -z "${shunit_condition_}" ]; then
+ # null condition
+ shunit_return=${SHUNIT_FALSE}
+ elif [ -n "${shunit_match_}" -a "${shunit_condition_}" = "${shunit_match_}" ]
+ then
+ # possible return value. treating 0 as true, and non-zero as false.
+ [ ${shunit_condition_} -ne 0 ] && shunit_return=${SHUNIT_FALSE}
+ else
+ # (hopefully) a condition
+ ( eval ${shunit_condition_} ) >/dev/null 2>&1
+ [ $? -ne 0 ] && shunit_return=${SHUNIT_FALSE}
+ fi
+
+ # record the test
+ if [ ${shunit_return} -eq ${SHUNIT_TRUE} ]; then
+ _shunit_assertPass
+ else
+ _shunit_assertFail "${shunit_message_}"
+ fi
+
+ unset shunit_message_ shunit_condition_ shunit_match_
+ return ${shunit_return}
+}
+_ASSERT_TRUE_='eval assertTrue --lineno "${LINENO:-}"'
+
+# Assert that a value or shell test condition is false.
+#
+# In shell, a value of 0 is true and a non-zero value is false. Any integer
+# value passed can thereby be tested.
+#
+# Shell supports much more complicated tests though, and a means to support
+# them was needed. As such, this function tests that conditions are true or
+# false through evaluation rather than just looking for a true or false.
+#
+# The following test will succeed:
+# assertFalse 1
+# assertFalse "[ 'apples' = 'oranges' ]"
+# The following test will fail with a message:
+# assertFalse 0
+# assertFalse "test failed" "[ 1 -eq 1 -a 2 -eq 2 ]"
+#
+# Args:
+# message: string: failure message [optional]
+# condition: string: integer value or shell conditional statement
+# Returns:
+# integer: success (TRUE/FALSE/ERROR constant)
+assertFalse()
+{
+ ${_SHUNIT_LINENO_}
+ if [ $# -lt 1 -o $# -gt 2 ]; then
+ _shunit_error "assertFalse() quires one or two arguments; $# given"
+ return ${SHUNIT_ERROR}
+ fi
+ _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+ shunit_message_=${__shunit_lineno}
+ if [ $# -eq 2 ]; then
+ shunit_message_="${shunit_message_}$1"
+ shift
+ fi
+ shunit_condition_=$1
+
+ # see if condition is an integer, i.e. a return value
+ shunit_match_=`expr "${shunit_condition_}" : '\([0-9]*\)'`
+ shunit_return=${SHUNIT_TRUE}
+ if [ -z "${shunit_condition_}" ]; then
+ # null condition
+ shunit_return=${SHUNIT_FALSE}
+ elif [ -n "${shunit_match_}" -a "${shunit_condition_}" = "${shunit_match_}" ]
+ then
+ # possible return value. treating 0 as true, and non-zero as false.
+ [ ${shunit_condition_} -eq 0 ] && shunit_return=${SHUNIT_FALSE}
+ else
+ # (hopefully) a condition
+ ( eval ${shunit_condition_} ) >/dev/null 2>&1
+ [ $? -eq 0 ] && shunit_return=${SHUNIT_FALSE}
+ fi
+
+ # record the test
+ if [ ${shunit_return} -eq ${SHUNIT_TRUE} ]; then
+ _shunit_assertPass
+ else
+ _shunit_assertFail "${shunit_message_}"
+ fi
+
+ unset shunit_message_ shunit_condition_ shunit_match_
+ return ${shunit_return}
+}
+_ASSERT_FALSE_='eval assertFalse --lineno "${LINENO:-}"'
+
+#-----------------------------------------------------------------------------
+# failure functions
+#
+
+# Records a test failure.
+#
+# Args:
+# message: string: failure message [optional]
+# Returns:
+# integer: success (TRUE/FALSE/ERROR constant)
+fail()
+{
+ ${_SHUNIT_LINENO_}
+ if [ $# -gt 1 ]; then
+ _shunit_error "fail() requires zero or one arguments; $# given"
+ return ${SHUNIT_ERROR}
+ fi
+ _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+ shunit_message_=${__shunit_lineno}
+ if [ $# -eq 1 ]; then
+ shunit_message_="${shunit_message_}$1"
+ shift
+ fi
+
+ _shunit_assertFail "${shunit_message_}"
+
+ unset shunit_message_
+ return ${SHUNIT_FALSE}
+}
+_FAIL_='eval fail --lineno "${LINENO:-}"'
+
+# Records a test failure, stating two values were not equal.
+#
+# Args:
+# message: string: failure message [optional]
+# expected: string: expected value
+# actual: string: actual value
+# Returns:
+# integer: success (TRUE/FALSE/ERROR constant)
+failNotEquals()
+{
+ ${_SHUNIT_LINENO_}
+ if [ $# -lt 2 -o $# -gt 3 ]; then
+ _shunit_error "failNotEquals() requires one or two arguments; $# given"
+ return ${SHUNIT_ERROR}
+ fi
+ _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+ shunit_message_=${__shunit_lineno}
+ if [ $# -eq 3 ]; then
+ shunit_message_="${shunit_message_}$1"
+ shift
+ fi
+ shunit_expected_=$1
+ shunit_actual_=$2
+
+ _shunit_assertFail "${shunit_message_:+${shunit_message_} }expected:<${shunit_expected_}> but was:<${shunit_actual_}>"
+
+ unset shunit_message_ shunit_expected_ shunit_actual_
+ return ${SHUNIT_FALSE}
+}
+_FAIL_NOT_EQUALS_='eval failNotEquals --lineno "${LINENO:-}"'
+
+# Records a test failure, stating two values should have been the same.
+#
+# Args:
+# message: string: failure message [optional]
+# expected: string: expected value
+# actual: string: actual value
+# Returns:
+# integer: success (TRUE/FALSE/ERROR constant)
+failSame()
+{
+ ${_SHUNIT_LINENO_}
+ if [ $# -lt 2 -o $# -gt 3 ]; then
+ _shunit_error "failSame() requires two or three arguments; $# given"
+ return ${SHUNIT_ERROR}
+ fi
+ _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+ shunit_message_=${__shunit_lineno}
+ if [ $# -eq 3 ]; then
+ shunit_message_="${shunit_message_}$1"
+ shift
+ fi
+
+ _shunit_assertFail "${shunit_message_:+${shunit_message_} }expected not same"
+
+ unset shunit_message_
+ return ${SHUNIT_FALSE}
+}
+_FAIL_SAME_='eval failSame --lineno "${LINENO:-}"'
+
+# Records a test failure, stating two values were not equal.
+#
+# This is functionally equivalent to calling failNotEquals().
+#
+# Args:
+# message: string: failure message [optional]
+# expected: string: expected value
+# actual: string: actual value
+# Returns:
+# integer: success (TRUE/FALSE/ERROR constant)
+failNotSame()
+{
+ ${_SHUNIT_LINENO_}
+ if [ $# -lt 2 -o $# -gt 3 ]; then
+ _shunit_error "failNotEquals() requires one or two arguments; $# given"
+ return ${SHUNIT_ERROR}
+ fi
+ _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+ shunit_message_=${__shunit_lineno}
+ if [ $# -eq 3 ]; then
+ shunit_message_="${shunit_message_}$1"
+ shift
+ fi
+ failNotEquals "${shunit_message_}" "$1" "$2"
+ shunit_return=$?
+
+ unset shunit_message_
+ return ${shunit_return}
+}
+_FAIL_NOT_SAME_='eval failNotSame --lineno "${LINENO:-}"'
+
+#-----------------------------------------------------------------------------
+# skipping functions
+#
+
+# Force remaining assert and fail functions to be "skipped".
+#
+# This function forces the remaining assert and fail functions to be "skipped",
+# i.e. they will have no effect. Each function skipped will be recorded so that
+# the total of asserts and fails will not be altered.
+#
+# Args:
+# None
+startSkipping()
+{
+ __shunit_skip=${SHUNIT_TRUE}
+}
+
+# Resume the normal recording behavior of assert and fail calls.
+#
+# Args:
+# None
+endSkipping()
+{
+ __shunit_skip=${SHUNIT_FALSE}
+}
+
+# Returns the state of assert and fail call skipping.
+#
+# Args:
+# None
+# Returns:
+# boolean: (TRUE/FALSE constant)
+isSkipping()
+{
+ return ${__shunit_skip}
+}
+
+#-----------------------------------------------------------------------------
+# suite functions
+#
+
+# Stub. This function should contain all unit test calls to be made.
+#
+# DEPRECATED (as of 2.1.0)
+#
+# This function can be optionally overridden by the user in their test suite.
+#
+# If this function exists, it will be called when shunit2 is sourced. If it
+# does not exist, shunit2 will search the parent script for all functions
+# beginning with the word 'test', and they will be added dynamically to the
+# test suite.
+#
+# This function should be overridden by the user in their unit test suite.
+# Note: see _shunit_mktempFunc() for actual implementation
+#
+# Args:
+# None
+#suite() { :; } # DO NOT UNCOMMENT THIS FUNCTION
+
+# Adds a function name to the list of tests scheduled for execution.
+#
+# This function should only be called from within the suite() function.
+#
+# Args:
+# function: string: name of a function to add to current unit test suite
+suite_addTest()
+{
+ shunit_func_=${1:-}
+
+ __shunit_suite="${__shunit_suite:+${__shunit_suite} }${shunit_func_}"
+ __shunit_testsTotal=`expr ${__shunit_testsTotal} + 1`
+
+ unset shunit_func_
+}
+
+# Stub. This function will be called once before any tests are run.
+#
+# Common one-time environment preparation tasks shared by all tests can be
+# defined here.
+#
+# This function should be overridden by the user in their unit test suite.
+# Note: see _shunit_mktempFunc() for actual implementation
+#
+# Args:
+# None
+#oneTimeSetUp() { :; } # DO NOT UNCOMMENT THIS FUNCTION
+
+# Stub. This function will be called once after all tests are finished.
+#
+# Common one-time environment cleanup tasks shared by all tests can be defined
+# here.
+#
+# This function should be overridden by the user in their unit test suite.
+# Note: see _shunit_mktempFunc() for actual implementation
+#
+# Args:
+# None
+#oneTimeTearDown() { :; } # DO NOT UNCOMMENT THIS FUNCTION
+
+# Stub. This function will be called before each test is run.
+#
+# Common environment preparation tasks shared by all tests can be defined here.
+#
+# This function should be overridden by the user in their unit test suite.
+# Note: see _shunit_mktempFunc() for actual implementation
+#
+# Args:
+# None
+#setUp() { :; }
+
+# Stub. This function will be called after each test is run.
+#
+# Common environment cleanup tasks shared by all tests can be defined here.
+#
+# This function should be overridden by the user in their unit test suite.
+# Note: see _shunit_mktempFunc() for actual implementation
+#
+# Args:
+# None
+#tearDown() { :; } # DO NOT UNCOMMENT THIS FUNCTION
+
+#------------------------------------------------------------------------------
+# internal shUnit2 functions
+#
+
+# Create a temporary directory to store various run-time files in.
+#
+# This function is a cross-platform temporary directory creation tool. Not all
+# OSes have the mktemp function, so one is included here.
+#
+# Args:
+# None
+# Outputs:
+# string: the temporary directory that was created
+_shunit_mktempDir()
+{
+ # try the standard mktemp function
+ ( exec mktemp -dqt shunit.XXXXXX 2>/dev/null ) && return
+
+ # the standard mktemp didn't work. doing our own.
+ if [ -r '/dev/urandom' -a -x '/usr/bin/od' ]; then
+ _shunit_random_=`/usr/bin/od -vAn -N4 -tx4 </dev/urandom \
+ |sed 's/^[^0-9a-f]*//'`
+ elif [ -n "${RANDOM:-}" ]; then
+ # $RANDOM works
+ _shunit_random_=${RANDOM}${RANDOM}${RANDOM}$$
+ else
+ # $RANDOM doesn't work
+ _shunit_date_=`date '+%Y%m%d%H%M%S'`
+ _shunit_random_=`expr ${_shunit_date_} / $$`
+ fi
+
+ _shunit_tmpDir_="${TMPDIR:-/tmp}/shunit.${_shunit_random_}"
+ ( umask 077 && mkdir "${_shunit_tmpDir_}" ) || \
+ _shunit_fatal 'could not create temporary directory! exiting'
+
+ echo ${_shunit_tmpDir_}
+ unset _shunit_date_ _shunit_random_ _shunit_tmpDir_
+}
+
+# This function is here to work around issues in Cygwin.
+#
+# Args:
+# None
+_shunit_mktempFunc()
+{
+ for _shunit_func_ in oneTimeSetUp oneTimeTearDown setUp tearDown suite noexec
+ do
+ _shunit_file_="${__shunit_tmpDir}/${_shunit_func_}"
+ cat <<EOF >"${_shunit_file_}"
+#! /bin/sh
+exit ${SHUNIT_TRUE}
+EOF
+ chmod +x "${_shunit_file_}"
+ done
+
+ unset _shunit_file_
+}
+
+# Final cleanup function to leave things as we found them.
+#
+# Besides removing the temporary directory, this function is in charge of the
+# final exit code of the unit test. The exit code is based on how the script
+# was ended (e.g. normal exit, or via Ctrl-C).
+#
+# Args:
+# name: string: name of the trap called (specified when trap defined)
+_shunit_cleanup()
+{
+ _shunit_name_=$1
+
+ case ${_shunit_name_} in
+ EXIT) _shunit_signal_=0 ;;
+ INT) _shunit_signal_=2 ;;
+ TERM) _shunit_signal_=15 ;;
+ *)
+ _shunit_warn "unrecognized trap value (${_shunit_name_})"
+ _shunit_signal_=0
+ ;;
+ esac
+
+ # do our work
+ rm -fr "${__shunit_tmpDir}"
+
+ # exit for all non-EXIT signals
+ if [ ${_shunit_name_} != 'EXIT' ]; then
+ _shunit_warn "trapped and now handling the (${_shunit_name_}) signal"
+ # disable EXIT trap
+ trap 0
+ # add 128 to signal and exit
+ exit `expr ${_shunit_signal_} + 128`
+ elif [ ${__shunit_reportGenerated} -eq ${SHUNIT_FALSE} ] ; then
+ _shunit_assertFail 'Unknown failure encountered running a test'
+ _shunit_generateReport
+ exit ${SHUNIT_ERROR}
+ fi
+
+ unset _shunit_name_ _shunit_signal_
+}
+
+# The actual running of the tests happens here.
+#
+# Args:
+# None
+_shunit_execSuite()
+{
+ for _shunit_test_ in ${__shunit_suite}; do
+ __shunit_testSuccess=${SHUNIT_TRUE}
+
+ # disable skipping
+ endSkipping
+
+ # execute the per-test setup function
+ setUp
+
+ # execute the test
+ echo "${_shunit_test_}"
+ eval ${_shunit_test_}
+
+ # execute the per-test tear-down function
+ tearDown
+
+ # update stats
+ if [ ${__shunit_testSuccess} -eq ${SHUNIT_TRUE} ]; then
+ __shunit_testsPassed=`expr ${__shunit_testsPassed} + 1`
+ else
+ __shunit_testsFailed=`expr ${__shunit_testsFailed} + 1`
+ fi
+ done
+
+ unset _shunit_test_
+}
+
+# Generates the user friendly report with appropriate OK/FAILED message.
+#
+# Args:
+# None
+# Output:
+# string: the report of successful and failed tests, as well as totals.
+_shunit_generateReport()
+{
+ _shunit_ok_=${SHUNIT_TRUE}
+
+ # if no exit code was provided, determine an appropriate one
+ [ ${__shunit_testsFailed} -gt 0 \
+ -o ${__shunit_testSuccess} -eq ${SHUNIT_FALSE} ] \
+ && _shunit_ok_=${SHUNIT_FALSE}
+
+ echo
+ if [ ${__shunit_testsTotal} -eq 1 ]; then
+ echo "Ran ${__shunit_testsTotal} test."
+ else
+ echo "Ran ${__shunit_testsTotal} tests."
+ fi
+
+ _shunit_failures_=''
+ _shunit_skipped_=''
+ [ ${__shunit_assertsFailed} -gt 0 ] \
+ && _shunit_failures_="failures=${__shunit_assertsFailed}"
+ [ ${__shunit_assertsSkipped} -gt 0 ] \
+ && _shunit_skipped_="skipped=${__shunit_assertsSkipped}"
+
+ if [ ${_shunit_ok_} -eq ${SHUNIT_TRUE} ]; then
+ _shunit_msg_='OK'
+ [ -n "${_shunit_skipped_}" ] \
+ && _shunit_msg_="${_shunit_msg_} (${_shunit_skipped_})"
+ else
+ _shunit_msg_="FAILED (${_shunit_failures_}"
+ [ -n "${_shunit_skipped_}" ] \
+ && _shunit_msg_="${_shunit_msg_},${_shunit_skipped_}"
+ _shunit_msg_="${_shunit_msg_})"
+ fi
+
+ echo
+ echo ${_shunit_msg_}
+ __shunit_reportGenerated=${SHUNIT_TRUE}
+
+ unset _shunit_failures_ _shunit_msg_ _shunit_ok_ _shunit_skipped_
+}
+
+# Test for whether a function should be skipped.
+#
+# Args:
+# None
+# Returns:
+# boolean: whether the test should be skipped (TRUE/FALSE constant)
+_shunit_shouldSkip()
+{
+ [ ${__shunit_skip} -eq ${SHUNIT_FALSE} ] && return ${SHUNIT_FALSE}
+ _shunit_assertSkip
+}
+
+# Records a successful test.
+#
+# Args:
+# None
+_shunit_assertPass()
+{
+ __shunit_assertsPassed=`expr ${__shunit_assertsPassed} + 1`
+ __shunit_assertsTotal=`expr ${__shunit_assertsTotal} + 1`
+}
+
+# Records a test failure.
+#
+# Args:
+# message: string: failure message to provide user
+_shunit_assertFail()
+{
+ _shunit_msg_=$1
+
+ __shunit_testSuccess=${SHUNIT_FALSE}
+ __shunit_assertsFailed=`expr ${__shunit_assertsFailed} + 1`
+ __shunit_assertsTotal=`expr ${__shunit_assertsTotal} + 1`
+ echo "${__SHUNIT_ASSERT_MSG_PREFIX}${_shunit_msg_}"
+
+ unset _shunit_msg_
+}
+
+# Records a skipped test.
+#
+# Args:
+# None
+_shunit_assertSkip()
+{
+ __shunit_assertsSkipped=`expr ${__shunit_assertsSkipped} + 1`
+ __shunit_assertsTotal=`expr ${__shunit_assertsTotal} + 1`
+}
+
+# Prepare a script filename for sourcing.
+#
+# Args:
+# script: string: path to a script to source
+# Returns:
+# string: filename prefixed with ./ (if necessary)
+_shunit_prepForSourcing()
+{
+ _shunit_script_=$1
+ case "${_shunit_script_}" in
+ /*|./*) echo "${_shunit_script_}" ;;
+ *) echo "./${_shunit_script_}" ;;
+ esac
+ unset _shunit_script_
+}
+
+# Escape a character in a string.
+#
+# Args:
+# c: string: unescaped character
+# s: string: to escape character in
+# Returns:
+# string: with escaped character(s)
+_shunit_escapeCharInStr()
+{
+ [ -n "$2" ] || return # no point in doing work on an empty string
+
+ # Note: using shorter variable names to prevent conflicts with
+ # _shunit_escapeCharactersInString().
+ _shunit_c_=$1
+ _shunit_s_=$2
+
+
+ # escape the character
+ echo ''${_shunit_s_}'' |sed 's/\'${_shunit_c_}'/\\\'${_shunit_c_}'/g'
+
+ unset _shunit_c_ _shunit_s_
+}
+
+# Escape special characters in a string.
+#
+# Args:
+# str: string: to escape characters in
+# Returns:
+# string: with escaped character(s)
+_shunit_escapeCharactersInString()
+{
+ [ -n "$1" ] || return # no point in doing work on an empty string
+
+ _shunit_str_=$1
+
+ # Note: using longer variable names to prevent conflicts with
+ # _shunit_escapeCharInStr().
+ for _shunit_char_ in '"' '$' "'" '`'; do
+ _shunit_str_=`_shunit_escapeCharInStr "${_shunit_char_}" "${_shunit_str_}"`
+ done
+
+ echo "${_shunit_str_}"
+ unset _shunit_char_ _shunit_str_
+}
+
+# Extract list of functions to run tests against.
+#
+# Args:
+# script: string: name of script to extract functions from
+# Returns:
+# string: of function names
+_shunit_extractTestFunctions()
+{
+ _shunit_script_=$1
+
+ # extract the lines with test function names, strip off anything besides the
+ # function name, and output everything on a single line.
+ _shunit_regex_='^[ ]*(function )*test[A-Za-z0-9_]* *\(\)'
+ egrep "${_shunit_regex_}" "${_shunit_script_}" \
+ |sed 's/^[^A-Za-z0-9_]*//;s/^function //;s/\([A-Za-z0-9_]*\).*/\1/g' \
+ |xargs
+
+ unset _shunit_regex_ _shunit_script_
+}
+
+#------------------------------------------------------------------------------
+# main
+#
+
+# determine the operating mode
+if [ $# -eq 0 ]; then
+ __shunit_script=${__SHUNIT_PARENT}
+ __shunit_mode=${__SHUNIT_MODE_SOURCED}
+else
+ __shunit_script=$1
+ [ -r "${__shunit_script}" ] || \
+ _shunit_fatal "unable to read from ${__shunit_script}"
+ __shunit_mode=${__SHUNIT_MODE_STANDALONE}
+fi
+
+# create a temporary storage location
+__shunit_tmpDir=`_shunit_mktempDir`
+
+# provide a public temporary directory for unit test scripts
+# TODO(kward): document this
+SHUNIT_TMPDIR="${__shunit_tmpDir}/tmp"
+mkdir "${SHUNIT_TMPDIR}"
+
+# setup traps to clean up after ourselves
+trap '_shunit_cleanup EXIT' 0
+trap '_shunit_cleanup INT' 2
+trap '_shunit_cleanup TERM' 15
+
+# create phantom functions to work around issues with Cygwin
+_shunit_mktempFunc
+PATH="${__shunit_tmpDir}:${PATH}"
+
+# make sure phantom functions are executable. this will bite if /tmp (or the
+# current $TMPDIR) points to a path on a partition that was mounted with the
+# 'noexec' option. the noexec command was created with _shunit_mktempFunc().
+noexec 2>/dev/null || _shunit_fatal \
+ 'please declare TMPDIR with path on partition with exec permission'
+
+# we must manually source the tests in standalone mode
+if [ "${__shunit_mode}" = "${__SHUNIT_MODE_STANDALONE}" ]; then
+ . "`_shunit_prepForSourcing \"${__shunit_script}\"`"
+fi
+
+# execute the oneTimeSetUp function (if it exists)
+oneTimeSetUp
+
+# execute the suite function defined in the parent test script
+# deprecated as of 2.1.0
+suite
+
+# if no suite function was defined, dynamically build a list of functions
+if [ -z "${__shunit_suite}" ]; then
+ shunit_funcs_=`_shunit_extractTestFunctions "${__shunit_script}"`
+ for shunit_func_ in ${shunit_funcs_}; do
+ suite_addTest ${shunit_func_}
+ done
+fi
+unset shunit_func_ shunit_funcs_
+
+# execute the tests
+_shunit_execSuite
+
+# execute the oneTimeTearDown function (if it exists)
+oneTimeTearDown
+
+# generate the report
+_shunit_generateReport
+
+# that's it folks
+[ ${__shunit_testsFailed} -eq 0 ]
+exit $?
diff --git a/extra/legacy/test/packages/pkg-any-a/PKGBUILD b/extra/legacy/test/packages/pkg-any-a/PKGBUILD
new file mode 100644
index 0000000..8749a35
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-any-a/PKGBUILD
@@ -0,0 +1,12 @@
+pkgname=pkg-any-a
+pkgver=1
+pkgrel=1
+pkgdesc="A package called ${pkgname}"
+arch=('any')
+url='http://www.archlinux.org/'
+license=('GPL')
+
+package() {
+ install -d -m755 ${pkgdir}/usr/share/${pkgname}
+ echo 'test' > ${pkgdir}/usr/share/${pkgname}/test
+}
diff --git a/extra/legacy/test/packages/pkg-any-b/PKGBUILD b/extra/legacy/test/packages/pkg-any-b/PKGBUILD
new file mode 100644
index 0000000..e6a0498
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-any-b/PKGBUILD
@@ -0,0 +1,12 @@
+pkgname=pkg-any-b
+pkgver=1
+pkgrel=1
+pkgdesc="A package called ${pkgname}"
+arch=('any')
+url='http://www.archlinux.org/'
+license=('GPL')
+
+package() {
+ install -d -m755 ${pkgdir}/usr/share/${pkgname}
+ echo 'test' > ${pkgdir}/usr/share/${pkgname}/test
+}
diff --git a/extra/legacy/test/packages/pkg-simple-a/Makefile b/extra/legacy/test/packages/pkg-simple-a/Makefile
new file mode 120000
index 0000000..50be211
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-simple-a/Makefile
@@ -0,0 +1 @@
+../../src/Makefile \ No newline at end of file
diff --git a/extra/legacy/test/packages/pkg-simple-a/PKGBUILD b/extra/legacy/test/packages/pkg-simple-a/PKGBUILD
new file mode 100644
index 0000000..953ecfa
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-simple-a/PKGBUILD
@@ -0,0 +1,22 @@
+pkgname=pkg-simple-a
+pkgver=1
+pkgrel=1
+pkgdesc="A package called ${pkgname}"
+arch=('i686' 'x86_64')
+url='http://www.archlinux.org/'
+license=('GPL')
+depends=('glibc')
+makedepends=('gcc')
+source=('Makefile' 'test.c')
+md5sums=('c6cb8dcc86253355fed559416d0c8dcf'
+ '3c1e4279feb678fd9cabaccdb28e40d0')
+
+build() {
+ cd ${srcdir}
+ make
+}
+
+package() {
+ cd ${srcdir}
+ make install DESTDIR=${pkgdir} DESTBIN=${pkgname}
+}
diff --git a/extra/legacy/test/packages/pkg-simple-a/test.c b/extra/legacy/test/packages/pkg-simple-a/test.c
new file mode 120000
index 0000000..ed5b5ac
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-simple-a/test.c
@@ -0,0 +1 @@
+../../src/test.c \ No newline at end of file
diff --git a/extra/legacy/test/packages/pkg-simple-b/Makefile b/extra/legacy/test/packages/pkg-simple-b/Makefile
new file mode 120000
index 0000000..50be211
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-simple-b/Makefile
@@ -0,0 +1 @@
+../../src/Makefile \ No newline at end of file
diff --git a/extra/legacy/test/packages/pkg-simple-b/PKGBUILD b/extra/legacy/test/packages/pkg-simple-b/PKGBUILD
new file mode 100644
index 0000000..95ffd09
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-simple-b/PKGBUILD
@@ -0,0 +1,22 @@
+pkgname=pkg-simple-b
+pkgver=1
+pkgrel=1
+pkgdesc="A package called ${pkgname}"
+arch=('i686' 'x86_64')
+url='http://www.archlinux.org/'
+license=('GPL')
+depends=('glibc')
+makedepends=('gcc')
+source=('Makefile' 'test.c')
+md5sums=('c6cb8dcc86253355fed559416d0c8dcf'
+ '3c1e4279feb678fd9cabaccdb28e40d0')
+
+build() {
+ cd ${srcdir}
+ make
+}
+
+package() {
+ cd ${srcdir}
+ make install DESTDIR=${pkgdir} DESTBIN=${pkgname}
+}
diff --git a/extra/legacy/test/packages/pkg-simple-b/test.c b/extra/legacy/test/packages/pkg-simple-b/test.c
new file mode 120000
index 0000000..ed5b5ac
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-simple-b/test.c
@@ -0,0 +1 @@
+../../src/test.c \ No newline at end of file
diff --git a/extra/legacy/test/packages/pkg-simple-epoch/Makefile b/extra/legacy/test/packages/pkg-simple-epoch/Makefile
new file mode 120000
index 0000000..50be211
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-simple-epoch/Makefile
@@ -0,0 +1 @@
+../../src/Makefile \ No newline at end of file
diff --git a/extra/legacy/test/packages/pkg-simple-epoch/PKGBUILD b/extra/legacy/test/packages/pkg-simple-epoch/PKGBUILD
new file mode 100644
index 0000000..eebe2bd
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-simple-epoch/PKGBUILD
@@ -0,0 +1,23 @@
+pkgname=pkg-simple-epoch
+pkgver=1
+pkgrel=1
+epoch=1
+pkgdesc="A package called ${pkgname}"
+arch=('i686' 'x86_64')
+url='http://www.archlinux.org/'
+license=('GPL')
+depends=('glibc')
+makedepends=('gcc')
+source=('Makefile' 'test.c')
+md5sums=('c6cb8dcc86253355fed559416d0c8dcf'
+ '3c1e4279feb678fd9cabaccdb28e40d0')
+
+build() {
+ cd ${srcdir}
+ make
+}
+
+package() {
+ cd ${srcdir}
+ make install DESTDIR=${pkgdir} DESTBIN=${pkgname}
+}
diff --git a/extra/legacy/test/packages/pkg-simple-epoch/test.c b/extra/legacy/test/packages/pkg-simple-epoch/test.c
new file mode 120000
index 0000000..ed5b5ac
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-simple-epoch/test.c
@@ -0,0 +1 @@
+../../src/test.c \ No newline at end of file
diff --git a/extra/legacy/test/packages/pkg-split-a/Makefile b/extra/legacy/test/packages/pkg-split-a/Makefile
new file mode 120000
index 0000000..50be211
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-split-a/Makefile
@@ -0,0 +1 @@
+../../src/Makefile \ No newline at end of file
diff --git a/extra/legacy/test/packages/pkg-split-a/PKGBUILD b/extra/legacy/test/packages/pkg-split-a/PKGBUILD
new file mode 100644
index 0000000..e941976
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-split-a/PKGBUILD
@@ -0,0 +1,28 @@
+pkgbase=pkg-split-a
+pkgname=('pkg-split-a1' 'pkg-split-a2')
+pkgver=1
+pkgrel=1
+pkgdesc="A split package called ${pkgbase}"
+arch=('i686' 'x86_64')
+url='http://www.archlinux.org/'
+license=('GPL')
+depends=('glibc')
+makedepends=('gcc')
+source=('Makefile' 'test.c')
+md5sums=('c6cb8dcc86253355fed559416d0c8dcf'
+ '3c1e4279feb678fd9cabaccdb28e40d0')
+
+build() {
+ cd ${srcdir}
+ make
+}
+
+package_pkg-split-a1() {
+ cd ${srcdir}
+ make install DESTDIR=${pkgdir} DESTBIN=${pkgname[0]}
+}
+
+package_pkg-split-a2() {
+ cd ${srcdir}
+ make install DESTDIR=${pkgdir} DESTBIN=${pkgname[1]}
+}
diff --git a/extra/legacy/test/packages/pkg-split-a/test.c b/extra/legacy/test/packages/pkg-split-a/test.c
new file mode 120000
index 0000000..ed5b5ac
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-split-a/test.c
@@ -0,0 +1 @@
+../../src/test.c \ No newline at end of file
diff --git a/extra/legacy/test/packages/pkg-split-b/Makefile b/extra/legacy/test/packages/pkg-split-b/Makefile
new file mode 120000
index 0000000..50be211
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-split-b/Makefile
@@ -0,0 +1 @@
+../../src/Makefile \ No newline at end of file
diff --git a/extra/legacy/test/packages/pkg-split-b/PKGBUILD b/extra/legacy/test/packages/pkg-split-b/PKGBUILD
new file mode 100644
index 0000000..6ddbc45
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-split-b/PKGBUILD
@@ -0,0 +1,29 @@
+pkgbase=pkg-split-b
+pkgname=('pkg-split-b1' 'pkg-split-b2')
+pkgver=1
+pkgrel=1
+pkgdesc="A split package called ${pkgbase}"
+arch=('i686' 'x86_64')
+url='http://www.archlinux.org/'
+license=('GPL')
+
+depends=('glibc')
+makedepends=('gcc')
+source=('Makefile' 'test.c')
+md5sums=('c6cb8dcc86253355fed559416d0c8dcf'
+ '3c1e4279feb678fd9cabaccdb28e40d0')
+
+build() {
+ cd ${srcdir}
+ make
+}
+
+package_pkg-split-b1() {
+ cd ${srcdir}
+ make install DESTDIR=${pkgdir} DESTBIN=${pkgname[0]}
+}
+
+package_pkg-split-b2() {
+ cd ${srcdir}
+ make install DESTDIR=${pkgdir} DESTBIN=${pkgname[1]}
+}
diff --git a/extra/legacy/test/packages/pkg-split-b/test.c b/extra/legacy/test/packages/pkg-split-b/test.c
new file mode 120000
index 0000000..ed5b5ac
--- /dev/null
+++ b/extra/legacy/test/packages/pkg-split-b/test.c
@@ -0,0 +1 @@
+../../src/test.c \ No newline at end of file
diff --git a/extra/legacy/test/rsync_output_sample b/extra/legacy/test/rsync_output_sample
new file mode 100644
index 0000000..72d9cd0
--- /dev/null
+++ b/extra/legacy/test/rsync_output_sample
@@ -0,0 +1,14 @@
+dr-xr-sr-x 4096 2010/09/11 11:37:10 .
+-rw-r--r-- 11 2011/02/08 00:00:01 lastsync
+drwxrwxr-x 15 2010/09/11 11:28:50 community-staging
+drwxrwxr-x 30 2010/09/11 11:28:50 community-staging/os
+drwxrwxr-x 8192 2011/02/07 17:00:01 community-staging/os/i686
+lrwxrwxrwx 52 2010/12/23 16:51:01 community-staging/os/i686/alex-2.3.4-1-i686.pkg.tar.xz -> ../../../pool/community/alex-2.3.4-1-i686.pkg.tar.xz
+lrwxrwxrwx 27 2011/02/07 14:02:54 community-staging/os/i686/community-staging.db -> community-staging.db.tar.gz
+-rw-rw-r-- 2237 2011/02/07 14:02:54 community-staging/os/i686/community-staging.db.tar.gz
+-rw-rw-r-- 3209 2011/02/07 14:00:13 community-staging/os/i686/community-staging.db.tar.gz.old
+drwxrwxr-x 15 2009/07/22 15:07:56 community
+drwxrwxr-x 40 2009/08/04 15:57:42 community/os
+drwxrwsr-x 36864 2011/02/03 05:00:01 community/os/any
+-rw-rw-r-- 303336 2010/07/16 10:06:28 community/os/any/any2dvd-0.34-4-any.pkg.tar.xz
+-rw-rw-r-- 221664 2010/03/28 15:55:48 community/os/x86_64/gmime22-2.2.26-1-x86_64.pkg.tar.xz
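
The sample above matches the listing format that pkginfo_from_rsync_output in test_filter.py parses: permissions, size, date, time, path. A listing of this shape can be regenerated with something like the following (the mirror URL is a placeholder):

    rsync --no-motd --list-only --recursive rsync://mirror.example.org/community/ > rsync_output_sample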
diff --git a/extra/legacy/test/runTest b/extra/legacy/test/runTest
new file mode 100755
index 0000000..b8713d8
--- /dev/null
+++ b/extra/legacy/test/runTest
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+. "$(dirname ${BASH_SOURCE[0]})/lib/common.inc"
+
+for t in "$(dirname ${BASH_SOURCE[0]})/test.d/"*.sh; do
+ l=$(basename ${t} .sh)
+ if [ -x ${t} ]; then
+ msg "Running test '${l}'"
+ ${t}
+ [ $? -ne 0 ] && die "Test '${l}' failed"
+ echo -e "\n\n\n"
+ else
+ warning "Skipping test ${l}"
+ fi
+done
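
runTest executes every executable script under test.d/ and aborts on the first failing suite via die. Since each suite sources shunit2 itself, a single suite can also be run standalone:

    cd extra/legacy/test
    ./runTest               # full run
    ./test.d/db-update.sh   # one suite in isolation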
diff --git a/extra/legacy/test/src/Makefile b/extra/legacy/test/src/Makefile
new file mode 100644
index 0000000..105b730
--- /dev/null
+++ b/extra/legacy/test/src/Makefile
@@ -0,0 +1,5 @@
+all:
+ gcc $(CFLAGS) -o test test.c
+
+install:
+ install -D -m755 test $(DESTDIR)/usr/bin/$(DESTBIN)
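
DESTDIR and DESTBIN are the two knobs the fixture PKGBUILDs turn: DESTDIR is the packaging root and DESTBIN renames the installed binary, which is how the split fixtures ship distinct /usr/bin entries from one source tree. A quick check with throwaway paths:

    make all
    make install DESTDIR=/tmp/pkgroot DESTBIN=pkg-simple-a
    test -x /tmp/pkgroot/usr/bin/pkg-simple-a && echo ok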
diff --git a/extra/legacy/test/src/test.c b/extra/legacy/test/src/test.c
new file mode 100644
index 0000000..a661689
--- /dev/null
+++ b/extra/legacy/test/src/test.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+int main(void) {
+ printf("Arch is the best!\n");
+ return EXIT_SUCCESS;
+}
diff --git a/extra/legacy/test/test.d/create-filelists.sh b/extra/legacy/test/test.d/create-filelists.sh
new file mode 100755
index 0000000..49734c4
--- /dev/null
+++ b/extra/legacy/test/test.d/create-filelists.sh
@@ -0,0 +1,105 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testCreateSimpleFileLists() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b' 'pkg-simple-epoch')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+ ../db-update
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep -q "usr/bin/${pkgbase}"; then
+ fail "usr/bin/${pkgbase} not found in ${arch}/extra${FILESEXT}"
+ fi
+ done
+ done
+}
+
+testCreateAnyFileLists() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-any-a' 'pkg-any-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ releasePackage extra ${pkgbase} any
+ done
+ ../db-update
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep -q "usr/share/${pkgbase}/test"; then
+ fail "usr/share/${pkgbase}/test not found in ${arch}/extra${FILESEXT}"
+ fi
+ done
+ done
+}
+
+testCreateSplitFileLists() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-split-a' 'pkg-split-b')
+ local pkg
+ local pkgbase
+ local pkgname
+ local pkgnames
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+ ../db-update
+
+ for pkgbase in ${pkgs[@]}; do
+ pkgnames=($(source "${TMP}/svn-packages-copy/${pkgbase}/trunk/PKGBUILD"; echo ${pkgname[@]}))
+ for pkgname in ${pkgnames[@]}; do
+ for arch in ${arches[@]}; do
+ if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep -q "usr/bin/${pkgname}"; then
+ fail "usr/bin/${pkgname} not found in ${arch}/extra${FILESEXT}"
+ fi
+ done
+ done
+ done
+}
+
+testCleanupFileLists() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+ ../db-update
+
+ for arch in ${arches[@]}; do
+ ../db-remove extra ${arch} pkg-simple-a
+ done
+
+ for arch in ${arches[@]}; do
+ if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep -q "usr/bin/pkg-simple-b"; then
+ fail "usr/bin/pkg-simple-b not found in ${arch}/extra${FILESEXT}"
+ fi
+ if bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep -q "usr/bin/pkg-simple-a"; then
+ fail "usr/bin/pkg-simple-a still found in ${arch}/extra${FILESEXT}"
+ fi
+ done
+}
+
+. "${curdir}/../lib/shunit2"
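
All four tests assert by streaming the generated files database to stdout with bsdtar -xOf and grepping for expected paths; the same one-liner works for ad-hoc inspection. FTP_BASE and FILESEXT come from the test configuration (FILESEXT being the .files database suffix):

    bsdtar -xOf "${FTP_BASE}/extra/os/i686/extra${FILESEXT}" | grep 'usr/bin/pkg-simple-b'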
diff --git a/extra/legacy/test/test.d/db-move.sh b/extra/legacy/test/test.d/db-move.sh
new file mode 100755
index 0000000..9d7c1f6
--- /dev/null
+++ b/extra/legacy/test/test.d/db-move.sh
@@ -0,0 +1,122 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testMoveSimplePackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage testing ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ ../db-move testing extra pkg-simple-a
+
+ for arch in ${arches[@]}; do
+ checkPackage extra pkg-simple-a-1-1-${arch}.pkg.tar.xz ${arch}
+ checkRemovedPackage testing pkg-simple-a-1-1-${arch}.pkg.tar.xz ${arch}
+
+ checkPackage testing pkg-simple-b-1-1-${arch}.pkg.tar.xz ${arch}
+ done
+}
+
+testMoveMultiplePackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage testing ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ ../db-move testing extra pkg-simple-a pkg-simple-b
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ checkPackage extra ${pkgbase}-1-1-${arch}.pkg.tar.xz ${arch}
+ checkRemovedPackage testing ${pkgbase}-1-1-${arch}.pkg.tar.xz ${arch}
+ done
+ done
+}
+
+testMoveEpochPackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-epoch')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage testing ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ ../db-move testing extra pkg-simple-epoch
+
+ for arch in ${arches[@]}; do
+ checkPackage extra pkg-simple-epoch-1:1-1-${arch}.pkg.tar.xz ${arch}
+ checkRemovedPackage testing pkg-simple-epoch-1:1-1-${arch}.pkg.tar.xz ${arch}
+ done
+}
+
+testMoveAnyPackages() {
+ local pkgs=('pkg-any-a' 'pkg-any-b')
+ local pkgbase
+
+ for pkgbase in ${pkgs[@]}; do
+ releasePackage testing ${pkgbase} any
+ done
+
+ ../db-update
+ ../db-move testing extra pkg-any-a
+
+ checkAnyPackage extra pkg-any-a-1-1-any.pkg.tar.xz
+ checkRemovedAnyPackage testing pkg-any-a
+ checkAnyPackage testing pkg-any-b-1-1-any.pkg.tar.xz
+}
+
+testMoveSplitPackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-split-a' 'pkg-split-b')
+ local pkg
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage testing ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+ ../db-move testing extra pkg-split-a
+
+ for arch in ${arches[@]}; do
+ for pkg in "${pkgdir}/pkg-split-a"/*-${arch}${PKGEXT}; do
+ checkPackage extra $(basename ${pkg}) ${arch}
+ done
+ done
+ for arch in ${arches[@]}; do
+ for pkg in "${pkgdir}/pkg-split-b"/*-${arch}${PKGEXT}; do
+ checkPackage testing $(basename ${pkg}) ${arch}
+ done
+ done
+
+ checkRemovedAnyPackage testing pkg-split-a
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/legacy/test/test.d/db-remove.sh b/extra/legacy/test/test.d/db-remove.sh
new file mode 100755
index 0000000..416e693
--- /dev/null
+++ b/extra/legacy/test/test.d/db-remove.sh
@@ -0,0 +1,77 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testRemovePackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b' 'pkg-split-a' 'pkg-split-b' 'pkg-simple-epoch')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ ../db-remove extra ${arch} ${pkgbase}
+ done
+ done
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ checkRemovedPackage extra ${pkgbase} ${arch}
+ done
+ done
+}
+
+testRemoveMultiplePackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b' 'pkg-split-a' 'pkg-split-b' 'pkg-simple-epoch')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ for arch in ${arches[@]}; do
+ ../db-remove extra ${arch} ${pkgs[@]}
+ done
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ checkRemovedPackage extra ${pkgbase} ${arch}
+ done
+ done
+}
+
+testRemoveAnyPackages() {
+ local pkgs=('pkg-any-a' 'pkg-any-b')
+ local pkgbase
+
+ for pkgbase in ${pkgs[@]}; do
+ releasePackage extra ${pkgbase} any
+ done
+
+ ../db-update
+
+ for pkgbase in ${pkgs[@]}; do
+ ../db-remove extra any ${pkgbase}
+ done
+
+ for pkgbase in ${pkgs[@]}; do
+ checkRemovedAnyPackage extra ${pkgbase}
+ done
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/legacy/test/test.d/db-repo-add.sh b/extra/legacy/test/test.d/db-repo-add.sh
new file mode 100755
index 0000000..8603104
--- /dev/null
+++ b/extra/legacy/test/test.d/db-repo-add.sh
@@ -0,0 +1,54 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testAddSimplePackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ cp "${pkgdir}/${pkgbase}/${pkgbase}-1-1-${arch}.pkg.tar.xz" "${FTP_BASE}/${PKGPOOL}/"
+ touch "${FTP_BASE}/${PKGPOOL}/${pkgbase}-1-1-${arch}.pkg.tar.xz.sig"
+ ln -s "${FTP_BASE}/${PKGPOOL}/${pkgbase}-1-1-${arch}.pkg.tar.xz" "${FTP_BASE}/extra/os/${arch}/"
+ ln -s "${FTP_BASE}/${PKGPOOL}/${pkgbase}-1-1-${arch}.pkg.tar.xz.sig" "${FTP_BASE}/extra/os/${arch}/"
+ ../db-repo-add extra ${arch} ${pkgbase}-1-1-${arch}.pkg.tar.xz
+ done
+ done
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ checkPackageDB extra ${pkgbase}-1-1-${arch}.pkg.tar.xz ${arch}
+ done
+ done
+}
+
+testAddMultiplePackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b')
+ local pkgbase
+ local arch
+
+ for arch in ${arches[@]}; do
+ add_pkgs=()
+ for pkgbase in ${pkgs[@]}; do
+ cp "${pkgdir}/${pkgbase}/${pkgbase}-1-1-${arch}.pkg.tar.xz" "${FTP_BASE}/${PKGPOOL}/"
+ touch "${FTP_BASE}/${PKGPOOL}/${pkgbase}-1-1-${arch}.pkg.tar.xz.sig"
+ ln -s "${FTP_BASE}/${PKGPOOL}/${pkgbase}-1-1-${arch}.pkg.tar.xz" "${FTP_BASE}/extra/os/${arch}/"
+ ln -s "${FTP_BASE}/${PKGPOOL}/${pkgbase}-1-1-${arch}.pkg.tar.xz.sig" "${FTP_BASE}/extra/os/${arch}/"
+ add_pkgs[${#add_pkgs[*]}]=${pkgbase}-1-1-${arch}.pkg.tar.xz
+ done
+ ../db-repo-add extra ${arch} ${add_pkgs[@]}
+ done
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ checkPackageDB extra ${pkgbase}-1-1-${arch}.pkg.tar.xz ${arch}
+ done
+ done
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/legacy/test/test.d/db-repo-remove.sh b/extra/legacy/test/test.d/db-repo-remove.sh
new file mode 100755
index 0000000..315d63d
--- /dev/null
+++ b/extra/legacy/test/test.d/db-repo-remove.sh
@@ -0,0 +1,58 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testRemovePackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b' 'pkg-simple-epoch')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ ../db-repo-remove extra ${arch} ${pkgbase}
+ done
+ done
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ checkRemovedPackageDB extra ${pkgbase} ${arch}
+ done
+ done
+}
+
+testRemoveMultiplePackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b' 'pkg-simple-epoch')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ for arch in ${arches[@]}; do
+ ../db-repo-remove extra ${arch} ${pkgs[@]}
+ done
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ checkRemovedPackageDB extra ${pkgbase} ${arch}
+ done
+ done
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/legacy/test/test.d/db-update.sh b/extra/legacy/test/test.d/db-update.sh
new file mode 100755
index 0000000..e38c328
--- /dev/null
+++ b/extra/legacy/test/test.d/db-update.sh
@@ -0,0 +1,165 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testAddSimplePackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ checkPackage extra ${pkgbase}-1-1-${arch}.pkg.tar.xz ${arch}
+ done
+ done
+}
+
+testAddSingleSimplePackage() {
+ releasePackage extra 'pkg-simple-a' 'i686'
+ ../db-update
+ checkPackage extra 'pkg-simple-a-1-1-i686.pkg.tar.xz' 'i686'
+}
+
+testAddSingleEpochPackage() {
+ releasePackage extra 'pkg-simple-epoch' 'i686'
+ ../db-update
+ checkPackage extra 'pkg-simple-epoch-1:1-1-i686.pkg.tar.xz' 'i686'
+}
+
+testAddAnyPackages() {
+ local pkgs=('pkg-any-a' 'pkg-any-b')
+ local pkgbase
+
+ for pkgbase in ${pkgs[@]}; do
+ releasePackage extra ${pkgbase} any
+ done
+
+ ../db-update
+
+ for pkgbase in ${pkgs[@]}; do
+ checkAnyPackage extra ${pkgbase}-1-1-any.pkg.tar.xz
+ done
+}
+
+testAddSplitPackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-split-a' 'pkg-split-b')
+ local pkg
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ for pkg in "${pkgdir}/${pkgbase}"/*-${arch}${PKGEXT}; do
+ checkPackage extra $(basename ${pkg}) ${arch}
+ done
+ done
+ done
+}
+
+testUpdateAnyPackage() {
+ releasePackage extra pkg-any-a any
+ ../db-update
+
+ pushd "${TMP}/svn-packages-copy/pkg-any-a/trunk/" >/dev/null
+ sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
+ svn commit -q -m"update pkg to pkgrel=2" >/dev/null
+ sudo extra-i686-build >/dev/null 2>&1
+ mv pkg-any-a-1-2-any.pkg.tar.xz "${pkgdir}/pkg-any-a/"
+ popd >/dev/null
+
+ releasePackage extra pkg-any-a any
+ ../db-update
+
+ checkAnyPackage extra pkg-any-a-1-2-any.pkg.tar.xz any
+
+ rm -f "${pkgdir}/pkg-any-a/pkg-any-a-1-2-any.pkg.tar.xz"
+}
+
+testUpdateAnyPackageToDifferentRepositoriesAtOnce() {
+ releasePackage extra pkg-any-a any
+
+ pushd "${TMP}/svn-packages-copy/pkg-any-a/trunk/" >/dev/null
+ sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
+ svn commit -q -m"update pkg to pkgrel=2" >/dev/null
+ sudo extra-i686-build >/dev/null 2>&1
+ mv pkg-any-a-1-2-any.pkg.tar.xz "${pkgdir}/pkg-any-a/"
+ popd >/dev/null
+
+ releasePackage testing pkg-any-a any
+
+ ../db-update
+
+ checkAnyPackage extra pkg-any-a-1-1-any.pkg.tar.xz any
+ checkAnyPackage testing pkg-any-a-1-2-any.pkg.tar.xz any
+
+ rm -f "${pkgdir}/pkg-any-a/pkg-any-a-1-2-any.pkg.tar.xz"
+}
+
+testUpdateSameAnyPackageToSameRepository() {
+ releasePackage extra pkg-any-a any
+ ../db-update
+ checkAnyPackage extra pkg-any-a-1-1-any.pkg.tar.xz any
+
+ releasePackage extra pkg-any-a any
+ ../db-update >/dev/null 2>&1 && (fail 'Adding an existing package to the same repository should fail'; return 1)
+}
+
+testUpdateSameAnyPackageToDifferentRepositories() {
+ releasePackage extra pkg-any-a any
+ ../db-update
+ checkAnyPackage extra pkg-any-a-1-1-any.pkg.tar.xz any
+
+ releasePackage testing pkg-any-a any
+ ../db-update >/dev/null 2>&1 && (fail 'Adding an existing package to another repository should fail'; return 1)
+
+ local arch
+ for arch in i686 x86_64; do
+ ( [ -r "${FTP_BASE}/testing/os/${arch}/testing${DBEXT%.tar.*}" ] \
+ && bsdtar -xf "${FTP_BASE}/testing/os/${arch}/testing${DBEXT%.tar.*}" -O | grep -q ${pkgbase}) \
+ && fail "${pkgbase} should not be in testing/os/${arch}/testing${DBEXT%.tar.*}"
+ done
+}
+
+testAddIncompleteSplitPackage() {
+ local arches=('i686' 'x86_64')
+ local repo='extra'
+ local pkgbase='pkg-split-a'
+ local arch
+
+ for arch in ${arches[@]}; do
+ releasePackage ${repo} ${pkgbase} ${arch}
+ done
+
+ # remove a split package to make db-update fail
+ rm "${STAGING}"/extra/${pkgbase}1-*
+
+ ../db-update >/dev/null 2>&1 && fail "db-update should fail when a split package is missing!"
+
+ for arch in ${arches[@]}; do
+ ( [ -r "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" ] \
+ && bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" -O | grep -q ${pkgbase}) \
+ && fail "${pkgbase} should not be in ${repo}/os/${arch}/${repo}${DBEXT%.tar.*}"
+ done
+}
+
+. "${curdir}/../lib/shunit2"
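
Several assertions above derive the uncompressed database name with ${DBEXT%.tar.*}, trimming the shortest suffix that matches .tar.*. With the usual configuration value the expansion works out as:

    DBEXT='.db.tar.gz'
    echo "testing${DBEXT%.tar.*}"   # prints: testing.db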
diff --git a/extra/legacy/test/test.d/ftpdir-cleanup.sh b/extra/legacy/test/test.d/ftpdir-cleanup.sh
new file mode 100755
index 0000000..20026b4
--- /dev/null
+++ b/extra/legacy/test/test.d/ftpdir-cleanup.sh
@@ -0,0 +1,121 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testCleanupSimplePackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ for arch in ${arches[@]}; do
+ ../db-remove extra ${arch} pkg-simple-a
+ done
+
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ for arch in ${arches[@]}; do
+ local pkg1="pkg-simple-a-1-1-${arch}.pkg.tar.xz"
+ checkRemovedPackage extra 'pkg-simple-a' ${arch}
+ [ -f "${FTP_BASE}/${PKGPOOL}/${pkg1}" ] && fail "${PKGPOOL}/${pkg1} found"
+ [ -f "${FTP_BASE}/${repo}/os/${arch}/${pkg1}" ] && fail "${repo}/os/${arch}/${pkg1} found"
+
+ local pkg2="pkg-simple-b-1-1-${arch}.pkg.tar.xz"
+ checkPackage extra ${pkg2} ${arch}
+ done
+}
+
+testCleanupEpochPackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-epoch')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ for arch in ${arches[@]}; do
+ ../db-remove extra ${arch} pkg-simple-epoch
+ done
+
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ for arch in ${arches[@]}; do
+ local pkg1="pkg-simple-epoch-1:1-1-${arch}.pkg.tar.xz"
+ checkRemovedPackage extra 'pkg-simple-epoch' ${arch}
+ [ -f "${FTP_BASE}/${PKGPOOL}/${pkg1}" ] && fail "${PKGPOOL}/${pkg1} found"
+ [ -f "${FTP_BASE}/${repo}/os/${arch}/${pkg1}" ] && fail "${repo}/os/${arch}/${pkg1} found"
+ done
+}
+
+testCleanupAnyPackages() {
+ local pkgs=('pkg-any-a' 'pkg-any-b')
+ local pkgbase
+ local arch='any'
+
+ for pkgbase in ${pkgs[@]}; do
+ releasePackage extra ${pkgbase} any
+ done
+
+ ../db-update
+ ../db-remove extra any pkg-any-a
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ local pkg1='pkg-any-a-1-1-any.pkg.tar.xz'
+ checkRemovedAnyPackage extra 'pkg-any-a'
+ [ -f "${FTP_BASE}/${PKGPOOL}/${pkg1}" ] && fail "${PKGPOOL}/${pkg1} found"
+ [ -f "${FTP_BASE}/${repo}/os/${arch}/${pkg1}" ] && fail "${repo}/os/${arch}/${pkg1} found"
+
+ local pkg2="pkg-any-b-1-1-${arch}.pkg.tar.xz"
+ checkAnyPackage extra ${pkg2}
+}
+
+testCleanupSplitPackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-split-a' 'pkg-split-b')
+ local pkg
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ for arch in ${arches[@]}; do
+ ../db-remove extra ${arch} ${pkgs[0]}
+ done
+
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ for arch in ${arches[@]}; do
+ for pkg in "${pkgdir}/${pkgs[0]}"/*-${arch}${PKGEXT}; do
+ checkRemovedPackage extra ${pkgs[0]} ${arch}
+ [ -f "${FTP_BASE}/${PKGPOOL}/${pkg}" ] && fail "${PKGPOOL}/${pkg} found"
+ [ -f "${FTP_BASE}/${repo}/os/${arch}/${pkg}" ] && fail "${repo}/os/${arch}/${pkg} found"
+ done
+
+ for pkg in "${pkgdir}/${pkgs[1]}"/*-${arch}${PKGEXT}; do
+ checkPackage extra $(basename ${pkg}) ${arch}
+ done
+ done
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/legacy/test/test.d/packages.sh b/extra/legacy/test/test.d/packages.sh
new file mode 100755
index 0000000..488cb15
--- /dev/null
+++ b/extra/legacy/test/test.d/packages.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testPackages() {
+ # TODO: namcap -r sodepends fails with i686 packages
+ find "${pkgdir}" -name "*${PKGEXT}" -exec namcap -e sodepends,pkgnameindesc {} + || fail 'namcap failed'
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/legacy/test/test.d/pool-transition.sh b/extra/legacy/test/test.d/pool-transition.sh
new file mode 100755
index 0000000..5873f00
--- /dev/null
+++ b/extra/legacy/test/test.d/pool-transition.sh
@@ -0,0 +1,152 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testMovePackagesWithoutPool() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b' 'pkg-split-a' 'pkg-split-b')
+ local pkgbase
+ local arch
+ local pkg
+ local old
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage testing ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ # transform two packages to old style layout
+ for arch in ${arches[@]}; do
+ for old in 0 2; do
+ for pkg in "${pkgdir}/${pkgs[${old}]}"/*-${arch}${PKGEXT}; do
+ pkg=$(basename $pkg)
+ mv -f "${FTP_BASE}/${PKGPOOL}/${pkg}" "${FTP_BASE}/testing/os/${arch}/${pkg}"
+ done
+ done
+ done
+
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ ../db-move testing extra ${pkgs[@]}
+
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ for pkg in "${pkgdir}/${pkgbase}"/*-${arch}${PKGEXT}; do
+ checkPackage extra $(basename ${pkg}) ${arch}
+ done
+ checkRemovedPackage testing ${pkgbase} ${arch}
+ done
+ done
+}
+
+testUpdateAnyPackageWithoutPool() {
+ local pkgname='pkg-any-a'
+ local pkg1='pkg-any-a-1-1-any.pkg.tar.xz'
+ local pkg2='pkg-any-a-1-2-any.pkg.tar.xz'
+ local arch
+
+ releasePackage extra pkg-any-a any
+ ../db-update
+ # transform two packages to old style layout
+ mv -f "${FTP_BASE}/${PKGPOOL}/${pkg1}" "${FTP_BASE}/extra/os/any"
+ for arch in i686 x86_64; do
+ ln -sf "../any/${pkg1}" "${FTP_BASE}/extra/os/${arch}"
+ done
+
+ pushd "${TMP}/svn-packages-copy/${pkgname}/trunk/" >/dev/null
+ sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
+ svn commit -q -m"update pkg to pkgrel=2" >/dev/null
+ sudo extra-i686-build >/dev/null 2>&1
+ mv "${pkg2}" "${pkgdir}/${pkgname}/"
+ popd >/dev/null
+
+ releasePackage extra ${pkgname} any
+ ../db-update
+ rm -f "${pkgdir}/${pkgname}/${pkg2}"
+
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ checkAnyPackage extra "${pkg2}"
+
+ [ -f "${FTP_BASE}/${PKGPOOL}/${pkg1}" ] && fail "${PKGPOOL}/${pkg1} found"
+ for arch in any i686 x86_64; do
+ [ -f "${FTP_BASE}/extra/os/${arch}/${pkg1}" ] && fail "extra/os/${arch}/${pkg1} found"
+ done
+}
+
+testMoveAnyPackagesWithoutPool() {
+ local pkgs=('pkg-any-a' 'pkg-any-b')
+ local pkgbase
+ local arch
+ local pkg
+
+ for pkgbase in ${pkgs[@]}; do
+ releasePackage testing ${pkgbase} any
+ done
+
+ ../db-update
+
+ # transform a package to old style layout
+ for pkg in "${pkgdir}/${pkgs[0]}"/*-any${PKGEXT}; do
+ pkg=$(basename $pkg)
+ mv -f "${FTP_BASE}/${PKGPOOL}/${pkg}" "${FTP_BASE}/testing/os/any/${pkg}"
+ for arch in i686 x86_64; do
+ ln -sf "../any/${pkg}" "${FTP_BASE}/testing/os/${arch}/${pkg}"
+ done
+ done
+
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ ../db-move testing extra ${pkgs[@]}
+
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ for pkgbase in ${pkgs[@]}; do
+ for pkg in "${pkgdir}/${pkgbase}"/*-any${PKGEXT}; do
+ checkAnyPackage extra $(basename ${pkg})
+ done
+ checkRemovedAnyPackage testing ${pkgbase}
+ done
+
+ for pkg in "${pkgdir}/${pkgs[0]}"/*-any${PKGEXT}; do
+ pkg=$(basename $pkg)
+ for arch in any i686 x86_64; do
+ [ -f "${FTP_BASE}/testing/os/${arch}/${pkg}" ] && fail "testing/os/${arch}/${pkg} found"
+ done
+ done
+}
+
+testUpdateSameAnyPackageToDifferentRepositoriesWithoutPool() {
+ local pkg
+ local arch
+
+ releasePackage extra pkg-any-a any
+ ../db-update
+
+ # transform a package to old style layout
+ for pkg in "${pkgdir}/pkg-any-a"/*-any${PKGEXT}; do
+ pkg=$(basename $pkg)
+ mv -f "${FTP_BASE}/${PKGPOOL}/${pkg}" "${FTP_BASE}/extra/os/any/${pkg}"
+ for arch in i686 x86_64; do
+ ln -sf "../any/${pkg}" "${FTP_BASE}/extra/os/${arch}/${pkg}"
+ done
+ done
+
+ releasePackage testing pkg-any-a any
+ ../db-update >/dev/null 2>&1 && (fail 'Adding an existing package to another repository should fail'; return 1)
+
+ for arch in i686 x86_64; do
+ ( [ -r "${FTP_BASE}/testing/os/${arch}/testing${DBEXT%.tar.*}" ] \
+ && bsdtar -xf "${FTP_BASE}/testing/os/${arch}/testing${DBEXT%.tar.*}" -O | grep -q pkg-any-a) \
+ && fail "pkg-any-a should not be in testing/os/${arch}/testing${DBEXT%.tar.*}"
+ done
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/legacy/test/test.d/signed-packages.sh b/extra/legacy/test/test.d/signed-packages.sh
new file mode 100755
index 0000000..5d6f4ff
--- /dev/null
+++ b/extra/legacy/test/test.d/signed-packages.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testAddUnsignedPackage() {
+ releasePackage extra 'pkg-simple-a' 'i686'
+ # remove any signature
+ rm "${STAGING}"/extra/*.sig
+ ../db-update >/dev/null 2>&1 && fail "db-update should fail when a signature is missing!"
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/legacy/test/test.d/sourceballs.sh b/extra/legacy/test/test.d/sourceballs.sh
new file mode 100755
index 0000000..fdcf08c
--- /dev/null
+++ b/extra/legacy/test/test.d/sourceballs.sh
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testSourceballs() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b' 'pkg-simple-epoch')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+ ../db-update
+
+ ../cron-jobs/sourceballs
+ for pkgbase in ${pkgs[@]}; do
+ [ ! -r ${FTP_BASE}/${SRCPOOL}/${pkgbase}-*${SRCEXT} ] && fail "source package not found!"
+ done
+}
+
+testAnySourceballs() {
+ local pkgs=('pkg-any-a' 'pkg-any-b')
+ local pkgbase
+
+ for pkgbase in ${pkgs[@]}; do
+ releasePackage extra ${pkgbase} any
+ done
+ ../db-update
+
+ ../cron-jobs/sourceballs
+ for pkgbase in ${pkgs[@]}; do
+ [ ! -r ${FTP_BASE}/${SRCPOOL}/${pkgbase}-*${SRCEXT} ] && fail "source package not found!"
+ done
+}
+
+testSplitSourceballs() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-split-a' 'pkg-split-b')
+ local pkg
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ ../cron-jobs/sourceballs
+ for pkgbase in ${pkgs[@]}; do
+ [ ! -r ${FTP_BASE}/${SRCPOOL}/${pkgbase}-*${SRCEXT} ] && fail "source package not found!"
+ done
+}
+
+testSourceballsCleanup() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+ ../db-update
+ ../cron-jobs/sourceballs
+
+ for arch in ${arches[@]}; do
+ ../db-remove extra ${arch} pkg-simple-a
+ done
+
+ ../cron-jobs/sourceballs
+ [ -r ${FTP_BASE}/${SRCPOOL}/pkg-simple-a-*${SRCEXT} ] && fail "source package was not removed!"
+ [ ! -r ${FTP_BASE}/${SRCPOOL}/pkg-simple-b-*${SRCEXT} ] && fail "source package not found!"
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/legacy/test/test.d/testing2x.sh b/extra/legacy/test/test.d/testing2x.sh
new file mode 100755
index 0000000..eda6cd6
--- /dev/null
+++ b/extra/legacy/test/test.d/testing2x.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testTesting2xAnyPackage() {
+ releasePackage core pkg-any-a any
+ ../db-update
+
+ pushd "${TMP}/svn-packages-copy/pkg-any-a/trunk/" >/dev/null
+ sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
+ svn commit -q -m"update pkg to pkgrel=2" >/dev/null
+ sudo extra-i686-build >/dev/null 2>&1
+ mv pkg-any-a-1-2-any.pkg.tar.xz "${pkgdir}/pkg-any-a/"
+ popd >/dev/null
+
+ releasePackage testing pkg-any-a any
+ ../db-update
+ rm -f "${pkgdir}/pkg-any-a/pkg-any-a-1-2-any.pkg.tar.xz"
+
+ ../testing2x pkg-any-a
+
+ checkAnyPackage core pkg-any-a-1-2-any.pkg.tar.xz any
+ checkRemovedAnyPackage testing pkg-any-a
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/extra/legacy/test/test_filter.py b/extra/legacy/test/test_filter.py
new file mode 100644
index 0000000..d8006f9
--- /dev/null
+++ b/extra/legacy/test/test_filter.py
@@ -0,0 +1,196 @@
+# -*- encoding: utf-8 -*-
+"""Unit tests for the repm.filter package-information parsers."""
+
+__author__ = "Joshua Ismael Haase Hernández <hahj87@gmail.com>"
+__version__ = "$Revision: 1.1 $"
+__date__ = "$Date: 2011/02/08 $"
+__copyright__ = "Copyright (c) 2011 Joshua Ismael Haase Hernández"
+__license__ = "GPL3+"
+
+from repm.config import *
+from repm.filter import *
+import unittest
+
+class pkginfo_from_file_KnownValues(unittest.TestCase):
+ # (filename, name, version, release, arch)
+ # filename is location
+ known=(
+ ("community-testing/os/i686/inputattach-1.24-3-i686.pkg.tar.xz","inputattach","1.24","3","i686"),
+ ("community-testing/os/i686/ngspice-22-1-i686.pkg.tar.xz","ngspice","22","1","i686"),
+ ("community-testing/os/i686/tmux-1.4-2-i686.pkg.tar.xz","tmux","1.4","2","i686"),
+ ("community-testing/os/i686/tor-0.2.1.29-2-i686.pkg.tar.xz","tor","0.2.1.29","2","i686"),
+ ("../../../pool/community/tor-0.2.1.29-2-i686.pkg.tar.xz","tor","0.2.1.29","2","i686"),
+ ("community-testing/os/x86_64/inputattach-1.24-3-x86_64.pkg.tar.xz","inputattach","1.24","3","x86_64"),
+ ("../../../pool/community/inputattach-1.24-3-x86_64.pkg.tar.xz","inputattach","1.24","3","x86_64"),
+ ("tor-0.2.1.29-2-x86_64.pkg.tar.xz","tor","0.2.1.29","2","x86_64"),
+ )
+
+ def generate_results(self, example_tuple, attr):
+ location, name, version, release, arch = example_tuple
+ return pkginfo_from_filename(location)[attr], locals()[attr]
+
+ def testReturnPackageObject(self):
+ for i in self.known:
+ location, name, version, release, arch = i
+ self.assertIsInstance(pkginfo_from_filename(location),Package)
+
+ def testNames(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="name")
+ self.assertEqual(k, v)
+
+ def testVersions(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="version")
+ self.assertEqual(k, v)
+
+ def testArchs(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="arch")
+ self.assertEqual(k, v)
+
+ def testReleases(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="release")
+ self.assertEqual(k, v)
+
+ def testLocations(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="location")
+ self.assertEqual(k, v)
+
+class pkginfo_from_file_BadInput(unittest.TestCase):
+ bad=("community-testing/os/i686/community-testing.db",
+ "community-testing/os/i686/community-testing.db.tar.gz",
+ "community-testing/os/i686/community-testing.db.tar.gz.old",
+ "community-testing/os/i686/community-testing.files",
+ "community-testing/os/i686/community-testing.files.tar.gz",
+ "community-testing/os/x86_64")
+
+ def testBadInput(self):
+ for i in self.bad:
+ self.assertRaises(NonValidFile,pkginfo_from_filename,i)
+
+class pkginfoFromRsyncOutput(unittest.TestCase):
+ example_package_list=(Package(),Package(),Package())
+ example_package_list[0].package_info={ "name" : "alex",
+ "version" : "2.3.4",
+ "release" : "1",
+ "arch" : "i686",
+ "license" : False,
+ "location": "community-staging/os/i686/alex-2.3.4-1-i686.pkg.tar.xz",
+ "depends" : False,}
+ example_package_list[1].package_info={ "name" : "any2dvd",
+ "version" : "0.34",
+ "release" : "4",
+ "arch" : "any",
+ "license" : False,
+ "location": "community/os/any/any2dvd-0.34-4-any.pkg.tar.xz",
+ "depends" : False,}
+ example_package_list[2].package_info={ "name" : "gmime22",
+ "version" : "2.2.26",
+ "release" : "1",
+ "arch" : "x86_64",
+ "license" : False,
+ "location": "community/os/x86_64/gmime22-2.2.26-1-x86_64.pkg.tar.xz",
+ "depends" : False,}
+
+    try:
+        output_file = open("rsync_output_sample")
+        rsync_out = output_file.read()
+        output_file.close()
+    except IOError:
+        rsync_out = ""
+        print("There is no rsync_output_sample file")
+
+ pkglist = pkginfo_from_rsync_output(rsync_out)
+
+ def testOutputArePackages(self):
+ if not self.pkglist:
+ self.fail("not pkglist:" + str(self.pkglist))
+ for pkg in self.pkglist:
+ self.assertIsInstance(pkg,Package)
+
+ def testPackageInfo(self):
+ if not self.pkglist:
+ self.fail("Pkglist doesn't exist: " + str(self.pkglist))
+ self.assertEqual(self.pkglist,self.example_package_list)
+
+class generateRsyncBlacklist(unittest.TestCase):
+ example_package_list=(Package(),Package(),Package())
+ example_package_list[0].package_info={ "name" : "alex",
+ "version" : "2.3.4",
+ "release" : "1",
+ "arch" : "i686",
+ "license" : False,
+ "location": "community-staging/os/i686/alex-2.3.4-1-i686.pkg.tar.xz",
+ "depends" : False,}
+ example_package_list[1].package_info={ "name" : "any2dvd",
+ "version" : "0.34",
+ "release" : "4",
+ "arch" : "any",
+ "license" : False,
+ "location": "community/os/any/any2dvd-0.34-4-any.pkg.tar.xz",
+ "depends" : False,}
+ example_package_list[2].package_info={ "name" : "gmime22",
+ "version" : "2.2.26",
+ "release" : "1",
+ "arch" : "x86_64",
+ "license" : False,
+ "location": "community/os/x86_64/gmime22-2.2.26-1-x86_64.pkg.tar.xz",
+ "depends" : False,}
+
+ def testListado(self):
+ self.assertEqual(listado("blacklist_sample"),["alex","gmime22"])
+
+ def testExcludeFiles(self):
+ a=rsyncBlacklist_from_blacklist(self.example_package_list,
+ listado("blacklist_sample"),
+ False)
+ b=[self.example_package_list[0]["location"],self.example_package_list[2]["location"]]
+ self.assertEqual(a,b)
+
+class pkginfo_from_descKnownValues(unittest.TestCase):
+ pkgsample=Package()
+ pkgsample.package_info={"name" : "binutils",
+ "version" : "2.21",
+ "release" : "4",
+ "arch" : "x86_64",
+ "license" : "GPL",
+ "location": "binutils-2.21-4-x86_64.pkg.tar.xz",
+ "depends" : False,}
+ fsock=open("desc")
+ pkggen=pkginfo_from_desc(fsock.read())
+ fsock.close()
+ def testPkginfoFromDesc(self):
+ if self.pkggen is None:
+ self.fail("return value is None")
+ self.assertEqual(self.pkgsample,self.pkggen)
+
+class pkginfo_from_db(unittest.TestCase):
+ archdb = os.path.join("./workdir")
+ example_package_list=(Package(),Package(),Package())
+ example_package_list[0].package_info={ "name" : "acl",
+ "version" : "2.2.49",
+ "release" : "2",
+ "arch" : "x86_64",
+ "license" : ("LGPL",),
+ "location": "acl-2.2.49-2-x86_64.pkg.tar.xz",
+                            "depends" : ("attr>=2.4.41",),}
+ example_package_list[1].package_info={ "name" : "glibc",
+ "version" : "2.13",
+ "release" : "4",
+ "arch" : "x86_64",
+ "license" : ("GPL","LGPL"),
+ "location": "glibc-2.13-4-x86_64.pkg.tar.xz",
+ "depends" : ("linux-api-headers>=2.6.37","tzdata",),}
+ example_package_list[2].package_info={ "name" : "",
+ "version" : "2.2.26",
+ "release" : "1",
+ "arch" : "x86_64",
+ "license" : False,
+ "location": "",
+ "depends" : False,}
+
+
+if __name__ == "__main__":
+ unittest.main()
+
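
The module reads its fixtures (rsync_output_sample, blacklist_sample, desc) relative to the working directory and imports from repm, so it is meant to be run from the test directory with repm on the Python path; for example (Python 2.7, which the assertIsInstance calls require):

    cd extra/legacy/test
    python2 -m unittest test_filter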