author     Luke Shumaker <lukeshu@sbcglobal.net>    2016-04-17 10:45:45 -0400
committer  Luke Shumaker <lukeshu@sbcglobal.net>    2016-04-17 10:45:45 -0400
commit     a79d7c7251f4e6fd2f8b288b5c91a77a66ecff65 (patch)
tree       f44145d6cda8bb20070edf1ce464ad60b6bf3a04
parent     f73a4a8844641b780644b8b96865372dc34936bd (diff)
parent     752bd700e9da464006bfbd9877cc858dfad545ba (diff)
Merge branch 'lukeshu/xbs' into testmerge
# Conflicts:
#	test/lib/common.inc
#	test/test.d/create-filelists.sh
#	test/test.d/db-move.sh
#	test/test.d/db-remove.sh
#	test/test.d/db-repo-add.sh
#	test/test.d/db-repo-remove.sh
#	test/test.d/db-update.sh
#	test/test.d/ftpdir-cleanup.sh
#	test/test.d/sourceballs.sh
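The conflict list above names the files that needed manual resolution. For reference, a merge like this is reproduced with the usual git workflow (a sketch; the actual resolutions are the hunks below):

    $ git checkout testmerge
    $ git merge lukeshu/xbs
    # resolve the conflicts listed above, then:
    $ git add test/lib/common.inc test/test.d/*.sh
    $ git commit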
-rw-r--r--  .gitignore                                    |    6
-rw-r--r--  COPYING                                       |  674
-rw-r--r--  TODO                                          |   10
-rw-r--r--  config                                        |   33
-rw-r--r--  config.local.import-community                 |    5
-rw-r--r--  config.local.import-packages                  |    5
-rw-r--r--  config.local.parabola                         |   14
-rwxr-xr-x  cron-jobs/check_archlinux/check_packages.py  |    2
-rwxr-xr-x  cron-jobs/check_archlinux/parse_pkgbuilds.sh |   64
-rwxr-xr-x  cron-jobs/devlist-mailer                      |   13
-rwxr-xr-x  cron-jobs/ftpdir-cleanup                      |   59
-rwxr-xr-x  cron-jobs/integrity-check                     |    8
-rwxr-xr-x  cron-jobs/make_repo_torrents                  |   70
-rw-r--r--  cron-jobs/makepkg.conf                        |  121
-rwxr-xr-x  cron-jobs/sourceballs                         |   76
-rwxr-xr-x  cron-jobs/update-web-db                       |   78
l---------  cron-jobs/update-web-files-db                 |    1
-rwxr-xr-x  db-check-package-libraries.py                 |  193
-rw-r--r--  db-functions                                  |  295
-rwxr-xr-x  db-import                                     |  288
-rw-r--r--  db-import.conf                                |   23
-rwxr-xr-x  db-init                                       |    6
-rwxr-xr-x  db-list-nonfree.py                            |   28
-rwxr-xr-x  db-list-unsigned-packages                     |   38
-rwxr-xr-x  db-list-unsigned-packages.py                  |   96
-rwxr-xr-x  db-move                                       |  112
-rwxr-xr-x  db-pick-mirror                                |   25
-rwxr-xr-x  db-remove                                     |   49
-rwxr-xr-x  db-repo-add                                   |   30
-rwxr-xr-x  db-repo-remove                                |   28
-rwxr-xr-x  db-update                                     |   98
-rwxr-xr-x  make_individual_torrent                       |   52
-rw-r--r--  test/__init__.py                              |    0
-rw-r--r--  test/blacklist_sample                         |    2
-rw-r--r--  test/core.db.tar.gz                           |  bin 0 -> 1345 bytes
-rw-r--r--  test/depends                                  |    4
-rw-r--r--  test/desc                                     |   39
-rw-r--r--  test/lib/common.inc                           |   41
-rw-r--r--  test/rsync_output_sample                      |   14
-rwxr-xr-x  test/test.d/create-filelists.sh               |   12
-rwxr-xr-x  test/test.d/db-update.sh                      |   10
-rwxr-xr-x  test/test.d/testing2x.sh                      |    2
-rw-r--r--  test/test_filter.py                           |  196
-rwxr-xr-x  testing2x                                     |   61
44 files changed, 2233 insertions(+), 748 deletions(-)
diff --git a/.gitignore b/.gitignore
index f47d96a..98a5228 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,9 @@
*~
+*.pyc
/config.local
test/packages/*/*.pkg.tar.?z
+\#*#
+.#*
+yftime
+src*
+pkg*
diff --git a/COPYING b/COPYING
new file mode 100644
index 0000000..94a9ed0
--- /dev/null
+++ b/COPYING
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+ <program> Copyright (C) <year> <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/TODO b/TODO
new file mode 100644
index 0000000..9dd4b52
--- /dev/null
+++ b/TODO
@@ -0,0 +1,10 @@
+* Test Suite for clean_repo.py
+
+ - Review all repo
+ - Remove all blacklisted packages
+ - Get pending list right
+ - Extract licenses all right
+
+* Fix db-move
+
+ - Make it use abslibre
diff --git a/config b/config
index 3df6c95..bd05ac8 100644
--- a/config
+++ b/config
@@ -1,18 +1,22 @@
-FTP_BASE="/srv/ftp"
-SVNREPO=''
-SVNUSER=''
+#!/hint/bash
+
+case "$USER" in
+ db-import-packages) _name=import-packages;;
+ db-import-community) _name=import-community;;
+ *) _name=parabola;;
+esac
+
+FTP_BASE="/srv/repo/main"
PKGREPOS=()
PKGPOOL=''
SRCPOOL=''
-TESTING_REPO=''
-STABLE_REPOS=()
-CLEANUP_DESTDIR="/var/tmp"
+CLEANUP_DESTDIR="/srv/repo/private/${_name}/package-cleanup"
CLEANUP_DRYRUN=false
# Time in days to keep moved packages
CLEANUP_KEEP=30
-SOURCE_CLEANUP_DESTDIR="/var/tmp"
+SOURCE_CLEANUP_DESTDIR="/srv/repo/private/${_name}/source-cleanup"
SOURCE_CLEANUP_DRYRUN=false
# Time in days to keep moved sourcepackages
SOURCE_CLEANUP_KEEP=14
@@ -22,8 +26,8 @@ REQUIRE_SIGNATURE=true
LOCK_DELAY=10
LOCK_TIMEOUT=300
-STAGING="$HOME/staging"
-TMPDIR="/var/tmp"
+[ -n "${STAGING:-}" ] || STAGING="$HOME/staging/unknown/staging"
+export TMPDIR="${TMPDIR:-/tmp}"
ARCHES=(i686 x86_64)
DBEXT=".db.tar.gz"
FILESEXT=".files.tar.gz"
@@ -31,7 +35,14 @@ PKGEXT=".pkg.tar.?z"
SRCEXT=".src.tar.gz"
# Allowed licenses: get sourceballs only for licenses in this array
-ALLOWED_LICENSES=('GPL' 'GPL1' 'GPL2' 'LGPL' 'LGPL1' 'LGPL2' 'LGPL2.1')
+# Empty (commented out) to get sourceballs for all packages
+#ALLOWED_LICENSES=('GPL' 'GPL1' 'GPL2' 'LGPL' 'LGPL1' 'LGPL2' 'LGPL2.1')
+
+# Where to send error emails, and who they are from
+LIST="dev@lists.parabola.nu"
+FROM="dbscripts+${_name}@$(hostname -f)"
# Override default config with config.local
-[ -f "$(dirname ${BASH_SOURCE[0]})/config.local" ] && . "$(dirname ${BASH_SOURCE[0]})/config.local"
+[ -f "$(dirname "${BASH_SOURCE[0]}")/config.local" ] && . "$(dirname "${BASH_SOURCE[0]}")/config.local"
+[ -f "$(dirname "${BASH_SOURCE[0]}")/config.local.${_name}" ] && . "$(dirname "${BASH_SOURCE[0]}")/config.local.${_name}"
+unset _name
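The new override chain sources config.local first and then config.local.${_name}, where _name is derived from $USER by the case statement at the top of the file, so the per-role file always wins. A minimal sketch of the effect, with hypothetical paths (PKGPOOL='pool/parabola' comes from config.local.parabola further down):

    #!/bin/bash
    PKGPOOL=''                                 # default from config
    [ -f ./config.local ] && . ./config.local  # site-wide overrides, if present
    [ -f ./config.local.parabola ] && . ./config.local.parabola  # per-role, sourced last
    echo "$PKGPOOL"                            # prints: pool/parabola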
diff --git a/config.local.import-community b/config.local.import-community
new file mode 100644
index 0000000..393fd57
--- /dev/null
+++ b/config.local.import-community
@@ -0,0 +1,5 @@
+#!/hint/bash
+
+PKGREPOS=({community,multilib}{,-testing,-staging})
+PKGPOOL='pool/community'
+SRCPOOL='sources/community'
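For reference, the brace expansion in PKGREPOS above yields six repos:

    $ echo {community,multilib}{,-testing,-staging}
    community community-testing community-staging multilib multilib-testing multilib-staging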
diff --git a/config.local.import-packages b/config.local.import-packages
new file mode 100644
index 0000000..c699b28
--- /dev/null
+++ b/config.local.import-packages
@@ -0,0 +1,5 @@
+#!/hint/bash
+
+PKGREPOS=('core' 'extra' 'testing' 'staging' {kde,gnome}-unstable)
+PKGPOOL='pool/packages'
+SRCPOOL='sources/packages'
diff --git a/config.local.parabola b/config.local.parabola
new file mode 100644
index 0000000..648dfbb
--- /dev/null
+++ b/config.local.parabola
@@ -0,0 +1,14 @@
+#!/hint/bash
+
+PKGREPOS=(
+ # Main repos
+ libre{,-testing}
+ libre-multilib{,-testing}
+ # Community project repos
+ {nonsystemd,nonprism}{,-testing}
+ pcr kernels cross java
+ # User repos
+ '~smv' '~xihh' '~brendan' '~lukeshu' '~emulatorman' '~aurelien' '~jorginho' '~coadde' '~drtan'
+)
+PKGPOOL='pool/parabola'
+SRCPOOL='sources/parabola'
diff --git a/cron-jobs/check_archlinux/check_packages.py b/cron-jobs/check_archlinux/check_packages.py
index d233bf6..ac0194f 100755
--- a/cron-jobs/check_archlinux/check_packages.py
+++ b/cron-jobs/check_archlinux/check_packages.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/env python2
#
# check_archlinux.py
#
diff --git a/cron-jobs/check_archlinux/parse_pkgbuilds.sh b/cron-jobs/check_archlinux/parse_pkgbuilds.sh
index 3f92169..b857ac8 100755
--- a/cron-jobs/check_archlinux/parse_pkgbuilds.sh
+++ b/cron-jobs/check_archlinux/parse_pkgbuilds.sh
@@ -6,23 +6,23 @@
exit() { return; }
splitpkg_overrides=('depends' 'optdepends' 'provides' 'conflicts')
-variables=('pkgname' 'pkgbase' 'epoch' 'pkgver' 'pkgrel' 'makedepends' 'arch' ${splitpkg_overrides[@]})
+variables=('pkgname' 'pkgbase' 'epoch' 'pkgver' 'pkgrel' 'makedepends' 'arch' "${splitpkg_overrides[@]}")
readonly -a variables splitpkg_overrides
backup_package_variables() {
- for var in ${splitpkg_overrides[@]}; do
+ for var in "${splitpkg_overrides[@]}"; do
indirect="${var}_backup"
- eval "${indirect}=(\${$var[@]})"
+ eval "${indirect}=(\"\${$var[@]}\")"
done
}
restore_package_variables() {
- for var in ${splitpkg_overrides[@]}; do
+ for var in "${splitpkg_overrides[@]}"; do
indirect="${var}_backup"
if [ -n "${!indirect}" ]; then
- eval "${var}=(\${$indirect[@]})"
+ eval "${var}=(\"\${$indirect[@]}\")"
else
- unset ${var}
+ unset "${var}"
fi
done
}
@@ -42,31 +42,31 @@ print_info() {
if [ -n "$arch" ]; then
echo "%ARCH%"
- for i in ${arch[@]}; do echo $i; done
+ for i in "${arch[@]}"; do echo "$i"; done
echo ""
fi
if [ -n "$depends" ]; then
echo "%DEPENDS%"
- for i in ${depends[@]}; do
- echo $i
+ for i in "${depends[@]}"; do
+ echo "$i"
done
echo ""
fi
if [ -n "$makedepends" ]; then
echo "%MAKEDEPENDS%"
- for i in ${makedepends[@]}; do
- echo $i
+ for i in "${makedepends[@]}"; do
+ echo "$i"
done
echo ""
fi
if [ -n "$conflicts" ]; then
echo "%CONFLICTS%"
- for i in ${conflicts[@]}; do echo $i; done
+ for i in "${conflicts[@]}"; do echo "$i"; done
echo ""
fi
if [ -n "$provides" ]; then
echo "%PROVIDES%"
- for i in ${provides[@]}; do echo $i; done
+ for i in "${provides[@]}"; do echo "$i"; done
echo ""
fi
}
@@ -75,10 +75,10 @@ source_pkgbuild() {
ret=0
dir=$1
pkgbuild=$dir/PKGBUILD
- for var in ${variables[@]}; do
- unset ${var}
+ for var in "${variables[@]}"; do
+ unset "${var}"
done
- source $pkgbuild &>/dev/null || ret=$?
+ source "$pkgbuild" &>/dev/null || ret=$?
# ensure $pkgname and $pkgver variables were found
if [ $ret -ne 0 -o -z "$pkgname" -o -z "$pkgver" ]; then
@@ -88,8 +88,8 @@ source_pkgbuild() {
if [ "${#pkgname[@]}" -gt "1" ]; then
pkgbase=${pkgbase:-${pkgname[0]}}
- for pkg in ${pkgname[@]}; do
- if [ "$(type -t package_${pkg})" != "function" ]; then
+ for pkg in "${pkgname[@]}"; do
+ if [ "$(type -t "package_${pkg}")" != "function" ]; then
echo -e "%INVALID%\n$pkgbuild\n"
return 1
else
@@ -98,13 +98,13 @@ source_pkgbuild() {
while IFS= read -r line; do
var=${line%%=*}
var="${var#"${var%%[![:space:]]*}"}" # remove leading whitespace characters
- for realvar in ${variables[@]}; do
+ for realvar in "${variables[@]}"; do
if [ "$var" == "$realvar" ]; then
eval $line
break
fi
done
- done < <(type package_${pkg})
+ done < <(type "package_${pkg}")
print_info
restore_package_variables
fi
@@ -118,20 +118,20 @@ source_pkgbuild() {
}
find_pkgbuilds() {
- #Skip over some dirs
- local d="${1##*/}"
- if [ "$d" = "CVS" -o "$d" = ".svn" ]; then
- return
- fi
-
- if [ -f $1/PKGBUILD ]; then
- source_pkgbuild $1
+ #Skip over some dirs
+ local d="${1##*/}"
+ if [ "$d" = "CVS" -o "$d" = ".svn" ]; then
+ return
+ fi
+
+ if [ -f "$1/PKGBUILD" ]; then
+ source_pkgbuild "$1"
return
fi
empty=1
- for dir in $1/*; do
- if [ -d $dir ]; then
- find_pkgbuilds $dir
+ for dir in "$1"/*; do
+ if [ -d "$dir" ]; then
+ find_pkgbuilds "$dir"
unset empty
fi
done
@@ -147,7 +147,7 @@ fi
CARCH=$1
shift
for dir in "$@"; do
- find_pkgbuilds $dir
+ find_pkgbuilds "$dir"
done
exit 0
diff --git a/cron-jobs/devlist-mailer b/cron-jobs/devlist-mailer
index ca2e46b..7f298b9 100755
--- a/cron-jobs/devlist-mailer
+++ b/cron-jobs/devlist-mailer
@@ -2,27 +2,26 @@
#Dummy helper to send email to arch-dev
# It does nothing if no output
-LIST="arch-dev-public@archlinux.org"
-#LIST="aaronmgriffin@gmail.com"
-FROM="repomaint@archlinux.org"
+# Load $LIST and $FROM from the config file
+. "$(dirname "$(readlink -e "$0")")/../config"
SUBJECT="Repository Maintenance $(date +"%d-%m-%Y")"
if [ $# -ge 1 ]; then
- SUBJECT="$1 $(date +"%d-%m-%Y")"
+ SUBJECT="$1 $(date +"%d-%m-%Y")"
fi
if [ $# -ge 2 ]; then
- LIST="$2"
+ LIST="$2"
fi
stdin="$(cat)"
#echo used to strip whitespace for checking for actual data
if [ -n "$(echo $stdin)" ]; then
-echo "Subject: $SUBJECT
+ echo "Subject: $SUBJECT
To: $LIST
From: $FROM
-$stdin" | /usr/sbin/sendmail -F$FROM "$LIST"
+$stdin" | /usr/sbin/sendmail -F"$FROM" "$LIST"
fi
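With this change the defaults for $LIST and $FROM come from the config file, while the optional positional arguments still override the subject ($1) and recipient ($2); the mail body is read from stdin and nothing is sent if it is empty. A hypothetical invocation:

    $ echo 'orphan: foo 1.0-1' | cron-jobs/devlist-mailer 'Orphan report' dev@lists.parabola.nu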
diff --git a/cron-jobs/ftpdir-cleanup b/cron-jobs/ftpdir-cleanup
index e1294bd..4063c09 100755
--- a/cron-jobs/ftpdir-cleanup
+++ b/cron-jobs/ftpdir-cleanup
@@ -1,22 +1,13 @@
#!/bin/bash
-. "$(dirname $0)/../config"
-. "$(dirname $0)/../db-functions"
-
-# just like mv -f, but we touch the file and then copy the content so
-# default ACLs in the target dir will be applied
-mv_acl() {
- rm -f "$2"
- touch "$2"
- cat "$1" >"$2" || return 1
- rm -f "$1"
-}
+. "$(dirname "$(readlink -e "$0")")/../config"
+. "$(dirname "$(readlink -e "$0")")/../db-functions"
clean_pkg() {
local pkg
local target
- if ! ${CLEANUP_DRYRUN}; then
+ if ! "${CLEANUP_DRYRUN}"; then
for pkg in "$@"; do
if [ -h "$pkg" ]; then
rm -f "$pkg" "$pkg.sig"
@@ -33,16 +24,16 @@ clean_pkg() {
script_lock
-for repo in ${PKGREPOS[@]}; do
- for arch in ${ARCHES[@]}; do
- repo_lock ${repo} ${arch} || exit 1
+for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
+ repo_lock "${repo}" "${arch}" || exit 1
done
done
-${CLEANUP_DRYRUN} && warning 'dry run mode is active'
+"${CLEANUP_DRYRUN}" && warning 'dry run mode is active'
-for repo in ${PKGREPOS[@]}; do
- for arch in ${ARCHES[@]}; do
+for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
if [ ! -f "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" ]; then
continue
fi
@@ -53,17 +44,17 @@ for repo in ${PKGREPOS[@]}; do
missing_pkgs=($(comm -13 "${WORKDIR}/repo-${repo}-${arch}" "${WORKDIR}/db-${repo}-${arch}"))
if [ ${#missing_pkgs[@]} -ge 1 ]; then
- error "Missing packages in [${repo}] (${arch})..."
- for missing_pkg in ${missing_pkgs[@]}; do
- msg2 "${missing_pkg}"
+ error "Missing packages in [%s] (%s)..." "${repo}" "${arch}"
+ for missing_pkg in "${missing_pkgs[@]}"; do
+ msg2 '%s' "${missing_pkg}"
done
fi
old_pkgs=($(comm -23 "${WORKDIR}/repo-${repo}-${arch}" "${WORKDIR}/db-${repo}-${arch}"))
if [ ${#old_pkgs[@]} -ge 1 ]; then
- msg "Removing old packages from [${repo}] (${arch})..."
- for old_pkg in ${old_pkgs[@]}; do
- msg2 "${old_pkg}"
+ msg "Removing old packages from [%s] (%s)..." "${repo}" "${arch}"
+ for old_pkg in "${old_pkgs[@]}"; do
+ msg2 '%s' "${old_pkg}"
clean_pkg "${FTP_BASE}/${repo}/os/${arch}/${old_pkg}"
done
fi
@@ -73,32 +64,32 @@ done
# get a list of all available packages in the package pool
find "$FTP_BASE/${PKGPOOL}" -name "*${PKGEXT}" -printf '%f\n' | sort > "${WORKDIR}/pool"
# create a list of packages in our db
-cat "${WORKDIR}/db-"* | sort -u > "${WORKDIR}/db"
+find "${WORKDIR}" -maxdepth 1 -type f -name 'db-*' -exec cat {} \; | sort -u > "${WORKDIR}/db"
old_pkgs=($(comm -23 "${WORKDIR}/pool" "${WORKDIR}/db"))
if [ ${#old_pkgs[@]} -ge 1 ]; then
msg "Removing old packages from package pool..."
- for old_pkg in ${old_pkgs[@]}; do
- msg2 "${old_pkg}"
+ for old_pkg in "${old_pkgs[@]}"; do
+ msg2 '%s' "${old_pkg}"
clean_pkg "$FTP_BASE/${PKGPOOL}/${old_pkg}"
done
fi
-old_pkgs=($(find ${CLEANUP_DESTDIR} -type f -name "*${PKGEXT}" -mtime +${CLEANUP_KEEP} -printf '%f\n'))
+old_pkgs=($(find "${CLEANUP_DESTDIR}" -type f -name "*${PKGEXT}" -mtime +"${CLEANUP_KEEP}" -printf '%f\n'))
if [ ${#old_pkgs[@]} -ge 1 ]; then
msg "Removing old packages from the cleanup directory..."
- for old_pkg in ${old_pkgs[@]}; do
- msg2 "${old_pkg}"
- if ! ${CLEANUP_DRYRUN}; then
+ for old_pkg in "${old_pkgs[@]}"; do
+ msg2 '%s' "${old_pkg}"
+ if ! "${CLEANUP_DRYRUN}"; then
rm -f "${CLEANUP_DESTDIR}/${old_pkg}"
rm -f "${CLEANUP_DESTDIR}/${old_pkg}.sig"
fi
done
fi
-for repo in ${PKGREPOS[@]}; do
- for arch in ${ARCHES[@]}; do
- repo_unlock ${repo} ${arch}
+for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
+ repo_unlock "${repo}" "${arch}"
done
done
diff --git a/cron-jobs/integrity-check b/cron-jobs/integrity-check
index f6c26cf..7459380 100755
--- a/cron-jobs/integrity-check
+++ b/cron-jobs/integrity-check
@@ -1,6 +1,6 @@
#!/bin/bash
-dirname="$(dirname $0)"
+dirname="$(dirname "$(readlink -e "$0")")"
. "${dirname}/../config"
. "${dirname}/../db-functions"
@@ -8,17 +8,17 @@ dirname="$(dirname $0)"
script_lock
if [ $# -ne 1 ]; then
- die "usage: ${0##*/} <mailto>"
+ die "usage: %s <mailto>" "${0##*/}"
fi
mailto=$1
check() {
- ${dirname}/check_archlinux/check_packages.py \
+ "${dirname}"/check_archlinux/check_packages.py \
--repos="${repos}" \
--abs-tree="/srv/abs/rsync/${arch},/srv/abs/rsync/any" \
--repo-dir="${FTP_BASE}" \
--arch="${arch}" \
- 2>&1 | ${dirname}/devlist-mailer "Integrity Check ${arch}: ${repos}" "${mailto}"
+ 2>&1 | "${dirname}"/devlist-mailer "Integrity Check ${arch}: ${repos}" "${mailto}"
}
repos='core,extra,community'
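Per the usage message above, the job takes a single mailto argument and pipes the checker output to devlist-mailer; a hypothetical invocation:

    $ cron-jobs/integrity-check dev@lists.parabola.nu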
diff --git a/cron-jobs/make_repo_torrents b/cron-jobs/make_repo_torrents
new file mode 100755
index 0000000..2eb0978
--- /dev/null
+++ b/cron-jobs/make_repo_torrents
@@ -0,0 +1,70 @@
+#!/bin/bash
+# Copyright (C) 2014 Joseph Graham <joseph@t67.eu>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+# This script finds any updated packages and calls
+# `make_individual_torrent' for each of them.
+
+username=$( id -un )
+
+case "${username}" in
+ repo | root )
+ true
+ ;;
+ * )
+ echo "This script must be run as repo user or root user."
+ echo "ByeBye!"
+ exit 1
+ ;;
+esac
+
+# pacman doesn't support multiple different packages of the same name,
+# so it's OK to just stuff all the torrents into a single directory.
+script_directory="$(dirname "$(readlink -e "$0")")/.."
+. "$(dirname "$(readlink -e "$0")")/../config"
+public_location="$FTP_BASE/"
+torrent_location="$FTP_BASE/torrents/"
+
+cd "${torrent_location}"
+
+# Find any directories that might have packages in them
+find "${public_location}" -name 'os' -type 'd' |
+while read dir
+do
+ # Find any packages
+ find "${dir}" -name '*\.pkg\.tar\.xz' |
+ while read pkg
+ do
+ pkg_name="${pkg##*/}"
+
+ if [[ -h "${pkg}" ]] # check if it's a symbolic link
+ then
+ # We get the target of the symlink
+ pkg=$( readlink -f "${pkg}" )
+ fi
+
+ # If a .torrent file does not already exist for this package, we call
+ # `make_individual_torrent' to make it.
+ if ! [[ -f "${torrent_location}${pkg_name}.torrent" ]]
+ then
+ "$script_directory/make_individual_torrent" "${pkg}" "${public_location}"
+ fi
+ done
+done
+
+if [[ "${username}" == root ]]
+then
+ chown repo *
+fi
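Since the script refuses to run except as the repo or root user, a deployment would typically drive it from cron; a hypothetical crontab entry (install path assumed):

    # nightly torrent generation, run as the repo user
    30 3 * * * /srv/repo/dbscripts/cron-jobs/make_repo_torrents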
diff --git a/cron-jobs/makepkg.conf b/cron-jobs/makepkg.conf
deleted file mode 100644
index 603edba..0000000
--- a/cron-jobs/makepkg.conf
+++ /dev/null
@@ -1,121 +0,0 @@
-#
-# /etc/makepkg.conf
-#
-
-#########################################################################
-# SOURCE ACQUISITION
-#########################################################################
-#
-#-- The download utilities that makepkg should use to acquire sources
-# Format: 'protocol::agent'
-DLAGENTS=('ftp::/usr/bin/curl -sS -fC - --ftp-pasv --retry 3 --retry-delay 3 -o %o %u'
- 'http::/usr/bin/curl -sS -fLC - --retry 3 --retry-delay 3 -o %o %u'
- 'https::/usr/bin/curl -sS -fLC - --retry 3 --retry-delay 3 -o %o %u'
- 'rsync::/usr/bin/rsync -q --no-motd -z %u %o'
- 'scp::/usr/bin/scp -q -C %u %o')
-
-# Other common tools:
-# /usr/bin/snarf
-# /usr/bin/lftpget -c
-# /usr/bin/wget
-
-#########################################################################
-# ARCHITECTURE, COMPILE FLAGS
-#########################################################################
-#
-CARCH="x86_64"
-CHOST="x86_64-unknown-linux-gnu"
-
-#-- Compiler and Linker Flags
-# -march (or -mcpu) builds exclusively for an architecture
-# -mtune optimizes for an architecture, but builds for whole processor family
-CFLAGS="-march=x86-64 -mtune=generic -O2 -pipe -fstack-protector --param=ssp-buffer-size=4 -D_FORTIFY_SOURCE=2"
-CXXFLAGS="-march=x86-64 -mtune=generic -O2 -pipe -fstack-protector --param=ssp-buffer-size=4 -D_FORTIFY_SOURCE=2"
-LDFLAGS="-Wl,-O1,--sort-common,--as-needed,-z,relro,--hash-style=gnu"
-#-- Make Flags: change this for DistCC/SMP systems
-#MAKEFLAGS="-j2"
-
-#########################################################################
-# BUILD ENVIRONMENT
-#########################################################################
-#
-# Defaults: BUILDENV=(fakeroot !distcc color !ccache check !sign)
-# A negated environment option will do the opposite of the comments below.
-#
-#-- fakeroot: Allow building packages as a non-root user
-#-- distcc: Use the Distributed C/C++/ObjC compiler
-#-- color: Colorize output messages
-#-- ccache: Use ccache to cache compilation
-#-- check: Run the check() function if present in the PKGBUILD
-#-- sign: Generate PGP signature file
-#
-BUILDENV=(fakeroot !distcc color !ccache check !sign)
-#
-#-- If using DistCC, your MAKEFLAGS will also need modification. In addition,
-#-- specify a space-delimited list of hosts running in the DistCC cluster.
-#DISTCC_HOSTS=""
-#
-#-- Specify a directory for package building.
-#BUILDDIR=/tmp/makepkg
-
-#########################################################################
-# GLOBAL PACKAGE OPTIONS
-# These are default values for the options=() settings
-#########################################################################
-#
-# Default: OPTIONS=(strip docs libtool emptydirs zipman purge !upx)
-# A negated option will do the opposite of the comments below.
-#
-#-- strip: Strip symbols from binaries/libraries
-#-- docs: Save doc directories specified by DOC_DIRS
-#-- libtool: Leave libtool (.la) files in packages
-#-- emptydirs: Leave empty directories in packages
-#-- zipman: Compress manual (man and info) pages in MAN_DIRS with gzip
-#-- purge: Remove files specified by PURGE_TARGETS
-#-- upx: Compress binary executable files using UPX
-#
-OPTIONS=(strip docs libtool emptydirs zipman purge !upx)
-
-#-- File integrity checks to use. Valid: md5, sha1, sha256, sha384, sha512
-INTEGRITY_CHECK=(md5)
-#-- Options to be used when stripping binaries. See `man strip' for details.
-STRIP_BINARIES="--strip-all"
-#-- Options to be used when stripping shared libraries. See `man strip' for details.
-STRIP_SHARED="--strip-unneeded"
-#-- Options to be used when stripping static libraries. See `man strip' for details.
-STRIP_STATIC="--strip-debug"
-#-- Manual (man and info) directories to compress (if zipman is specified)
-MAN_DIRS=({usr{,/local}{,/share},opt/*}/{man,info})
-#-- Doc directories to remove (if !docs is specified)
-DOC_DIRS=(usr/{,local/}{,share/}{doc,gtk-doc} opt/*/{doc,gtk-doc})
-#-- Files to be removed from all packages (if purge is specified)
-PURGE_TARGETS=(usr/{,share}/info/dir .packlist *.pod)
-
-#########################################################################
-# PACKAGE OUTPUT
-#########################################################################
-#
-# Default: put built package and cached source in build directory
-#
-#-- Destination: specify a fixed directory where all packages will be placed
-#PKGDEST=/home/packages
-#-- Source cache: specify a fixed directory where source files will be cached
-#SRCDEST=/home/sources
-#-- Source packages: specify a fixed directory where all src packages will be placed
-#SRCPKGDEST=/home/srcpackages
-#-- Packager: name/email of the person or organization building packages
-#PACKAGER="John Doe <john@doe.com>"
-#-- Specify a key to use for package signing
-#GPGKEY=""
-
-#########################################################################
-# EXTENSION DEFAULTS
-#########################################################################
-#
-# WARNING: Do NOT modify these variables unless you know what you are
-# doing.
-#
-PKGEXT='.pkg.tar.xz'
-SRCEXT='.src.tar.gz'
-
-# vim: set ft=sh ts=2 sw=2 et:
diff --git a/cron-jobs/sourceballs b/cron-jobs/sourceballs
index 6f75ccc..c02912a 100755
--- a/cron-jobs/sourceballs
+++ b/cron-jobs/sourceballs
@@ -1,15 +1,15 @@
#!/bin/bash
-dirname="$(dirname $(readlink -e $0))"
+dirname="$(dirname "$(readlink -e "$0")")"
. "${dirname}/../config"
. "${dirname}/../db-functions"
pushd "${WORKDIR}" >/dev/null
script_lock
-for repo in ${PKGREPOS[@]}; do
- for arch in ${ARCHES[@]}; do
- repo_lock ${repo} ${arch} || exit 1
+for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
+ repo_lock "${repo}" "${arch}" || exit 1
done
done
@@ -18,8 +18,8 @@ renice +10 -p $$ > /dev/null
# Create a readable file for each repo with the following format
# <pkgbase|pkgname> <pkgver>-<pkgrel> <arch> <license>[ <license>]
-for repo in ${PKGREPOS[@]}; do
- for arch in ${ARCHES[@]}; do
+for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
# Repo does not exist; skip it
if [ ! -f "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" ]; then
continue
@@ -39,9 +39,9 @@ for repo in ${PKGREPOS[@]}; do
done | sort -u > "${WORKDIR}/db-${repo}"
done
-for repo in ${PKGREPOS[@]}; do
- for arch in ${ARCHES[@]}; do
- repo_unlock ${repo} ${arch}
+for repo in "${PKGREPOS[@]}"; do
+ for arch in "${ARCHES[@]}"; do
+ repo_unlock "${repo}" "${arch}"
done
done
@@ -49,22 +49,22 @@ done
find "${FTP_BASE}/${SRCPOOL}" -xtype f -name "*${SRCEXT}" -printf '%f\n' | sort -u > "${WORKDIR}/available-src-pkgs"
# Check for all packages if we need to build a source package
-for repo in ${PKGREPOS[@]}; do
+for repo in "${PKGREPOS[@]}"; do
newpkgs=()
failedpkgs=()
while read line; do
- pkginfo=(${line})
+		read -r -a pkginfo <<<"${line}"
pkgbase=${pkginfo[0]}
pkgver=${pkginfo[1]}
pkgarch=${pkginfo[2]}
- pkglicense=(${pkginfo[@]:3})
+ pkglicense=("${pkginfo[@]:3}")
# Should this package be skipped?
if grep -Fqx "${pkgbase}" "${dirname}/sourceballs.skip"; then
continue
fi
# Check if the license or .force file does not enforce creating a source package
- if ! ([[ -z ${ALLOWED_LICENSES[@]} ]] || chk_license ${pkglicense[@]} || grep -Fqx "${pkgbase}" "${dirname}/sourceballs.force"); then
+ if ! ([[ -z ${ALLOWED_LICENSES[*]} ]] || chk_license "${pkglicense[@]}" || grep -Fqx "${pkgbase}" "${dirname}/sourceballs.force"); then
continue
fi
# Store the expected file name of the source package
@@ -73,29 +73,29 @@ for repo in ${PKGREPOS[@]}; do
# Build the source package if its not already there
if ! grep -Fqx "${pkgbase}-${pkgver}${SRCEXT}" "${WORKDIR}/available-src-pkgs"; then
# Check if we had failed before
- if in_array "${pkgbase}-${pkgver}${SRCEXT}" ${failedpkgs[@]}; then
+ if in_array "${pkgbase}-${pkgver}${SRCEXT}" "${failedpkgs[@]}"; then
continue
fi
- # Get the sources from svn
+ # Get the sources from xbs
mkdir -p -m0770 "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}"
- arch_svn export -q "${SVNREPO}/${pkgbase}/repos/${repo}-${pkgarch}" \
+ cp -a "$(xbs releasepath "${pkgbase}" "${repo}" "${pkgarch}")" \
"${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null 2>&1
if [ $? -ge 1 ]; then
- failedpkgs[${#failedpkgs[*]}]="${pkgbase}-${pkgver}${SRCEXT}"
+ failedpkgs+=("${pkgbase}-${pkgver}${SRCEXT}")
continue
fi
# Build the actual source package
pushd "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null
- makepkg --nocolor --allsource --ignorearch --skippgpcheck --config "${dirname}/makepkg.conf" >"${WORKDIR}/${pkgbase}.log" 2>&1
+ SRCPKGDEST=. makepkg --nocolor --allsource --ignorearch --skippgpcheck >"${WORKDIR}/${pkgbase}.log" 2>&1
if [ $? -eq 0 ] && [ -f "${pkgbase}-${pkgver}${SRCEXT}" ]; then
- mv "${pkgbase}-${pkgver}${SRCEXT}" "${FTP_BASE}/${SRCPOOL}"
+ mv_acl "${pkgbase}-${pkgver}${SRCEXT}" "${FTP_BASE}/${SRCPOOL}/${pkgbase}-${pkgver}${SRCEXT}"
# Avoid creating the same source package for every arch
echo "${pkgbase}-${pkgver}${SRCEXT}" >> "${WORKDIR}/available-src-pkgs"
- newpkgs[${#newpkgs[*]}]="${pkgbase}-${pkgver}${SRCEXT}"
+ newpkgs+=("${pkgbase}-${pkgver}${SRCEXT}")
else
- failedpkgs[${#failedpkgs[*]}]="${pkgbase}-${pkgver}${SRCEXT}"
+ failedpkgs+=("${pkgbase}-${pkgver}${SRCEXT}")
cat "${WORKDIR}/${pkgbase}.log" >> "${WORKDIR}/makepkg-fail.log"
fi
popd >/dev/null
@@ -103,42 +103,42 @@ for repo in ${PKGREPOS[@]}; do
done < "${WORKDIR}/db-${repo}"
if [ ${#newpkgs[@]} -ge 1 ]; then
- msg "Adding source packages for [${repo}]..."
- for new_pkg in ${newpkgs[@]}; do
- msg2 "${new_pkg}"
+ msg "Adding source packages for [%s]..." "${repo}"
+ for new_pkg in "${newpkgs[@]}"; do
+ msg2 '%s' "${new_pkg}"
done
fi
if [ ${#failedpkgs[@]} -ge 1 ]; then
- msg "Failed to create source packages for [${repo}]..."
- for failed_pkg in ${failedpkgs[@]}; do
- msg2 "${failed_pkg}"
+ msg "Failed to create source packages for [%s]..." "${repo}"
+ for failed_pkg in "${failedpkgs[@]}"; do
+ msg2 '%s' "${failed_pkg}"
done
fi
done
# Cleanup old source packages
-cat "${WORKDIR}/expected-src-pkgs" | sort -u > "${WORKDIR}/expected-src-pkgs.sort"
-cat "${WORKDIR}/available-src-pkgs" | sort -u > "${WORKDIR}/available-src-pkgs.sort"
+find "${WORKDIR}" -maxdepth 1 -type f -name 'expected-src-pkgs' -exec cat {} \; | sort -u > "${WORKDIR}/expected-src-pkgs.sort"
+find "${WORKDIR}" -maxdepth 1 -type f -name 'available-src-pkgs' -exec cat {} \; | sort -u > "${WORKDIR}/available-src-pkgs.sort"
old_pkgs=($(comm -23 "${WORKDIR}/available-src-pkgs.sort" "${WORKDIR}/expected-src-pkgs.sort"))
if [ ${#old_pkgs[@]} -ge 1 ]; then
msg "Removing old source packages..."
- ${SOURCE_CLEANUP_DRYRUN} && warning 'dry run mode is active'
- for old_pkg in ${old_pkgs[@]}; do
- msg2 "${old_pkg}"
- if ! ${SOURCE_CLEANUP_DRYRUN}; then
- mv "$FTP_BASE/${SRCPOOL}/${old_pkg}" "${SOURCE_CLEANUP_DESTDIR}"
+ "${SOURCE_CLEANUP_DRYRUN}" && warning 'dry run mode is active'
+ for old_pkg in "${old_pkgs[@]}"; do
+ msg2 '%s' "${old_pkg}"
+ if ! "${SOURCE_CLEANUP_DRYRUN}"; then
+ mv_acl "$FTP_BASE/${SRCPOOL}/${old_pkg}" "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
touch "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
fi
done
fi
-old_pkgs=($(find ${SOURCE_CLEANUP_DESTDIR} -type f -name "*${SRCEXT}" -mtime +${SOURCE_CLEANUP_KEEP} -printf '%f\n'))
+old_pkgs=($(find "${SOURCE_CLEANUP_DESTDIR}" -type f -name "*${SRCEXT}" -mtime +"${SOURCE_CLEANUP_KEEP}" -printf '%f\n'))
if [ ${#old_pkgs[@]} -ge 1 ]; then
msg "Removing old source packages from the cleanup directory..."
- for old_pkg in ${old_pkgs[@]}; do
- msg2 "${old_pkg}"
- ${SOURCE_CLEANUP_DRYRUN} || rm -f "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
+ for old_pkg in "${old_pkgs[@]}"; do
+ msg2 '%s' "${old_pkg}"
+ "${SOURCE_CLEANUP_DRYRUN}" || rm -f "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
done
fi
diff --git a/cron-jobs/update-web-db b/cron-jobs/update-web-db
deleted file mode 100755
index 195d1fc..0000000
--- a/cron-jobs/update-web-db
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/bin/bash
-
-. "$(dirname $0)/../config"
-. "$(dirname $0)/../db-functions"
-
-# setup paths
-SPATH="/srv/http/archweb"
-ENVPATH="/srv/http/archweb-env/bin/activate"
-
-# having "more important repos" last should make [core] trickle to the top of
-# the updates list each hour rather than being overwhelmed by big [extra] and
-# [community] updates
-REPOS=('community-testing' 'multilib-testing' 'multilib' 'community' 'extra' 'testing' 'core')
-LOGOUT="/tmp/archweb_update.log"
-
-# figure out what operation to perform
-cmd="${0##*/}"
-if [[ $cmd != "update-web-db" && $cmd != "update-web-files-db" ]]; then
- die "Invalid command name '$cmd' specified!"
-fi
-
-script_lock
-
-# run at nice 5. it can churn quite a bit of cpu after all.
-renice +5 -p $$ > /dev/null
-
-echo "$cmd: Updating DB at $(date)" >> "${LOGOUT}"
-
-# source our virtualenv if it exists
-if [ -f "$ENVPATH" ]; then
- . "$ENVPATH"
-fi
-
-case "$cmd" in
- update-web-db)
- dbfileext="${DBEXT}"
- flags=""
- ;;
- update-web-files-db)
- dbfileext="${FILESEXT}"
- flags="--filesonly"
- ;;
-esac
-
-# Lock the repos and get a copy of the db files to work on
-for repo in ${REPOS[@]}; do
- for arch in ${ARCHES[@]}; do
- repo_lock ${repo} ${arch} || exit 1
- dbfile="/srv/ftp/${repo}/os/${arch}/${repo}${dbfileext}"
- if [ -f "${dbfile}" ]; then
- mkdir -p "${WORKDIR}/${repo}/${arch}"
- cp "${dbfile}" "${WORKDIR}/${repo}/${arch}/${repo}${dbfileext}"
- fi
- repo_unlock ${repo} ${arch}
- done
-done
-
-# Run reporead on our db copy
-pushd $SPATH >/dev/null
-for repo in ${REPOS[@]}; do
- for arch in ${ARCHES[@]}; do
- dbcopy="${WORKDIR}/${repo}/${arch}/${repo}${dbfileext}"
- if [ -f "${dbcopy}" ]; then
- echo "Updating ${repo}-${arch}" >> "${LOGOUT}"
- ./manage.py reporead ${flags} ${arch} "${dbcopy}" >> "${LOGOUT}" 2>&1
- echo "" >> "${LOGOUT}"
- fi
- done
-done
-popd >/dev/null
-echo "" >> "${LOGOUT}"
-
-# rotate the file if it is getting big (> 10M), overwriting any old backup
-if [[ $(stat -c%s "${LOGOUT}") -gt 10485760 ]]; then
- mv "${LOGOUT}" "${LOGOUT}.old"
-fi
-
-script_unlock
diff --git a/cron-jobs/update-web-files-db b/cron-jobs/update-web-files-db
deleted file mode 120000
index 0c2c4fa..0000000
--- a/cron-jobs/update-web-files-db
+++ /dev/null
@@ -1 +0,0 @@
-update-web-db \ No newline at end of file
diff --git a/db-check-package-libraries.py b/db-check-package-libraries.py
new file mode 100755
index 0000000..bc2349b
--- /dev/null
+++ b/db-check-package-libraries.py
@@ -0,0 +1,193 @@
+#!/usr/bin/env python3
+# Copyright (C) 2012 Michał Masłowski <mtjm@mtjm.eu>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+
+"""
+Check which libraries are provided or required by a package, store
+this in a database, and use it to update and list broken packages.
+
+Dependencies:
+
+- Python 3.2 or later with SQLite 3 support
+
+- ``bsdtar``
+
+- ``readelf``
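+
+A hypothetical session (file names below are placeholders, not real
+packages)::
+
+    db-check-package-libraries.py -d libs.sqlite init
+    db-check-package-libraries.py -d libs.sqlite add foo-1.0-1-x86_64.pkg.tar.xz
+    db-check-package-libraries.py -d libs.sqlite check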
+"""
+
+
+import os.path
+import re
+import sqlite3
+import subprocess
+import tempfile
+
+
+#: Regexp matching an interesting dynamic entry.
+_DYNAMIC = re.compile(r"^\s*[0-9a-fx]+"
+                      r"\s*\((NEEDED|SONAME)\)[^:]*:\s*\[(.+)\]$")
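+# For reference, this is meant to match ``readelf -d`` lines of the form
+# (sample output; the library name is illustrative):
+#   0x0000000000000001 (NEEDED)  Shared library: [libc.so.6]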
+
+
+def make_db(path):
+ """Make a new, empty, library database at *path*."""
+ con = sqlite3.connect(path)
+ con.executescript("""
+create table provided(
+ library varchar not null,
+ package varchar not null
+);
+create table used(
+ library varchar not null,
+ package varchar not null
+);
+""")
+ con.close()
+
+
+def begin(database):
+ """Connect to *database* and start a transaction."""
+ con = sqlite3.connect(database)
+ con.execute("begin exclusive")
+ return con
+
+
+def add_provided(con, package, libraries):
+ """Write that *package* provides *libraries*."""
+ for library in libraries:
+ con.execute("insert into provided (package, library) values (?,?)",
+ (package, library))
+
+
+def add_used(con, package, libraries):
+ """Write that *package* uses *libraries*."""
+ for library in libraries:
+ con.execute("insert into used (package, library) values (?,?)",
+ (package, library))
+
+
+def remove_package(con, package):
+ """Remove all entries for a package."""
+ con.execute("delete from provided where package=?", (package,))
+ con.execute("delete from used where package=?", (package,))
+
+
+def add_package(con, package):
+ """Add entries from a named *package*."""
+ # Extract to a temporary directory. This could be done more
+ # efficiently, since there is no need to store more than one file
+ # at once.
+ with tempfile.TemporaryDirectory() as temp:
+ tar = subprocess.Popen(("bsdtar", "xf", package, "-C", temp))
+ tar.communicate()
+ with open(os.path.join(temp, ".PKGINFO")) as pkginfo:
+ for line in pkginfo:
+ if line.startswith("pkgname ="):
+ pkgname = line[len("pkgname ="):].strip()
+ break
+ # Don't list previously removed libraries.
+ remove_package(con, pkgname)
+ provided = set()
+ used = set()
+ # Search for ELFs.
+ for dirname, dirnames, filenames in os.walk(temp):
+ assert dirnames is not None # unused, avoid pylint warning
+ for file_name in filenames:
+ path = os.path.join(dirname, file_name)
+ with open(path, "rb") as file_object:
+ if file_object.read(4) != b"\177ELF":
+ continue
+ readelf = subprocess.Popen(("readelf", "-d", path),
+ stdout=subprocess.PIPE)
+ for line in readelf.communicate()[0].split(b"\n"):
+ match = _DYNAMIC.match(line.decode("ascii"))
+ if match:
+ if match.group(1) == "SONAME":
+ provided.add(match.group(2))
+ elif match.group(1) == "NEEDED":
+ used.add(match.group(2))
+ else:
+ raise AssertionError("unknown entry type "
+ + match.group(1))
+ add_provided(con, pkgname, provided)
+ add_used(con, pkgname, used)
+
+
+def init(arguments):
+ """Initialize."""
+ make_db(arguments.database)
+
+
+def add(arguments):
+ """Add packages."""
+ con = begin(arguments.database)
+ for package in arguments.packages:
+ add_package(con, package)
+ con.commit()
+ con.close()
+
+
+def remove(arguments):
+ """Remove packages."""
+ con = begin(arguments.database)
+ for package in arguments.packages:
+ remove_package(con, package)
+ con.commit()
+ con.close()
+
+
+def check(arguments):
+ """List broken packages."""
+ con = begin(arguments.database)
+ available = set(row[0] for row
+ in con.execute("select library from provided"))
+ for package, library in con.execute("select package, library from used"):
+ if library not in available:
+ print(package, "needs", library)
+ con.close()
+
+
+def main():
+ """Get arguments and run the command."""
+ from argparse import ArgumentParser
+ parser = ArgumentParser(prog="check-package-libraries.py",
+ description="Check packages for "
+ "provided/needed libraries")
+ parser.add_argument("-d", "--database", type=str,
+ help="Database file to use",
+ default="package-libraries.sqlite")
+ subparsers = parser.add_subparsers()
+ subparser = subparsers.add_parser(name="init",
+ help="initialize the database")
+ subparser.set_defaults(command=init)
+ subparser = subparsers.add_parser(name="add",
+ help="add packages to database")
+ subparser.add_argument("packages", nargs="+", type=str,
+ help="package files to add")
+ subparser.set_defaults(command=add)
+ subparser = subparsers.add_parser(name="remove",
+ help="remove packages from database")
+ subparser.add_argument("packages", nargs="+", type=str,
+ help="package names to remove")
+ subparser.set_defaults(command=remove)
+ subparser = subparsers.add_parser(name="check",
+ help="list broken packages")
+ subparser.set_defaults(command=check)
+ arguments = parser.parse_args()
+ arguments.command(arguments)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/db-functions b/db-functions
index b3a4293..62260bb 100644
--- a/db-functions
+++ b/db-functions
@@ -1,7 +1,7 @@
-#!/bin/bash
+#!/hint/bash
# Some PKGBUILDs need CARCH to be set
-CARCH="x86_64"
+CARCH=$(. "$(librelib conf.sh)"; load_files makepkg; echo "$CARCH")
# Useful functions
UMASK=""
@@ -12,88 +12,41 @@ set_umask () {
}
restore_umask () {
- umask $UMASK >/dev/null
+ umask "$UMASK" >/dev/null
+}
+
+# just like mv -f, but we touch the file and then copy the content so
+# default ACLs in the target dir will be applied
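+# Usage: mv_acl <src> <dst>
+# e.g. (hypothetical paths):
+#   mv_acl "${STAGING}/core/foo.pkg.tar.xz" "${FTP_BASE}/${PKGPOOL}/foo.pkg.tar.xz"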
+mv_acl() {
+ rm -f "$2"
+ touch "$2"
+ cat "$1" >"$2" || return 1
+ rm -f "$1"
}
# set up general environment
-WORKDIR=$(mktemp -d "${TMPDIR}/${0##*/}.XXXXXXXXXX")
+WORKDIR=$(mktemp -dt "${0##*/}.XXXXXXXXXX")
if [ -n "${SVNUSER}" ]; then
setfacl -m u:"${SVNUSER}":rwx "${WORKDIR}"
setfacl -m d:u:"${USER}":rwx "${WORKDIR}"
setfacl -m d:u:"${SVNUSER}":rwx "${WORKDIR}"
fi
LOCKS=()
+REPO_MODIFIED=0
-# check if messages are to be printed using color
-unset ALL_OFF BOLD BLUE GREEN RED YELLOW
-if [[ -t 2 ]]; then
- ALL_OFF="$(tput sgr0)"
- BOLD="$(tput bold)"
- BLUE="${BOLD}$(tput setaf 4)"
- GREEN="${BOLD}$(tput setaf 2)"
- RED="${BOLD}$(tput setaf 1)"
- YELLOW="${BOLD}$(tput setaf 3)"
-fi
-readonly ALL_OFF BOLD BLUE GREEN RED YELLOW
-
-plain() {
- local mesg=$1; shift
- printf "${BOLD} ${mesg}${ALL_OFF}\n" "$@"
-}
-
-msg() {
- local mesg=$1; shift
- printf "${GREEN}==>${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@"
-}
-
-msg2() {
- local mesg=$1; shift
- printf "${BLUE} ->${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@"
-}
-
-warning() {
- local mesg=$1; shift
- printf "${YELLOW}==> WARNING:${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
-}
-
-error() {
- local mesg=$1; shift
- printf "${RED}==> ERROR${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
-}
-
-##
-# usage : in_array( $needle, $haystack )
-# return : 0 - found
-# 1 - not found
-##
-in_array() {
- local needle=$1; shift
- [[ -z $1 ]] && return 1 # Not Found
- local item
- for item in "$@"; do
- [[ $item = $needle ]] && return 0 # Found
- done
- return 1 # Not Found
-}
-
-##
-# usage : get_full_version( $epoch, $pkgver, $pkgrel )
-# return : full version spec, including epoch (if necessary), pkgver, pkgrel
-##
-get_full_version() {
- if [[ $1 -eq 0 ]]; then
- # zero epoch case, don't include it in version
- echo $2-$3
- else
- echo $1:$2-$3
- fi
-}
+# Used: plain, msg, msg2, warning, error, in_array, get_full_version, abort, die
+# Overwritten: cleanup
+# Ignored: stat_busy, stat_done,
+# setup_workdir, trap_abort, trap_exit,
+# lock, slock, lock_close
+# pkgver_equal, find_cached_package, check_root
+. "$(librelib common)"
script_lock() {
local LOCKDIR="$TMPDIR/.scriptlock.${0##*/}"
if ! mkdir "$LOCKDIR" >/dev/null 2>&1 ; then
- local _owner="$(/usr/bin/stat -c %U $LOCKDIR)"
- error "Script ${0##*/} is already locked by $_owner."
+ local _owner="$(/usr/bin/stat -c %U "$LOCKDIR")"
+ error "Script %s is already locked by %s." "${0##*/}" "$_owner"
exit 1
else
set_umask
@@ -104,7 +57,7 @@ script_lock() {
script_unlock() {
local LOCKDIR="$TMPDIR/.scriptlock.${0##*/}"
if [ ! -d "$LOCKDIR" ]; then
- warning "Script ${0##*/} was not locked!"
+ warning "Script %s was not locked!" "${0##*/}"
restore_umask
return 1
else
@@ -120,30 +73,25 @@ cleanup() {
local arch
trap - EXIT INT QUIT TERM
- for l in ${LOCKS[@]}; do
+ for l in "${LOCKS[@]}"; do
repo=${l%.*}
arch=${l#*.}
if [ -d "$TMPDIR/.repolock.$repo.$arch" ]; then
- msg "Removing left over lock from [${repo}] (${arch})"
- repo_unlock $repo $arch
+ msg "Removing left over lock from [%s] (%s)" "${repo}" "${arch}"
+ repo_unlock "$repo" "$arch"
fi
done
if [ -d "$TMPDIR/.scriptlock.${0##*/}" ]; then
- msg "Removing left over lock from ${0##*/}"
+ msg "Removing left over lock from %s" "${0##*/}"
script_unlock
fi
rm -rf "$WORKDIR"
- [ "$1" ] && exit $1
-}
-abort() {
- msg 'Aborting...'
- cleanup 0
-}
+ if (( REPO_MODIFIED )); then
+ date +%s > "${FTP_BASE}/lastupdate"
+ fi
-die() {
- error "$*"
- cleanup 1
+ [ "$1" ] && exit "$1"
}
trap abort INT QUIT TERM HUP
@@ -163,11 +111,11 @@ repo_lock () {
# This is the lock file used by repo-add and repo-remove
if [ -f "${DBLOCKFILE}" ]; then
- error "Repo [${1}] (${2}) is already locked by repo-{add,remove} process $(cat $DBLOCKFILE)"
+ error "Repo [%s] (%s) is already locked by repo-{add,remove} process %s" "$1" "$2" "$(<"$DBLOCKFILE")"
return 1
fi
if [ -f "${FILESLOCKFILE}" ]; then
- error "Repo [${1}] (${2}) is already locked by repo-{add,remove} process $(cat ${FILESLOCKFILE})"
+ error "Repo [%s] (%s) is already locked by repo-{add,remove} process %s" "$1" "$2" "$(<"$FILESLOCKFILE")"
return 1
fi
@@ -181,28 +129,28 @@ repo_lock () {
fi
_count=0
- while [ $_count -le $_trial ] || $_lockblock ; do
+ while [ "$_count" -le "$_trial" ] || "$_lockblock" ; do
if ! mkdir "$LOCKDIR" >/dev/null 2>&1 ; then
- _owner="$(/usr/bin/stat -c %U $LOCKDIR)"
- warning "Repo [${1}] (${2}) is already locked by $_owner. "
- msg2 "Retrying in $LOCK_DELAY seconds..."
+ _owner="$(/usr/bin/stat -c %U "$LOCKDIR")"
+ warning "Repo [%s] (%s) is already locked by %s." "${1}" "${2}" "$_owner"
+ msg2 "Retrying in %d seconds..." "$LOCK_DELAY"
else
- LOCKS[${#LOCKS[*]}]="$1.$2"
+ LOCKS+=("$1.$2")
set_umask
return 0
fi
- sleep $LOCK_DELAY
+ sleep "$LOCK_DELAY"
let _count=$_count+1
done
- error "Repo [${1}] (${2}) is already locked by $_owner. Giving up!"
+ error "Repo [%s] (%s) is already locked by %s. Giving up!" "${1}" "${2}" "$_owner"
return 1
}
repo_unlock () { #repo_unlock <repo-name> <arch>
local LOCKDIR="$TMPDIR/.repolock.$1.$2"
if [ ! -d "$LOCKDIR" ]; then
- warning "Repo lock [${1}] (${2}) was not locked!"
+ warning "Repo lock [%s] (%s) was not locked!" "${1}" "${2}"
restore_umask
return 1
else
@@ -250,7 +198,7 @@ getpkgname() {
_name="$(_grep_pkginfo "$1" "pkgname")"
if [ -z "$_name" ]; then
- error "Package '$1' has no pkgname in the PKGINFO. Fail!"
+ error "Package '%s' has no pkgname in the PKGINFO. Fail!" "$1"
exit 1
fi
@@ -263,7 +211,7 @@ getpkgver() {
_ver="$(_grep_pkginfo "$1" "pkgver")"
if [ -z "$_ver" ]; then
- error "Package '$1' has no pkgver in the PKGINFO. Fail!"
+ error "Package '%s' has no pkgver in the PKGINFO. Fail!" "$1"
exit 1
fi
@@ -275,59 +223,66 @@ getpkgarch() {
_ver="$(_grep_pkginfo "$1" "arch")"
if [ -z "$_ver" ]; then
- error "Package '$1' has no arch in the PKGINFO. Fail!"
+ error "Package '%s' has no arch in the PKGINFO. Fail!" "$1"
exit 1
fi
echo "$_ver"
}
+check_packager() {
+ local _packager
+
+ _packager=$(_grep_pkginfo "$1" "packager")
+ [[ $_packager && $_packager != 'Unknown Packager' ]]
+}
+
getpkgfile() {
if [[ ${#} -ne 1 ]]; then
error 'No canonical package found!'
exit 1
elif [ ! -f "${1}" ]; then
- error "Package ${1} not found!"
+ error "Package %s not found!" "${1}"
exit 1
- elif ${REQUIRE_SIGNATURE} && [ ! -f "${1}.sig" ]; then
- error "Package signature ${1}.sig not found!"
+ elif "${REQUIRE_SIGNATURE}" && [ ! -f "${1}.sig" ]; then
+ error "Package signature %s not found!" "${1}.sig"
exit 1
fi
- echo ${1}
+ echo "${1}"
}
getpkgfiles() {
local f
- if [ ! -z "$(echo ${@%\.*} | sed "s/ /\n/g" | sort | uniq -D)" ]; then
+ if [ ! -z "$(printf '%s\n' "${@%\.*}" | sort | uniq -D)" ]; then
error 'Duplicate packages found!'
exit 1
fi
- for f in ${@}; do
+ for f in "${@}"; do
if [ ! -f "${f}" ]; then
- error "Package ${f} not found!"
+ error "Package %s not found!" "${f}"
exit 1
- elif ${REQUIRE_SIGNATURE} && [ ! -f "${f}.sig" ]; then
- error "Package signature ${f}.sig not found!"
+ elif "${REQUIRE_SIGNATURE}" && [ ! -f "${f}.sig" ]; then
+ error "Package signature %s not found!" "${f}.sig"
exit 1
fi
done
- echo ${@}
+ echo "${@}"
}
check_pkgfile() {
local pkgfile=$1
- local pkgname="$(getpkgname ${pkgfile})"
+ local pkgname="$(getpkgname "${pkgfile}")"
[ $? -ge 1 ] && return 1
- local pkgver="$(getpkgver ${pkgfile})"
+ local pkgver="$(getpkgver "${pkgfile}")"
[ $? -ge 1 ] && return 1
- local pkgarch="$(getpkgarch ${pkgfile})"
+ local pkgarch="$(getpkgarch "${pkgfile}")"
[ $? -ge 1 ] && return 1
- in_array "${pkgarch}" ${ARCHES[@]} 'any' || return 1
+ in_array "${pkgarch}" "${ARCHES[@]}" 'any' || return 1
if echo "${pkgfile##*/}" | grep -q "${pkgname}-${pkgver}-${pkgarch}"; then
return 0
@@ -336,32 +291,25 @@ check_pkgfile() {
fi
}
-check_pkgsvn() {
+check_pkgxbs() {
local pkgfile="${1}"
- local _pkgbase="$(getpkgbase ${pkgfile})"
+ local _pkgbase="$(getpkgbase "${pkgfile}")"
[ $? -ge 1 ] && return 1
- local _pkgname="$(getpkgname ${pkgfile})"
+ local _pkgname="$(getpkgname "${pkgfile}")"
[ $? -ge 1 ] && return 1
- local _pkgver="$(getpkgver ${pkgfile})"
+ local _pkgver="$(getpkgver "${pkgfile}")"
[ $? -ge 1 ] && return 1
- local _pkgarch="$(getpkgarch ${pkgfile})"
+ local _pkgarch="$(getpkgarch "${pkgfile}")"
[ $? -ge 1 ] && return 1
local repo="${2}"
- in_array "${repo}" ${PKGREPOS[@]} || return 1
+ in_array "${repo}" "${PKGREPOS[@]}" || return 1
- if [ ! -f "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" ]; then
- mkdir -p "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}"
- arch_svn export -q "${SVNREPO}/${_pkgbase}/repos/${repo}-${_pkgarch}/PKGBUILD" \
- "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" >/dev/null
- [ $? -ge 1 ] && return 1
- fi
-
- local svnver="$(. "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}"; echo $(get_full_version ${epoch:-0} ${pkgver} ${pkgrel}) )"
- [ "${svnver}" == "${_pkgver}" ] || return 1
+ local xbsver="$(. "$(xbs releasepath "${_pkgbase}" "${repo}" "${_pkgarch}")/PKGBUILD"; get_full_version "${_pkgname}")"
+ [ "${xbsver}" == "${_pkgver}" ] || return 1
- local svnnames=($(. "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}"; echo ${pkgname[@]}))
- in_array "${_pkgname}" ${svnnames[@]} || return 1
+ local xbsnames=($(. "$(xbs releasepath "${_pkgbase}" "${repo}" "${_pkgarch}")/PKGBUILD"; echo "${pkgname[@]}"))
+ in_array "${_pkgname}" "${xbsnames[@]}" || return 1
return 0
}
@@ -369,41 +317,32 @@ check_pkgsvn() {
check_splitpkgs() {
local repo="${1}"
shift
- local pkgfiles=(${@})
+ local pkgfiles=("${@}")
local pkgfile
local pkgdir
- local svnname
+ local xbsname
mkdir -p "${WORKDIR}/check_splitpkgs/"
pushd "${WORKDIR}/check_splitpkgs" >/dev/null
- for pkgfile in ${pkgfiles[@]}; do
+ for pkgfile in "${pkgfiles[@]}"; do
issplitpkg "${pkgfile}" || continue
- local _pkgbase="$(getpkgbase ${pkgfile})"
- local _pkgname="$(getpkgname ${pkgfile})"
- local _pkgarch="$(getpkgarch ${pkgfile})"
+ local _pkgbase="$(getpkgbase "${pkgfile}")"
+ local _pkgname="$(getpkgname "${pkgfile}")"
+ local _pkgarch="$(getpkgarch "${pkgfile}")"
mkdir -p "${repo}/${_pkgarch}/${_pkgbase}"
echo "${_pkgname}" >> "${repo}/${_pkgarch}/${_pkgbase}/staging"
- if [ ! -f "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" ]; then
- mkdir -p "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}"
- arch_svn export -q "${SVNREPO}/${_pkgbase}/repos/${repo}-${_pkgarch}/PKGBUILD" \
- "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" >/dev/null
- [ $? -ge 1 ] && return 1
- fi
-
- local svnnames=($(. "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}"; echo ${pkgname[@]}))
- for svnname in ${svnnames[@]}; do
- echo "${svnname}" >> "${repo}/${_pkgarch}/${_pkgbase}/svn"
- done
+ local xbsnames=($(. "$(xbs releasepath "${_pkgbase}" "${repo}" "${_pkgarch}")/PKGBUILD"; echo "${pkgname[@]}"))
+ printf '%s\n' "${xbsnames[@]}" >> "${repo}/${_pkgarch}/${_pkgbase}/xbs"
done
popd >/dev/null
for pkgdir in "${WORKDIR}/check_splitpkgs/${repo}"/*/*; do
[ ! -d "${pkgdir}" ] && continue
sort -u "${pkgdir}/staging" -o "${pkgdir}/staging"
- sort -u "${pkgdir}/svn" -o "${pkgdir}/svn"
- if [ ! -z "$(comm -13 "${pkgdir}/staging" "${pkgdir}/svn")" ]; then
+ sort -u "${pkgdir}/xbs" -o "${pkgdir}/xbs"
+ if [ ! -z "$(comm -13 "${pkgdir}/staging" "${pkgdir}/xbs")" ]; then
return 1
fi
done
@@ -414,11 +353,11 @@ check_splitpkgs() {
check_pkgrepos() {
local pkgfile=$1
- local pkgname="$(getpkgname ${pkgfile})"
+ local pkgname="$(getpkgname "${pkgfile}")"
[ $? -ge 1 ] && return 1
- local pkgver="$(getpkgver ${pkgfile})"
+ local pkgver="$(getpkgver "${pkgfile}")"
[ $? -ge 1 ] && return 1
- local pkgarch="$(getpkgarch ${pkgfile})"
+ local pkgarch="$(getpkgarch "${pkgfile}")"
[ $? -ge 1 ] && return 1
[ -f "${FTP_BASE}/${PKGPOOL}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT} ] && return 1
@@ -432,8 +371,8 @@ check_pkgrepos() {
#usage: chk_license ${license[@]}"
chk_license() {
local l
- for l in ${@}; do
- in_array ${l} ${ALLOWED_LICENSES[@]} && return 0
+ for l in "${@}"; do
+ in_array "${l}" "${ALLOWED_LICENSES[@]}" && return 0
done
return 1
@@ -445,16 +384,16 @@ check_repo_permission() {
[ ${#PKGREPOS[@]} -eq 0 ] && return 1
[ -z "${PKGPOOL}" ] && return 1
- in_array "${repo}" ${PKGREPOS[@]} || return 1
+ in_array "${repo}" "${PKGREPOS[@]}" || return 1
[ -w "$FTP_BASE/${PKGPOOL}" ] || return 1
local arch
- for arch in ${ARCHES}; do
+ for arch in "${ARCHES[@]}"; do
local dir="${FTP_BASE}/${repo}/os/${arch}/"
[ -w "${dir}" ] || return 1
- [ -f "${dir}"${repo}${DBEXT} -a ! -w "${dir}"${repo}${DBEXT} ] && return 1
- [ -f "${dir}"${repo}${FILESEXT} -a ! -w "${dir}"${repo}${FILESEXT} ] && return 1
+ [ -f "${dir}${repo}"${DBEXT} -a ! -w "${dir}${repo}"${DBEXT} ] && return 1
+ [ -f "${dir}${repo}"${FILESEXT} -a ! -w "${dir}${repo}"${FILESEXT} ] && return 1
done
return 0
@@ -468,52 +407,50 @@ set_repo_permission() {
if [ -w "${dbfile}" ]; then
local group=$(/usr/bin/stat --printf='%G' "$(dirname "${dbfile}")")
- chgrp $group "${dbfile}" || error "Could not change group of ${dbfile} to $group"
- chgrp $group "${filesfile}" || error "Could not change group of ${filesfile} to $group"
- chmod g+w "${dbfile}" || error "Could not set write permission for group $group to ${dbfile}"
- chmod g+w "${filesfile}" || error "Could not set write permission for group $group to ${filesfile}"
+ chgrp "$group" "${dbfile}" || error "Could not change group of %s to %s" "${dbfile}" "$group"
+ chgrp "$group" "${filesfile}" || error "Could not change group of %s to %s" "${filesfile}" "$group"
+ chmod g+w "${dbfile}" || error "Could not set write permission for group %s to %s" "$group" "${dbfile}"
+ chmod g+w "${filesfile}" || error "Could not set write permission for group %s to %s" "$group" "${filesfile}"
else
- error "You don't have permission to change ${dbfile}"
+ error "You don't have permission to change %s" "${dbfile}"
fi
}
arch_repo_add() {
local repo=$1
local arch=$2
- local pkgs=(${@:3})
+ local pkgs=("${@:3}")
+ printf -v pkgs_str -- '%q ' "${pkgs[@]}"
# package files might be relative to repo dir
pushd "${FTP_BASE}/${repo}/os/${arch}" >/dev/null
- /usr/bin/repo-add -q "${repo}${DBEXT}" ${pkgs[@]} \
- || error "repo-add ${repo}${DBEXT} ${pkgs[@]}"
- /usr/bin/repo-add -f -q "${repo}${FILESEXT}" ${pkgs[@]} \
- || error "repo-add -f ${repo}${FILESEXT} ${pkgs[@]}"
+ /usr/bin/repo-add -q "${repo}${DBEXT}" "${pkgs[@]}" \
+ || error 'repo-add %q %s' "${repo}${DBEXT}" "${pkgs_str% }"
+ /usr/bin/repo-add -f -q "${repo}${FILESEXT}" "${pkgs[@]}" \
+ || error 'repo-add -f %q %s' "${repo}${FILESEXT}" "${pkgs_str% }"
popd >/dev/null
set_repo_permission "${repo}" "${arch}"
+
+ REPO_MODIFIED=1
}
arch_repo_remove() {
local repo=$1
local arch=$2
- local pkgs=(${@:3})
+ local pkgs=("${@:3}")
local dbfile="${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}"
local filesfile="${FTP_BASE}/${repo}/os/${arch}/${repo}${FILESEXT}"
if [ ! -f "${dbfile}" ]; then
- error "No database found at '${dbfile}'"
+ error "No database found at '%s'" "${dbfile}"
return 1
fi
- /usr/bin/repo-remove -q "${dbfile}" ${pkgs[@]} \
- || error "repo-remove ${dbfile} ${pkgs[@]}"
- /usr/bin/repo-remove -q "${filesfile}" ${pkgs[@]} \
- || error "repo-remove ${filesfile} ${pkgs[@]}"
+ printf -v pkgs_str -- '%q ' "${pkgs[@]}"
+ /usr/bin/repo-remove -q "${dbfile}" "${pkgs[@]}" \
+ || error 'repo-remove %q %s' "${dbfile}" "${pkgs_str% }"
+ /usr/bin/repo-remove -q "${filesfile}" "${pkgs[@]}" \
+ || error 'repo-remove %q %s' "${filesfile}" "${pkgs_str% }"
set_repo_permission "${repo}" "${arch}"
-}
-arch_svn() {
- if [ -z "${SVNUSER}" ]; then
- /usr/bin/svn "${@}"
- else
- sudo -u "${SVNUSER}" -- /usr/bin/svn --username "${USER}" "${@}"
- fi
+ REPO_MODIFIED=1
}
diff --git a/db-import b/db-import
new file mode 100755
index 0000000..a8a073d
--- /dev/null
+++ b/db-import
@@ -0,0 +1,288 @@
+#!/bin/bash
+set -euE
+# Imports Arch-like repos, running them through a blacklist
+# License: GPLv3
+
+. "$(dirname "$(readlink -e "$0")")/config" # for: FTP_BASE DBEXT
+. "$(dirname "$(readlink -e "$0")")/db-import.conf" # for: IMPORTDIR IMPORTS
+. "$(librelib messages)"
+. "$(librelib blacklist)"
+
+# DBs = pacman DataBases
+
+# This replaces two scripts:
+# - abslibre : imported ABS tree from Arch
+# - db-sync : imported pacman DBs from Arch
+
+# The flow here is:
+# 1. "${IMPORTDIR}/cache/${name}/dbs/" # Download the pacman databases
+# 2. "${IMPORTDIR}/cache/${name}/abs/" # Download the ABS tree
+# 3. "${IMPORTDIR}/clean/${name}/dbs/" # Run the pacman DBs through the blacklist
+# 4. "${IMPORTDIR}/clean/${name}/pkgs/" # Download the pkg files mentioned in "clean/${name}/dbs/"
+# 5. "${IMPORTDIR}/staging/${tag}" # Copy all the package files we just downloaded to here
+# 6. Run `db-update` with STAGING="${IMPORTDIR}/staging/${tag}"
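+#
+# For example (hypothetical values), with name=archlinux and tag=core-x86_64
+# the cleaned db would land in "${IMPORTDIR}/clean/archlinux/dbs/core/os/x86_64/"
+# (see db_file below) and the downloaded package files in
+# "${IMPORTDIR}/clean/archlinux/pkgs/core-x86_64/".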
+
+# generic arguments to pass to rsync, borrowed from `abs`
+SYNCARGS='-mrtvlH --no-motd --no-p --no-o --no-g'
+
+main() {
+ blacklist-update
+
+ local importStr
+ for importStr in "${IMPORTS[@]}"; do
+ local importAry=($importStr)
+ local name=${importAry[0]}
+ local pkgmirror=${importAry[1]}
+ local absmirror=${importAry[2]}
+ local tags=("${importAry[@]:3}")
+
+ msg "Fetching remote package source: %s" "$name"
+ fetch_dbs "$name" "$pkgmirror"
+ fetch_abs "$name" "$absmirror" "${tags[@]}"
+ msg "Filtering blacklisted packages from remote package source: %s" "$name"
+ clean_dbs "$name" "${tags[@]}"
+ fetch_pkgs "$name" "${tags[@]}"
+ msg "Publishing changes from remote package source: %s" "$name"
+ publish "$name" "${tags[@]}"
+ done
+	return 0
+}
+
+fetch_dbs() {
+ local name=$1
+ local pkgmirror=$2
+
+ msg2 'Synchronizing package databases...'
+
+ mkdir -p -- "${IMPORTDIR}/cache/${name}/dbs"
+ # Grab just the .db files from $pkgmirror
+ rsync $SYNCARGS --delete-after \
+ --include="*/" \
+ --include="*.db" \
+ --include="*${DBEXT}" \
+ --exclude="*" \
+ "rsync://${pkgmirror}/" "${IMPORTDIR}/cache/${name}/dbs"
+}
+
+fetch_abs() {
+ local name=$1
+ local absmirror=$2
+ local tags=("${@:3}")
+
+ local fake_home
+ local absroot
+
+ # Sync the ABS tree from $absmirror
+ local arch
+ for arch in $(list_arches "${tags[@]}"); do
+ msg2 'Synchronizing %s ABS tree...' "$arch"
+
+ absroot="${IMPORTDIR}/cache/${name}/abs/${arch}"
+ mkdir -p -- "$absroot"
+
+ # Configure `abs` for this mirror
+ fake_home="${IMPORTDIR}/homes/${name}/${arch}"
+ mkdir -p -- "$fake_home"
+ {
+ printf "ABSROOT='%s'\n" "$absroot"
+ printf "SYNCSERVER='%s'\n" "$absmirror"
+ printf "ARCH='%s'\n" "$arch"
+ printf 'REPOS=(\n'
+ list_repos "$arch" "${tags[@]}"
+ printf ')\n'
+ } > "${fake_home}/.abs.conf"
+
+ # Run `abs`
+ HOME=$fake_home abs
+ done
+}
+
+clean_dbs() {
+ local name=$1
+ local tags=("${@:2}")
+
+ rm -rf -- "${IMPORTDIR}/clean/$name"
+
+ local tag
+ for tag in "${tags[@]}"; do
+ msg2 'Creating clean version of %s package database...' "$tag"
+
+ local cache="${IMPORTDIR}/cache/$name/dbs/$(db_file "$tag")"
+ local clean="${IMPORTDIR}/clean/$name/dbs/$(db_file "$tag")"
+ install -Dm644 "$cache" "$clean"
+
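+		# blacklist-cat and blacklist-get-pkg are assumed to come from
+		# `librelib blacklist` (sourced at the top); the pipeline below feeds
+		# one blacklisted pkgname per line to repo-remove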
+		blacklist-cat | blacklist-get-pkg | xargs -d '\n' -r repo-remove "$clean"
+ done
+}
+
+fetch_pkgs() {
+ local name=$1
+ local tags=("${@:2}")
+
+ local repo arch dbfile whitelist
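+	# note: "$pkgmirror" used below is main()'s local, visible here via
+	# bash's dynamic scoping (fetch_pkgs is only called from main)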
+
+ local tag
+ for tag in "${tags[@]}"; do
+		msg2 'Synchronizing package files for %s...' "$tag"
+ repo=${tag%-*}
+ arch=${tag##*-}
+
+ dbfile="${IMPORTDIR}/clean/$name/dbs/$(db_file "$tag")"
+ whitelist="${IMPORTDIR}/clean/$name/dbs/$tag.whitelist"
+
+ list_pkgs "$dbfile" > "$whitelist"
+
+ # fetch the architecture-specific packages
+ rsync $SYNCARGS --delete-after --delete-excluded \
+ --delay-updates \
+			--include-from=<(sed "s|\$|-$arch.pkg.tar.?z|" "$whitelist") \
+ --exclude='*' \
+ "rsync://${pkgmirror}/$(db_dir "$tag")/" \
+ "${IMPORTDIR}/clean/${name}/pkgs/${tag}/"
+
+ # fetch the architecture-independent packages
+ rsync $SYNCARGS --delete-after --delete-excluded \
+ --delay-updates \
+			--include-from=<(sed "s|\$|-any.pkg.tar.?z|" "$whitelist") \
+ --exclude='*' \
+ "rsync://${pkgmirror}/$(db_dir "$tag")/" \
+ "${IMPORTDIR}/clean/${name}/pkgs/${repo}-any/"
+ done
+}
+
+publish() {
+ local name=$1
+ local tags=("${@:2}")
+
+ local tag
+ for tag in "${tags[@]}"; do
+ msg2 'Publishing changes to %s...' "$tag"
+ publish_tag "$name" "$tag"
+ done
+}
+
+publish_tag() {
+ local name=$1
+ local tag=$2
+
+ local repo=${tag%-*}
+ local arch=${tag##*-}
+ local dir="${IMPORTDIR}/clean/${name}/pkgs/${tag}"
+
+ local found
+ local error=false
+ local files=()
+
+ local pkgid pkgarch
+ for pkgid in $(list_added_pkgs "$name" "$tag"); do
+ found=false
+
+ for pkgarch in "${arch}" any; do
+			# expand the "?z" glob via an array assignment; use ${pkgarch}
+			# (not ${arch}) so the 'any' fallback is actually tried
+			file=("${dir}/${pkgid}-${pkgarch}".pkg.tar.?z)
+			if ! $found && [[ -r ${file[0]} ]]; then
+				files+=("${file[0]}")
+ found=true
+ fi
+ done
+
+ if ! $found; then
+ error 'Could not find package file for %s' "$pkgid"
+ error=true
+ fi
+ done
+
+ if $error; then
+ error 'Quitting...'
+ return 1
+ fi
+
+ mkdir -p -- "${IMPORTDIR}/staging/${tag}/${repo}"
+ cp -al -- "${files[@]}" "${IMPORTDIR}/staging/${tag}/${repo}/"
+ STAGING="${IMPORTDIR}/staging/${tag}" db-update
+
+ # XXX: db-remove wants pkgbase, not pkgname
+	list_removed_pkgs "$name" "$tag" | xargs -d '\n' -r db-remove "$repo" "$arch"
+}
+
+################################################################################
+
+# Usage: list_arches repo-arch...
+# Returns a list of the architectures mentioned in a list of "repo-arch" pairs.
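+# e.g. `list_arches core-i686 core-x86_64 extra-x86_64` prints "i686" and "x86_64"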
+list_arches() {
+ local tags=("$@")
+ printf '%s\n' "${tags[@]##*-}" | sort -u
+}
+
+# Usage: list_repos arch repo-arch...
+# Returns a list of all the repositories mentioned for a given architecture in a
+# list of "repo-arch" pairs.
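+# e.g. `list_repos x86_64 core-i686 core-x86_64 extra-x86_64` prints "core" and "extra"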
+list_repos() {
+ local arch=$1
+ local tags=("${@:2}")
+ printf '%s\n' "${tags[@]}" | sed -n "s/-$arch\$//p"
+}
+
+# Usage: db_dir repo-arch
+db_dir() {
+ local tag=$1
+ local repo=${tag%-*}
+ local arch=${tag##*-}
+ echo "${repo}/os/${arch}"
+}
+
+# Usage; db_file repo-arch
+db_file() {
+ local tag=$1
+ local repo=${tag%-*}
+ local arch=${tag##*-}
+ echo "${repo}/os/${arch}/${repo}${DBEXT}"
+}
+
+# Usage: list_pkgs dbfile
+# Prints "$pkgname-$(get_full_version "$pkgname")" for every package in $dbfile
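+# e.g. a db entry "foo-1.0-1/desc" (hypothetical) contributes "foo-1.0-1"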
+list_pkgs() {
+ local dbfile=$1
+ bsdtar tf "$dbfile" | cut -d/ -f1
+}
+
+# Usage: list_pkgs | sep_ver
+# Separates the pkgname from the version (replaces the '-' with ' ') for the
+# list provided on stdin.
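+# e.g. "foo-1.0-1" (hypothetical pkgid) becomes "foo 1.0-1"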
+sep_ver() {
+ sed -r 's/-([^-]*-[^-]*)$/ \1/'
+}
+
+# Usage: list_removed_pkgs importsrc repo-arch
+# Prints "$pkgname-$(get_full_version "$pkgname")" for every removed package.
+list_removed_pkgs() {
+ local name=$1
+ local tag=$2
+
+ local old="${FTP_BASE}/$(db_file "$tag")"
+ local new="${IMPORTDIR}/clean/$name/dbs/$(db_file "$tag")"
+
+ # make a list of:
+ # pkgname oldver[ newver]
+ # It will include removed or updated packages (changed packages)
+	join -a1 \
+		<(list_pkgs "$old"|sep_ver|sort) \
+		<(list_pkgs "$new"|sep_ver|sort) |
+		grep -v ' .* ' | # remove updated packages
+		sed 's/ /-/'     # re-combine the pkgname and version
+}
+
+# Usage: list_added_pkgs importsrc repo-arch
+# (slightly a misnomer: "added" here also covers updated packages)
+# Prints "$pkgname-$(get_full_version "$pkgname")" for every added or updated
+# package.
+list_added_pkgs() {
+ local name=$1
+ local tag=$2
+
+ local old="${FTP_BASE}/$(db_file "$tag")"
+ local new="${IMPORTDIR}/clean/$name/dbs/$(db_file "$tag")"
+
+ comm -13 <(list_pkgs "$old") <(list_pkgs "$new")
+}
+
+main "$@"
diff --git a/db-import.conf b/db-import.conf
new file mode 100644
index 0000000..aaf7b0f
--- /dev/null
+++ b/db-import.conf
@@ -0,0 +1,23 @@
+#!/hint/bash
+
+IMPORTDIR=/srv/repo/import
+
+case "$USER" in
+ db-import-packages)
+ _archrepos=(
+ {core,extra,testing,staging}-{i686,x86_64}
+ {gnome,kde}-unstable-{i686,x86_64}
+ );;
+ db-import-community)
+ _archrepos=(
+ community{,-testing,-staging}-{i686,x86_64}
+ multilib{,-testing,-staging}-x86_64
+ );;
+esac
+
+_archpkgmirror=$(db-pick-mirror rsync https://www.archlinux.org/mirrors/status/tier/1/json/)
+
+# name pkgmirror absmirror repo-arch...
+IMPORTS=("archlinux ${_archpkgmirror} rsync.archlinux.org ${_archrepos[*]}")
+
+unset _archrepos _archpkgmirror
diff --git a/db-init b/db-init
new file mode 100755
index 0000000..e25dbff
--- /dev/null
+++ b/db-init
@@ -0,0 +1,6 @@
+#!/bin/bash
+# Creates the repo structure defined in config
+
+source "$(dirname "$(readlink -e "$0")")/config"
+
+mkdir -p -- "${FTP_BASE}"/{"${PKGPOOL}","${SRCPOOL}"} "${CLEANUP_DESTDIR}" "${SOURCE_CLEANUP_DESTDIR}" "${STAGING}"
diff --git a/db-list-nonfree.py b/db-list-nonfree.py
new file mode 100755
index 0000000..a486fa5
--- /dev/null
+++ b/db-list-nonfree.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python2
+# -*- coding: utf-8 -*-
+from filter import *
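+# listado() and get_pkginfo_from_db() used below are assumed to come from
+# the repo-local filter module imported above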
+import argparse
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ prog="nonfree_in_db",
+        description="List blacklisted (nonfree) packages found in a repo database",)
+
+ parser.add_argument("-k", "--blacklist-file", type=str,
+ help="File containing blacklisted names",
+ required=True,)
+
+ parser.add_argument("-b", "--database", type=str,
+                        help="database to check",
+ required=True,)
+
+ args=parser.parse_args()
+
+ if not (args.blacklist_file and args.database):
+ parser.print_help()
+ exit(1)
+
+ blacklist=listado(args.blacklist_file)
+ pkgs=get_pkginfo_from_db(args.database)
+
+ print(" ".join([pkg["name"] for pkg in pkgs if pkg["name"] in blacklist]))
diff --git a/db-list-unsigned-packages b/db-list-unsigned-packages
new file mode 100755
index 0000000..095e1e6
--- /dev/null
+++ b/db-list-unsigned-packages
@@ -0,0 +1,38 @@
+#!/bin/bash
+# Copyright (C) 2012 Michał Masłowski <mtjm@mtjm.eu>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+set -e
+
+# Output a list of repo/package-name-and-version pairs representing
+# unsigned packages available for architecture $1 and specified for
+# architecture $2 (usually $1 or any, default is to list all).
+
+. "$(dirname "$(readlink -e "$0")")/config"
+. "$(dirname "$(readlink -e "$0")")/db-functions"
+
+if [ $# -lt 1 ]; then
+ msg "usage: %s <architecture>" "${0##*/}"
+ exit 1
+fi
+
+arch=$1
+shift
+
+for repo in "${PKGREPOS[@]}"
+do
+ db="${FTP_BASE}/${repo}/os/${arch}/${repo}.db"
+ [ -f "$db" ] && "$(dirname "$(readlink -e "$0")")/db-list-unsigned-packages.py" "$repo" "$@" < "$db"
+done
diff --git a/db-list-unsigned-packages.py b/db-list-unsigned-packages.py
new file mode 100755
index 0000000..80cff51
--- /dev/null
+++ b/db-list-unsigned-packages.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python3
+# Copyright (C) 2012 Michał Masłowski <mtjm@mtjm.eu>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+
+"""
+Output a list of repo/package-name-and-version pairs representing
+unsigned packages in the database at standard input of repo named in
+the first argument and specified for architectures listed in the
+following arguments (usually the one of the database or any, default
+is to list all).
+
+If the --keyset argument is passed, print the key fingerprint of every
+signed package.
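+
+Hypothetical invocation (reading a repo db from stdin):
+
+    db-list-unsigned-packages.py core x86_64 any < core.db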
+"""
+
+
+import base64
+import subprocess
+import sys
+import tarfile
+
+
+def main():
+ """Do the job."""
+ check_keys = False
+ if "--keyset" in sys.argv:
+ sys.argv.remove("--keyset")
+ check_keys = True
+ repo = sys.argv[1]
+ pkgarches = frozenset(name.encode("utf-8") for name in sys.argv[2:])
+ packages = []
+ keys = []
+ with tarfile.open(fileobj=sys.stdin.buffer) as archive:
+ for entry in archive:
+ if entry.name.endswith("/desc"):
+ content = archive.extractfile(entry)
+ skip = False
+ is_arch = False
+ key = None
+ for line in content:
+ if is_arch:
+ is_arch = False
+ if pkgarches and line.strip() not in pkgarches:
+ skip = True # different architecture
+ break
+ if line == b"%PGPSIG%\n":
+ skip = True # signed
+ key = b""
+ if check_keys:
+ continue
+ else:
+ break
+ if line == b"%ARCH%\n":
+ is_arch = True
+ continue
+ if key is not None:
+ if line.strip():
+ key += line.strip()
+ else:
+ break
+ if check_keys and key:
+ key_binary = base64.b64decode(key)
+ keys.append(key_binary)
+ packages.append(repo + "/" + entry.name[:-5])
+ if skip:
+ continue
+ print(repo + "/" + entry.name[:-5])
+ if check_keys and keys:
+ # We have collected all signed package names in packages and
+ # all keys in keys. Let's now ask gpg to list all signatures
+ # and find which keys made them.
+ packets = subprocess.check_output(("gpg", "--list-packets"),
+ input=b"".join(keys))
+ i = 0
+ for line in packets.decode("latin1").split("\n"):
+ if line.startswith(":signature packet:"):
+ keyid = line[line.index("keyid ") + len("keyid "):]
+ print(packages[i], keyid)
+ i += 1
+
+
+if __name__ == "__main__":
+ main()
diff --git a/db-move b/db-move
index 1fa44d4..89a0ad6 100755
--- a/db-move
+++ b/db-move
@@ -1,106 +1,88 @@
#!/bin/bash
-. "$(dirname $0)/config"
-. "$(dirname $0)/db-functions"
+. "$(dirname "$(readlink -e "$0")")/config"
+. "$(dirname "$(readlink -e "$0")")/db-functions"
if [ $# -lt 3 ]; then
- msg "usage: ${0##*/} <repo-from> <repo-to> <pkgname|pkgbase> ..."
+ msg "usage: %s <repo-from> <repo-to> <pkgname|pkgbase> ..." "${0##*/}"
exit 1
fi
-args=(${@})
+args=("${@}")
repo_from="${args[0]}"
repo_to="${args[1]}"
ftppath_from="${FTP_BASE}/${repo_from}/os/"
ftppath_to="${FTP_BASE}/${repo_to}/os/"
-if ! check_repo_permission $repo_to || ! check_repo_permission $repo_from; then
- die "You don't have permission to move packages from ${repo_from} to ${repo_to}"
+if ! check_repo_permission "$repo_to" || ! check_repo_permission "$repo_from"; then
+ die "You don't have permission to move packages from %s to %s" "${repo_from}" "${repo_to}"
fi
# TODO: this might lock too much (architectures)
-for pkgarch in ${ARCHES[@]}; do
- repo_lock ${repo_to} ${pkgarch} || exit 1
- repo_lock ${repo_from} ${pkgarch} || exit 1
+for pkgarch in "${ARCHES[@]}"; do
+ repo_lock "${repo_to}" "${pkgarch}" || exit 1
+ repo_lock "${repo_from}" "${pkgarch}" || exit 1
done
-# check if packages to be moved exist in svn and ftp dir
-arch_svn checkout -q -N "${SVNREPO}" "${WORKDIR}/svn" >/dev/null
-for pkgbase in ${args[@]:2}; do
- arch_svn up -q "${WORKDIR}/svn/${pkgbase}" >/dev/null
- for pkgarch in ${ARCHES[@]} 'any'; do
- svnrepo_from="${WORKDIR}/svn/${pkgbase}/repos/${repo_from}-${pkgarch}"
- if [ -r "${svnrepo_from}/PKGBUILD" ]; then
- pkgnames=($(. "${svnrepo_from}/PKGBUILD"; echo ${pkgname[@]}))
+# First loop is to check that all necessary files exist
+for pkgbase in "${args[@]:2}"; do
+ for pkgarch in "${ARCHES[@]}" 'any'; do
+ xbsrepo_from="$(xbs releasepath "${pkgbase}" "${repo_from}" "${pkgarch}")"
+ if [ -r "${xbsrepo_from}/PKGBUILD" ]; then
+ pkgnames=($(. "${xbsrepo_from}/PKGBUILD"; echo "${pkgname[@]}"))
if [ ${#pkgnames[@]} -lt 1 ]; then
die "Could not read pkgname"
fi
- pkgver=$(. "${svnrepo_from}/PKGBUILD"; echo $(get_full_version ${epoch:-0} ${pkgver} ${pkgrel}))
- if [ -z "${pkgver}" ]; then
- die "Could not read pkgver"
- fi
-
if [ "${pkgarch}" == 'any' ]; then
- tarches=(${ARCHES[@]})
+ tarches=("${ARCHES[@]}")
else
tarches=("${pkgarch}")
fi
- for pkgname in ${pkgnames[@]}; do
- for tarch in ${tarches[@]}; do
- getpkgfile "${ftppath_from}/${tarch}/"${pkgname}-${pkgver}-${pkgarch}${PKGEXT} >/dev/null
+ for pkgname in "${pkgnames[@]}"; do
+ pkgver=$(. "${xbsrepo_from}/PKGBUILD"; get_full_version "${pkgname}")
+ if [ -z "${pkgver}" ]; then
+ die "Could not read pkgver"
+ fi
+ for tarch in "${tarches[@]}"; do
+ getpkgfile "${ftppath_from}/${tarch}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT} >/dev/null
done
done
continue 2
fi
done
- die "${pkgbase} not found in ${repo_from}"
+ die "%s not found in %s" "${pkgbase}" "${repo_from}"
done
-msg "Moving packages from [${repo_from}] to [${repo_to}]..."
+msg "Moving packages from [%s] to [%s]..." "${repo_from}" "${repo_to}"
declare -A add_pkgs
declare -A remove_pkgs
-for pkgbase in ${args[@]:2}; do
- tag_list=""
- for pkgarch in ${ARCHES[@]} 'any'; do
- svnrepo_from="${WORKDIR}/svn/${pkgbase}/repos/${repo_from}-${pkgarch}"
- svnrepo_to="${WORKDIR}/svn/${pkgbase}/repos/${repo_to}-${pkgarch}"
-
- if [ -f "${svnrepo_from}/PKGBUILD" ]; then
+for pkgbase in "${args[@]:2}"; do
+ # move the package in xbs
+ arches=($(xbs move "${repo_from}" "${repo_to}" "${pkgbase}"))
+ # move the package in ftp
+ for pkgarch in "${arches[@]}"; do
+ xbsrepo_to="$(xbs releasepath "$pkgbase" "$repo_to" "$pkgarch")"
+ if true; then # to add an indent level to make merging easier
if [ "${pkgarch}" == 'any' ]; then
- tarches=(${ARCHES[@]})
+ tarches=("${ARCHES[@]}")
else
tarches=("${pkgarch}")
fi
- msg2 "${pkgbase} ($(echo ${tarches[@]}))"
- pkgnames=($(. "${svnrepo_from}/PKGBUILD"; echo ${pkgname[@]}))
- pkgver=$(. "${svnrepo_from}/PKGBUILD"; echo $(get_full_version ${epoch:-0} ${pkgver} ${pkgrel}))
-
- if [ -d "${svnrepo_to}" ]; then
- for file in $(arch_svn ls "${svnrepo_to}"); do
- arch_svn rm -q "${svnrepo_to}/$file@"
- done
- else
- mkdir "${svnrepo_to}"
- arch_svn add -q "${svnrepo_to}"
- fi
-
- for file in $(arch_svn ls "${svnrepo_from}"); do
- arch_svn mv -q -r HEAD "${svnrepo_from}/$file@" "${svnrepo_to}/"
- done
- arch_svn rm --force -q "${svnrepo_from}"
- tag_list="$tag_list, $pkgarch"
+ msg2 "%s (%s)" "${pkgbase}" "${tarches[*]}"
+ pkgnames=($(. "${xbsrepo_to}/PKGBUILD"; echo "${pkgname[@]}"))
- for pkgname in ${pkgnames[@]}; do
- for tarch in ${tarches[@]}; do
- pkgpath=$(getpkgfile "${ftppath_from}/${tarch}/"${pkgname}-${pkgver}-${pkgarch}${PKGEXT})
+ for pkgname in "${pkgnames[@]}"; do
+ pkgver=$(. "${xbsrepo_to}/PKGBUILD"; get_full_version "${pkgname}")
+ for tarch in "${tarches[@]}"; do
+ pkgpath=$(getpkgfile "${ftppath_from}/${tarch}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT})
pkgfile="${pkgpath##*/}"
- ln -s "../../../${PKGPOOL}/${pkgfile}" ${ftppath_to}/${tarch}/
- if [ -f ${FTP_BASE}/${PKGPOOL}/${pkgfile}.sig ]; then
- ln -s "../../../${PKGPOOL}/${pkgfile}.sig" ${ftppath_to}/${tarch}/
+ ln -s "../../../${PKGPOOL}/${pkgfile}" "${ftppath_to}/${tarch}/"
+ if [ -f "${FTP_BASE}/${PKGPOOL}/${pkgfile}.sig" ]; then
+ ln -s "../../../${PKGPOOL}/${pkgfile}.sig" "${ftppath_to}/${tarch}/"
fi
add_pkgs[${tarch}]+="${FTP_BASE}/${PKGPOOL}/${pkgfile} "
remove_pkgs[${tarch}]+="${pkgname} "
@@ -108,18 +90,16 @@ for pkgbase in ${args[@]:2}; do
done
fi
done
- tag_list="${tag_list#, }"
- arch_svn commit -q "${WORKDIR}/svn/${pkgbase}" -m "${0##*/}: moved ${pkgbase} from [${repo_from}] to [${repo_to}] (${tag_list})"
done
-for tarch in ${ARCHES[@]}; do
+for tarch in "${ARCHES[@]}"; do
if [ -n "${add_pkgs[${tarch}]}" ]; then
arch_repo_add "${repo_to}" "${tarch}" ${add_pkgs[${tarch}]}
arch_repo_remove "${repo_from}" "${tarch}" ${remove_pkgs[${tarch}]}
fi
done
-for pkgarch in ${ARCHES[@]}; do
- repo_unlock ${repo_from} ${pkgarch}
- repo_unlock ${repo_to} ${pkgarch}
+for pkgarch in "${ARCHES[@]}"; do
+ repo_unlock "${repo_from}" "${pkgarch}"
+ repo_unlock "${repo_to}" "${pkgarch}"
done
diff --git a/db-pick-mirror b/db-pick-mirror
new file mode 100755
index 0000000..4d01b95
--- /dev/null
+++ b/db-pick-mirror
@@ -0,0 +1,25 @@
+#!/usr/bin/env ruby
+
+require 'json'
+require 'net/http'
+
+protocol = ARGV[0]
+jsonurl = ARGV[1]
+
+data = JSON::parse(Net::HTTP.get(URI(jsonurl)))
+
+if data["version"] != 3
+ warn "Data format version != 3"
+ exit 1
+end
+
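+# Each entry of data["urls"] is assumed (v3 schema) to look roughly like:
+#   {"url"=>"rsync://...", "protocol"=>"rsync", "delay"=>..., "completion_pct"=>..., "score"=>...}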
+# Filter out URLs with incomplete information
+urls = data["urls"].select{|a| a.none?{|k,v|v.nil?}}
+rsync_urls = urls.select{|a| a["protocol"]==protocol}
+
+# By score ( (delay+speed)/completion )
+#best = rsync_urls.sort{|a,b| (a["score"] || Float::INFINITY) <=> (b["score"] || Float::INFINITY) }.first
+# By delay/completion
+best = rsync_urls.sort{|a,b| a["delay"]/a["completion_pct"] <=> b["delay"]/b["completion_pct"] }.first
+
+puts best["url"]
diff --git a/db-remove b/db-remove
index 25cb9a7..dcbe4b4 100755
--- a/db-remove
+++ b/db-remove
@@ -1,52 +1,49 @@
#!/bin/bash
-. "$(dirname $0)/config"
-. "$(dirname $0)/db-functions"
+. "$(dirname "$(readlink -e "$0")")/config"
+. "$(dirname "$(readlink -e "$0")")/db-functions"
if [ $# -lt 3 ]; then
- msg "usage: ${0##*/} <repo> <arch> <pkgname|pkgbase> ..."
+ msg "usage: %s <repo> <arch> <pkgname|pkgbase> ..." "${0##*/}"
exit 1
fi
repo="$1"
arch="$2"
-pkgbases=(${@:3})
+pkgbases=("${@:3}")
-ftppath="$FTP_BASE/$repo/os"
-svnrepo="$repo-$arch"
-
-if ! check_repo_permission $repo; then
- die "You don't have permission to remove packages from ${repo}"
+if ! check_repo_permission "$repo"; then
+ die "You don't have permission to remove packages from %s" "${repo}"
fi
if [ "$arch" == "any" ]; then
- tarches=(${ARCHES[@]})
+ tarches=("${ARCHES[@]}")
else
tarches=("$arch")
fi
-for tarch in ${tarches[@]}; do
- repo_lock $repo $tarch || exit 1
+for tarch in "${tarches[@]}"; do
+ repo_lock "$repo" "$tarch" || exit 1
done
remove_pkgs=()
-for pkgbase in ${pkgbases[@]}; do
- msg "Removing $pkgbase from [$repo]..."
- arch_svn checkout -q "${SVNREPO}/${pkgbase}" "${WORKDIR}/svn/${pkgbase}" >/dev/null
-
- if [ -d "${WORKDIR}/svn/$pkgbase/repos/$svnrepo" ]; then
- remove_pkgs=(${remove_pkgs[@]} $(. "${WORKDIR}/svn/$pkgbase/repos/$svnrepo/PKGBUILD"; echo ${pkgname[@]}))
- arch_svn rm --force -q "${WORKDIR}/svn/$pkgbase/repos/$svnrepo"
- arch_svn commit -q "${WORKDIR}/svn/$pkgbase" -m "${0##*/}: $pkgbase removed by $(id -un)"
+for pkgbase in "${pkgbases[@]}"; do
+ msg "Removing %s from [%s]..." "$pkgbase" "$repo"
+
+ path="$(xbs releasepath "$pkgbase" "$repo" "$arch")"
+ if [ -d "$path" ]; then
+ remove_pkgs+=($(. "$path/PKGBUILD"; echo "${pkgname[@]}"))
+ xbs unrelease "$pkgbase" "$repo" "$arch"
else
- warning "$pkgbase not found in $svnrepo"
- warning "Removing only $pkgbase from the repo"
+ warning "%s not found in %s for %s" \
+ "$pkgbase" "$(xbs name)" "$repo-$arch"
+ warning "Removing only %s from the repo" "$pkgbase"
warning "If it was a split package you have to remove the others yourself!"
- remove_pkgs[${#remove_pkgs[*]}]=$pkgbase
+ remove_pkgs+=("$pkgbase")
fi
done
-for tarch in ${tarches[@]}; do
- arch_repo_remove "${repo}" "${tarch}" ${remove_pkgs[@]}
- repo_unlock $repo $tarch
+for tarch in "${tarches[@]}"; do
+ arch_repo_remove "${repo}" "${tarch}" "${remove_pkgs[@]}"
+ repo_unlock "$repo" "$tarch"
done
diff --git a/db-repo-add b/db-repo-add
index 5d5b653..4611bdf 100755
--- a/db-repo-add
+++ b/db-repo-add
@@ -1,41 +1,41 @@
#!/bin/bash
-. "$(dirname $0)/config"
-. "$(dirname $0)/db-functions"
+. "$(dirname "$(readlink -e "$0")")/config"
+. "$(dirname "$(readlink -e "$0")")/db-functions"
if [ $# -lt 3 ]; then
- msg "usage: ${0##*/} <repo> <arch> <pkgfile> ..."
+ msg "usage: %s <repo> <arch> <pkgfile> ..." "${0##*/}"
exit 1
fi
repo="$1"
arch="$2"
-pkgfiles=(${@:3})
+pkgfiles=("${@:3}")
ftppath="$FTP_BASE/$repo/os"
-if ! check_repo_permission $repo; then
- die "You don't have permission to add packages to ${repo}"
+if ! check_repo_permission "$repo"; then
+ die "You don't have permission to add packages to %s" "${repo}"
fi
if [ "$arch" == "any" ]; then
- tarches=(${ARCHES[@]})
+ tarches=("${ARCHES[@]}")
else
tarches=("$arch")
fi
-for tarch in ${tarches[@]}; do
- repo_lock $repo $tarch || exit 1
+for tarch in "${tarches[@]}"; do
+ repo_lock "$repo" "$tarch" || exit 1
done
-for tarch in ${tarches[@]}; do
- for pkgfile in ${pkgfiles[@]}; do
+for tarch in "${tarches[@]}"; do
+ for pkgfile in "${pkgfiles[@]}"; do
if [[ ! -f "${FTP_BASE}/${repo}/os/${arch}/${pkgfile##*/}" ]]; then
- die "Package file ${pkgfile##*/} not found in ${FTP_BASE}/${repo}/os/${arch}/"
+ die "Package file %s not found in %s" "${pkgfile##*/}" "${FTP_BASE}/${repo}/os/${arch}/"
else
- msg "Adding $pkgfile to [$repo]..."
+ msg "Adding %s to [%s]..." "$pkgfile" "$repo"
fi
done
- arch_repo_add "${repo}" "${tarch}" ${pkgfiles[@]}
- repo_unlock $repo $tarch
+ arch_repo_add "${repo}" "${tarch}" "${pkgfiles[@]}"
+ repo_unlock "$repo" "$tarch"
done
diff --git a/db-repo-remove b/db-repo-remove
index 2a693f4..aadc4ce 100755
--- a/db-repo-remove
+++ b/db-repo-remove
@@ -1,37 +1,37 @@
#!/bin/bash
-. "$(dirname $0)/config"
-. "$(dirname $0)/db-functions"
+. "$(dirname "$(readlink -e "$0")")/config"
+. "$(dirname "$(readlink -e "$0")")/db-functions"
if [ $# -lt 3 ]; then
- msg "usage: ${0##*/} <repo> <arch> <pkgname> ..."
+ msg "usage: %s <repo> <arch> <pkgname> ..." "${0##*/}"
exit 1
fi
repo="$1"
arch="$2"
-pkgnames=(${@:3})
+pkgnames=("${@:3}")
ftppath="$FTP_BASE/$repo/os"
-if ! check_repo_permission $repo; then
- die "You don't have permission to remove packages from ${repo}"
+if ! check_repo_permission "$repo"; then
+ die "You don't have permission to remove packages from %s" "${repo}"
fi
if [ "$arch" == "any" ]; then
- tarches=(${ARCHES[@]})
+ tarches=("${ARCHES[@]}")
else
tarches=("$arch")
fi
-for tarch in ${tarches[@]}; do
- repo_lock $repo $tarch || exit 1
+for tarch in "${tarches[@]}"; do
+ repo_lock "$repo" "$tarch" || exit 1
done
-for tarch in ${tarches[@]}; do
- for pkgname in ${pkgnames[@]}; do
- msg "Removing $pkgname from [$repo]..."
+for tarch in "${tarches[@]}"; do
+ for pkgname in "${pkgnames[@]}"; do
+ msg "Removing %s from [%s]..." "$pkgname" "$repo"
done
- arch_repo_remove "${repo}" "${tarch}" ${pkgnames[@]}
- repo_unlock $repo $tarch
+ arch_repo_remove "${repo}" "${tarch}" "${pkgnames[@]}"
+ repo_unlock "$repo" "$tarch"
done
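
The recurring change in these scripts from ${arr[@]} to "${arr[@]}" is the standard word-splitting fix: an unquoted array expansion re-splits each element on whitespace and subjects it to pathname expansion. A quick illustration with hypothetical values:

    pkgnames=('pkg one' 'pkg*')
    printf '<%s>\n' ${pkgnames[@]}    # splits on the space and may glob
    printf '<%s>\n' "${pkgnames[@]}"  # prints <pkg one> and <pkg*> intact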
diff --git a/db-update b/db-update
index 576fe2b..559ef3f 100755
--- a/db-update
+++ b/db-update
@@ -1,74 +1,79 @@
#!/bin/bash
-. "$(dirname $0)/config"
-. "$(dirname $0)/db-functions"
+. "$(dirname "$(readlink -e "$0")")/config"
+. "$(dirname "$(readlink -e "$0")")/db-functions"
if [ $# -ge 1 ]; then
- warning "Calling ${0##*/} with a specific repository is no longer supported"
+ warning "Calling %s with a specific repository is no longer supported" "${0##*/}"
exit 1
fi
# Find repos with packages to release
staging_repos=($(find "${STAGING}" -mindepth 1 -type f -name "*${PKGEXT}" -printf '%h\n' | sort -u))
if [ $? -ge 1 ]; then
- die "Could not read ${STAGING}"
+ die "Could not read %s" "${STAGING}"
fi
repos=()
-for staging_repo in ${staging_repos[@]##*/}; do
- if in_array ${staging_repo} ${PKGREPOS[@]}; then
- repos+=(${staging_repo})
+for staging_repo in "${staging_repos[@]##*/}"; do
+ if in_array "${staging_repo}" "${PKGREPOS[@]}"; then
+ repos+=("${staging_repo}")
fi
done
# TODO: this might lock too much (architectures)
-for repo in ${repos[@]}; do
- for pkgarch in ${ARCHES[@]}; do
- repo_lock ${repo} ${pkgarch} || exit 1
+for repo in "${repos[@]}"; do
+ for pkgarch in "${ARCHES[@]}"; do
+ repo_lock "${repo}" "${pkgarch}" || exit 1
done
done
# check if packages are valid
-for repo in ${repos[@]}; do
+for repo in "${repos[@]}"; do
if ! check_repo_permission "${repo}"; then
- die "You don't have permission to update packages in ${repo}"
+ die "You don't have permission to update packages in %s" "${repo}"
fi
pkgs=($(getpkgfiles "${STAGING}/${repo}/"*${PKGEXT}))
if [ $? -eq 0 ]; then
- for pkg in ${pkgs[@]}; do
+ for pkg in "${pkgs[@]}"; do
if [ -h "${pkg}" ]; then
- die "Package ${repo}/${pkg##*/} is a symbolic link"
+ die "Package %s is a symbolic link" "${repo}/${pkg##*/}"
fi
if ! check_pkgfile "${pkg}"; then
- die "Package ${repo}/${pkg##*/} is not consistent with its meta data"
+ die "Package %s is not consistent with its meta data" "${repo}/${pkg##*/}"
fi
- if ${REQUIRE_SIGNATURE} && ! pacman-key -v "${pkg}.sig" >/dev/null 2>&1; then
- die "Package ${repo}/${pkg##*/} does not have a valid signature"
+ if "${REQUIRE_SIGNATURE}" && ! pacman-key -v "${pkg}.sig" >/dev/null 2>&1; then
+ die "Package %s does not have a valid signature" "${repo}/${pkg##*/}"
fi
- if ! check_pkgsvn "${pkg}" "${repo}"; then
- die "Package ${repo}/${pkg##*/} is not consistent with svn repository"
+ if ! check_pkgxbs "${pkg}" "${repo}"; then
+ die "Package %s is not consistent with %s" "${repo}/${pkg##*/}" "$(xbs name)"
fi
if ! check_pkgrepos "${pkg}"; then
- die "Package ${repo}/${pkg##*/} already exists in another repository"
+ die "Package %s already exists in another repository" "${repo}/${pkg##*/}"
+ fi
+ if ! check_packager "${pkg}"; then
+ die "Package ${repo}/${pkg##*/} does not have a valid packager"
fi
done
- if ! check_splitpkgs ${repo} ${pkgs[@]}; then
- die "Missing split packages for ${repo}"
+ if ! check_splitpkgs "${repo}" "${pkgs[@]}"; then
+ die "Missing split packages for %s" "${repo}"
fi
else
- die "Could not read ${STAGING}"
+ die "Could not read %s" "${STAGING}"
fi
done
-for repo in ${repos[@]}; do
- msg "Updating [${repo}]..."
+dirs=()
+for repo in "${repos[@]}"; do
+ msg "Updating [%s]..." "${repo}"
any_pkgs=($(getpkgfiles "${STAGING}/${repo}/"*-any${PKGEXT} 2>/dev/null))
- for pkgarch in ${ARCHES[@]}; do
+ for pkgarch in "${ARCHES[@]}"; do
+ add_dirs=()
add_pkgs=()
- arch_pkgs=($(getpkgfiles "${STAGING}/${repo}/"*-${pkgarch}${PKGEXT} 2>/dev/null))
- for pkg in ${arch_pkgs[@]} ${any_pkgs[@]}; do
+ arch_pkgs=($(getpkgfiles "${STAGING}/${repo}/"*-"${pkgarch}"${PKGEXT} 2>/dev/null))
+ for pkg in "${arch_pkgs[@]}" "${any_pkgs[@]}"; do
pkgfile="${pkg##*/}"
- msg2 "${pkgfile} (${pkgarch})"
+ msg2 "%s (%s)" "${pkgfile}" "${pkgarch}"
# any packages might have been moved by the previous run
if [ -f "${pkg}" ]; then
mv "${pkg}" "$FTP_BASE/${PKGPOOL}"
@@ -81,16 +86,41 @@ for repo in ${repos[@]}; do
if [ -f "$FTP_BASE/${PKGPOOL}/${pkgfile}.sig" ]; then
ln -s "../../../${PKGPOOL}/${pkgfile}.sig" "$FTP_BASE/$repo/os/${pkgarch}"
fi
- add_pkgs[${#add_pkgs[*]}]=${pkgfile}
+ add_dirs+=("${STAGING}/abslibre/$(getpkgarch "$FTP_BASE/$PKGPOOL/$pkgfile")/$repo/$(getpkgbase "$FTP_BASE/$PKGPOOL/$pkgfile")")
+ add_pkgs+=("${pkgfile}")
+ done
+ for add_dir in "${add_dirs[@]}"; do
+ (cd "${add_dir}" && xbs release-server "${repo}" "${pkgarch}") ||
+ error 'cd %q && xbs release-server %q %q' "${add_dir}" "${repo}" "${pkgarch}"
done
if [ ${#add_pkgs[@]} -ge 1 ]; then
- arch_repo_add "${repo}" "${pkgarch}" ${add_pkgs[@]}
+ arch_repo_add "${repo}" "${pkgarch}" "${add_pkgs[@]}"
fi
+ dirs+=("${add_dirs[@]}")
done
done
-for repo in ${repos[@]}; do
- for pkgarch in ${ARCHES[@]}; do
- repo_unlock ${repo} ${pkgarch}
+for repo in "${repos[@]}"; do
+ for pkgarch in "${ARCHES[@]}"; do
+ repo_unlock "${repo}" "${pkgarch}"
done
done
+
+cd "${STAGING}"
+
+# Remove left over XBS files
+rm -rf -- "${dirs[@]}"
+dirname -z -- "${dirs[@]}" |
+ xargs -0 realpath -zm --relative-to="${STAGING}" -- |
+ xargs -0 rmdir -p -- 2>/dev/null
+
+# Stage generated source files
+while read -r file; do
+ pub="${FTP_BASE}/${file}"
+ if [[ -f "$pub" ]]; then
+ warning "file already exists: %s" "${file}"
+ else
+ mkdir -p -- "${pub%/*}"
+ mv -vn "$file" "$pub"
+ fi
+done < <(find other sources -type f 2>/dev/null)
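
The cleanup added at the end of db-update leans on rmdir -p, which removes a directory and then each successive parent until it hits a non-empty one; rewriting the paths relative to ${STAGING} first keeps that walk from climbing above the staging tree. A toy illustration with hypothetical paths:

    cd "${STAGING}"
    mkdir -p abslibre/x86_64/core/foo
    rmdir -p abslibre/x86_64/core/foo   # removes foo, then core, x86_64, abslibre once empty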
diff --git a/make_individual_torrent b/make_individual_torrent
new file mode 100755
index 0000000..0a7e778
--- /dev/null
+++ b/make_individual_torrent
@@ -0,0 +1,52 @@
+#!/bin/bash
+# Copyright (C) 2014 Joseph Graham <joseph@t67.eu>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+# This script is called by `make_repo_torrents' to make a torrent. It
+# depends on `mktorrent'. It takes the following args:
+# $1 - path of package
+# $2 - public location
+
+# Comma-separated list of trackers, no spaces
+# t67.eu is run by Xylon, hackcoop by fauno & friends
+trackers='http://t67.eu:6969/announce,http://tracker.hackcoop.com.ar/announce'
+
+# This mirror is listed as a webseed. Which mirror we use for the
+# webseed doesn't really matter, since pacman2pacman rewrites it on
+# the client machine, so it won't normally be used anyway.
+seed_url='http://repo.parabolagnulinux.org/'
+
+if [[ -z "${1}" ]]
+then
+ echo "Error. First arg must be the path of the package."
+ exit 1
+fi
+
+if [[ -z "${2}" ]]
+then
+ echo "Error. Second arg must be the public location."
+ exit 1
+fi
+
+pkg="${1}"
+public_location="${2}"
+
+pkg_name="${pkg##*/}"
+
+# URL of the actual package for the webseed
+webseed="${seed_url}${pkg#${public_location}}"
+
+mktorrent -a "${trackers}" "${pkg}" -w "${webseed}" >/dev/null ||
+echo "Error making torrent for \"${pkg}\""
diff --git a/test/__init__.py b/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/__init__.py
diff --git a/test/blacklist_sample b/test/blacklist_sample
new file mode 100644
index 0000000..2a02af6
--- /dev/null
+++ b/test/blacklist_sample
@@ -0,0 +1,2 @@
+alex:alex-libre: Aquí va un comentario
+gmime22 ::Non free dependencies
\ No newline at end of file
diff --git a/test/core.db.tar.gz b/test/core.db.tar.gz
new file mode 100644
index 0000000..5eb2081
--- /dev/null
+++ b/test/core.db.tar.gz
Binary files differ
diff --git a/test/depends b/test/depends
new file mode 100644
index 0000000..7ff3ad4
--- /dev/null
+++ b/test/depends
@@ -0,0 +1,4 @@
+%DEPENDS%
+glibc>=2.13
+zlib
+
diff --git a/test/desc b/test/desc
new file mode 100644
index 0000000..abba644
--- /dev/null
+++ b/test/desc
@@ -0,0 +1,39 @@
+%FILENAME%
+binutils-2.21-4-x86_64.pkg.tar.xz
+
+%NAME%
+binutils
+
+%VERSION%
+2.21-4
+
+%DESC%
+A set of programs to assemble and manipulate binary and object files
+
+%GROUPS%
+base
+
+%CSIZE%
+3412892
+
+%ISIZE%
+17571840
+
+%MD5SUM%
+4e666f87c78998f4839f33dc06d2043a
+
+%URL%
+http://www.gnu.org/software/binutils/
+
+%LICENSE%
+GPL
+
+%ARCH%
+x86_64
+
+%BUILDDATE%
+1297240369
+
+%PACKAGER%
+Allan McRae <allan@archlinux.org>
+
diff --git a/test/lib/common.inc b/test/lib/common.inc
index 954868b..d0c7fec 100644
--- a/test/lib/common.inc
+++ b/test/lib/common.inc
@@ -5,6 +5,10 @@ set -E
. "$(dirname "${BASH_SOURCE[0]}")/../../config.testing"
. "$(dirname "${BASH_SOURCE[0]}")/../../db-functions"
+arch_svn() {
+ /usr/bin/svn "$@"
+}
+
signpkg() {
if [[ -r '/etc/makepkg.conf' ]]; then
source '/etc/makepkg.conf'
@@ -46,21 +50,19 @@ oneTimeSetUp() {
if ! "${build}"; then
if [ "${pkgarch[0]}" == 'any' ]; then
- sudo extra-x86_64-build || die 'extra-x86_64-build failed'
+ sudo libremakepkg || die 'libremakepkg failed'
else
for a in "${pkgarch[@]}"; do
if in_array "$a" "${ARCH_BUILD[@]}"; then
- sudo "extra-${a}-build" || die "extra-${a}-build failed"
+ sudo setarch "$a" libremakepkg -n "$a" || die "setarch ${a} libremakepkg -n ${a} failed"
+ for p in "${pkgname[@]}"; do
+ cp "${p}-${pkgversion}-${a}"${PKGEXT} "$(dirname "${BASH_SOURCE[0]})/../packages/${d##*/}")"
+ done
+ else
+ warning "skipping arch %s" "$a"
fi
done
fi
- for a in "${pkgarch[@]}"; do
- if in_array "$a" "${ARCH_BUILD[@]}"; then
- for p in "${pkgname[@]}"; do
- cp "${p}-${pkgversion}-${a}"${PKGEXT} "$(dirname "${BASH_SOURCE[0]}")/../packages/${d##*/}"
- done
- fi
- done
fi
popd >/dev/null
done
@@ -82,6 +84,7 @@ setUp() {
PKGREPOS=('core' 'extra' 'testing')
PKGPOOL='pool/packages'
+ SRCPOOL='pool/sources'
mkdir -p "${TMP}/"{ftp,tmp,staging,{package,source}-cleanup,svn-packages-{copy,repo}}
for r in "${PKGREPOS[@]}"; do
@@ -105,11 +108,21 @@ setUp() {
arch_svn commit -q -m"initial commit of ${pkg}" "${TMP}/svn-packages-copy"
done
+ mkdir -p "${TMP}/home/.config/libretools"
+ export XDG_CONFIG_HOME="${TMP}/home/.config"
+ printf '%s\n' \
+ 'SVNURL=foo' \
+ "SVNREPO=\"${TMP}/svn-packages-copy\"" \
+ "ARCHES=($(printf '%q ' "${BUILD_ARCHES[@]}"))" \
+ > "$XDG_CONFIG_HOME/libretools/xbs-abs.conf"
+ printf '%s\n' 'BUILDSYSTEM=abs' > "$XDG_CONFIG_HOME/xbs.conf"
+
cat <<eot > "$(dirname "${BASH_SOURCE[0]}")/../../config.local"
FTP_BASE="${TMP}/ftp"
SVNREPO="file://${TMP}/svn-packages-repo"
PKGREPOS=("${PKGREPOS[@]}")
PKGPOOL="${PKGPOOL}"
+ SRCPOOL="${SRCPOOL}"
TESTING_REPO='testing'
STABLE_REPOS=('core' 'extra')
CLEANUP_DESTDIR="${TMP}/package-cleanup"
@@ -139,7 +152,7 @@ releasePackage() {
local pkgname
pushd "${TMP}/svn-packages-copy/${pkgbase}/trunk/" >/dev/null
- archrelease "${repo}-${arch}" >/dev/null 2>&1
+ xbs release "${repo}" "${arch}" >/dev/null 2>&1
pkgver=$(. PKGBUILD; get_full_version "${epoch:-0}" "${pkgver}" "${pkgrel}")
pkgname=($(. PKGBUILD; echo "${pkgname[@]}"))
popd >/dev/null
@@ -167,12 +180,12 @@ checkAnyPackageDB() {
for arch in "${ARCH_BUILD[@]}"; do
[ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}" ] || fail "${repo}/os/${arch}/${pkg} is not a symlink"
- [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}")" == "${FTP_BASE}/${PKGPOOL}/${pkg}" ] \
+ [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}")" == "$(readlink -e "${FTP_BASE}/${PKGPOOL}/${pkg}")" ] \
|| fail "${repo}/os/${arch}/${pkg} does not link to ${PKGPOOL}/${pkg}"
if "${REQUIRE_SIGNATURE}"; then
[ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig" ] || fail "${repo}/os/${arch}/${pkg}.sig is not a symlink"
- [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig")" == "${FTP_BASE}/${PKGPOOL}/${pkg}.sig" ] \
+ [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig")" == "$(readlink -e "${FTP_BASE}/${PKGPOOL}/${pkg}.sig")" ] \
|| fail "${repo}/os/${arch}/${pkg}.sig does not link to ${PKGPOOL}/${pkg}.sig"
fi
@@ -208,7 +221,7 @@ checkPackageDB() {
[ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}" ] || fail "${repo}/os/${arch}/${pkg} not a symlink"
[ -r "${STAGING}/${repo}/${pkg}" ] && fail "${repo}/${pkg} found in staging dir"
- [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}")" == "${FTP_BASE}/${PKGPOOL}/${pkg}" ] \
+ [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}")" == "$(readlink -e "${FTP_BASE}/${PKGPOOL}/${pkg}")" ] \
|| fail "${repo}/os/${arch}/${pkg} does not link to ${PKGPOOL}/${pkg}"
if "${REQUIRE_SIGNATURE}"; then
@@ -216,7 +229,7 @@ checkPackageDB() {
[ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig" ] || fail "${repo}/os/${arch}/${pkg}.sig is not a symlink"
[ -r "${STAGING}/${repo}/${pkg}.sig" ] && fail "${repo}/${pkg}.sig found in staging dir"
- [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig")" == "${FTP_BASE}/${PKGPOOL}/${pkg}.sig" ] \
+ [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig")" == "$(readlink -e "${FTP_BASE}/${PKGPOOL}/${pkg}.sig")" ] \
|| fail "${repo}/os/${arch}/${pkg}.sig does not link to ${PKGPOOL}/${pkg}.sig"
fi
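
The checkAnyPackageDB/checkPackageDB changes canonicalize both sides of the symlink comparison. If ${FTP_BASE} is itself reached through a symlink, readlink -e on the repo entry returns the fully resolved path, which can never equal the unresolved ${FTP_BASE}/${PKGPOOL}/${pkg} string, so the old test failed spuriously. A minimal reproduction with hypothetical paths:

    mkdir -p /tmp/real && ln -s /tmp/real /tmp/alias
    touch /tmp/real/pkg
    ln -s /tmp/alias/pkg /tmp/link
    [ "$(readlink -e /tmp/link)" == "/tmp/alias/pkg" ]                 # fails
    [ "$(readlink -e /tmp/link)" == "$(readlink -e /tmp/alias/pkg)" ]  # passes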
diff --git a/test/rsync_output_sample b/test/rsync_output_sample
new file mode 100644
index 0000000..72d9cd0
--- /dev/null
+++ b/test/rsync_output_sample
@@ -0,0 +1,14 @@
+dr-xr-sr-x 4096 2010/09/11 11:37:10 .
+-rw-r--r-- 11 2011/02/08 00:00:01 lastsync
+drwxrwxr-x 15 2010/09/11 11:28:50 community-staging
+drwxrwxr-x 30 2010/09/11 11:28:50 community-staging/os
+drwxrwxr-x 8192 2011/02/07 17:00:01 community-staging/os/i686
+lrwxrwxrwx 52 2010/12/23 16:51:01 community-staging/os/i686/alex-2.3.4-1-i686.pkg.tar.xz -> ../../../pool/community/alex-2.3.4-1-i686.pkg.tar.xz
+lrwxrwxrwx 27 2011/02/07 14:02:54 community-staging/os/i686/community-staging.db -> community-staging.db.tar.gz
+-rw-rw-r-- 2237 2011/02/07 14:02:54 community-staging/os/i686/community-staging.db.tar.gz
+-rw-rw-r-- 3209 2011/02/07 14:00:13 community-staging/os/i686/community-staging.db.tar.gz.old
+drwxrwxr-x 15 2009/07/22 15:07:56 community
+drwxrwxr-x 40 2009/08/04 15:57:42 community/os
+drwxrwsr-x 36864 2011/02/03 05:00:01 community/os/any
+-rw-rw-r-- 303336 2010/07/16 10:06:28 community/os/any/any2dvd-0.34-4-any.pkg.tar.xz
+-rw-rw-r-- 221664 2010/03/28 15:55:48 community/os/x86_64/gmime22-2.2.26-1-x86_64.pkg.tar.xz
diff --git a/test/test.d/create-filelists.sh b/test/test.d/create-filelists.sh
index 21bf292..837c432 100755
--- a/test/test.d/create-filelists.sh
+++ b/test/test.d/create-filelists.sh
@@ -17,7 +17,7 @@ testCreateSimpleFileLists() {
for pkgbase in "${pkgs[@]}"; do
for arch in "${ARCH_BUILD[@]}"; do
- if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep -q "usr/bin/${pkgbase}"; then
+ if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep "usr/bin/${pkgbase}" &>/dev/null; then
fail "usr/bin/${pkgbase} not found in ${arch}/extra${FILESEXT}"
fi
done
@@ -36,7 +36,7 @@ testCreateAnyFileLists() {
for pkgbase in "${pkgs[@]}"; do
for arch in "${ARCH_BUILD[@]}"; do
- if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep -q "usr/share/${pkgbase}/test"; then
+ if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep "usr/share/${pkgbase}/test" &>/dev/null; then
fail "usr/share/${pkgbase}/test not found in ${arch}/extra${FILESEXT}"
fi
done
@@ -59,10 +59,10 @@ testCreateSplitFileLists() {
../db-update
for pkgbase in "${pkgs[@]}"; do
- pkgnames=($(source "${TMP}/svn-packages-copy/${pkgbase}/trunk/PKGBUILD"; echo ${pkgname[@]}))
+ pkgnames=($(source "${TMP}/svn-packages-copy/${pkgbase}/trunk/PKGBUILD"; echo "${pkgname[@]}"))
for pkgname in "${pkgnames[@]}"; do
for arch in "${ARCH_BUILD[@]}"; do
- if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep -q "usr/bin/${pkgname}"; then
+ if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep "usr/bin/${pkgname}" &>/dev/null; then
fail "usr/bin/${pkgname} not found in ${arch}/extra${FILESEXT}"
fi
done
@@ -88,10 +88,10 @@ testCleanupFileLists() {
done
for arch in "${ARCH_BUILD[@]}"; do
- if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep -q "usr/bin/pkg-simple-b"; then
+ if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep "usr/bin/pkg-simple-b" &>/dev/null; then
fail "usr/bin/pkg-simple-b not found in ${arch}/extra${FILESEXT}"
fi
- if bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep -q "usr/bin/pkg-simple-a"; then
+ if bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep "usr/bin/pkg-simple-a" &>/dev/null; then
fail "usr/bin/pkg-simple-a still found in ${arch}/extra${FILESEXT}"
fi
done
diff --git a/test/test.d/db-update.sh b/test/test.d/db-update.sh
index ca4efe7..1581bee 100755
--- a/test/test.d/db-update.sh
+++ b/test/test.d/db-update.sh
@@ -80,7 +80,7 @@ testUpdateAnyPackage() {
pushd "${TMP}/svn-packages-copy/pkg-any-a/trunk/" >/dev/null
sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
arch_svn commit -q -m"update pkg to pkgrel=2" >/dev/null
- sudo extra-i686-build
+ sudo libremakepkg
mv pkg-any-a-1-2-any.pkg.tar.xz "${pkgdir}/pkg-any-a/"
popd >/dev/null
@@ -98,7 +98,7 @@ testUpdateAnyPackageToDifferentRepositoriesAtOnce() {
pushd "${TMP}/svn-packages-copy/pkg-any-a/trunk/" >/dev/null
sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
arch_svn commit -q -m"update pkg to pkgrel=2" >/dev/null
- sudo extra-i686-build
+ sudo libremakepkg
mv pkg-any-a-1-2-any.pkg.tar.xz "${pkgdir}/pkg-any-a/"
popd >/dev/null
@@ -130,9 +130,9 @@ testUpdateSameAnyPackageToDifferentRepositories() {
../db-update >/dev/null 2>&1 && (fail 'Adding an existing package to another repository should fail'; return 1)
local arch
- for arch in i686 x86_64; do
+ for arch in "${ARCH_BUILD[@]}"; do
( [ -r "${FTP_BASE}/testing/os/${arch}/testing${DBEXT%.tar.*}" ] \
- && bsdtar -xf "${FTP_BASE}/testing/os/${arch}/testing${DBEXT%.tar.*}" -O | grep -q "${pkgbase}") \
+ && bsdtar -xf "${FTP_BASE}/testing/os/${arch}/testing${DBEXT%.tar.*}" -O | grep ${pkgbase} &>/dev/null) \
&& fail "${pkgbase} should not be in testing/os/${arch}/testing${DBEXT%.tar.*}"
done
}
@@ -154,7 +154,7 @@ testAddIncompleteSplitPackage() {
for arch in "${ARCH_BUILD[@]}"; do
( [ -r "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" ] \
- && bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" -O | grep -q "${pkgbase}") \
+ && bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" -O | grep ${pkgbase} &>/dev/null) \
&& fail "${pkgbase} should not be in ${repo}/os/${arch}/${repo}${DBEXT%.tar.*}"
done
}
diff --git a/test/test.d/testing2x.sh b/test/test.d/testing2x.sh
index c611ce4..8232490 100755
--- a/test/test.d/testing2x.sh
+++ b/test/test.d/testing2x.sh
@@ -10,7 +10,7 @@ testTesting2xAnyPackage() {
pushd "${TMP}/svn-packages-copy/pkg-any-a/trunk/" >/dev/null
sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
arch_svn commit -q -m"update pkg to pkgrel=2" >/dev/null
- sudo extra-i686-build
+ sudo libremakepkg
mv pkg-any-a-1-2-any.pkg.tar.xz "${pkgdir}/pkg-any-a/"
popd >/dev/null
diff --git a/test/test_filter.py b/test/test_filter.py
new file mode 100644
index 0000000..d8006f9
--- /dev/null
+++ b/test/test_filter.py
@@ -0,0 +1,196 @@
+# -*- encoding: utf-8 -*-
+""" """
+
+__author__ = "Joshua Ismael Haase Hernández <hahj87@gmail.com>"
+__version__ = "$Revision: 1.1 $"
+__date__ = "$Date: 2011/02/08 $"
+__copyright__ = "Copyright (c) 2011 Joshua Ismael Haase Hernández"
+__license__ = "GPL3+"
+
+from repm.config import *
+from repm.filter import *
+import unittest
+
+class pkginfo_from_file_KnownValues(unittest.TestCase):
+ # (filename, name, version, release, arch)
+ # filename is location
+ known=(
+ ("community-testing/os/i686/inputattach-1.24-3-i686.pkg.tar.xz","inputattach","1.24","3","i686"),
+ ("community-testing/os/i686/ngspice-22-1-i686.pkg.tar.xz","ngspice","22","1","i686"),
+ ("community-testing/os/i686/tmux-1.4-2-i686.pkg.tar.xz","tmux","1.4","2","i686"),
+ ("community-testing/os/i686/tor-0.2.1.29-2-i686.pkg.tar.xz","tor","0.2.1.29","2","i686"),
+ ("../../../pool/community/tor-0.2.1.29-2-i686.pkg.tar.xz","tor","0.2.1.29","2","i686"),
+ ("community-testing/os/x86_64/inputattach-1.24-3-x86_64.pkg.tar.xz","inputattach","1.24","3","x86_64"),
+ ("../../../pool/community/inputattach-1.24-3-x86_64.pkg.tar.xz","inputattach","1.24","3","x86_64"),
+ ("tor-0.2.1.29-2-x86_64.pkg.tar.xz","tor","0.2.1.29","2","x86_64"),
+ )
+
+ def generate_results(self, example_tuple, attr):
+ location, name, version, release, arch = example_tuple
+ return pkginfo_from_filename(location)[attr], locals()[attr]
+
+ def testReturnPackageObject(self):
+ for i in self.known:
+ location, name, version, release, arch = i
+ self.assertIsInstance(pkginfo_from_filename(location),Package)
+
+ def testNames(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="name")
+ self.assertEqual(k, v)
+
+ def testVersions(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="version")
+ self.assertEqual(k, v)
+
+ def testArchs(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="arch")
+ self.assertEqual(k, v)
+
+ def testReleases(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="release")
+ self.assertEqual(k, v)
+
+ def testLocations(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="location")
+ self.assertEqual(k, v)
+
+class pkginfo_from_file_BadInput(unittest.TestCase):
+ bad=("community-testing/os/i686/community-testing.db",
+ "community-testing/os/i686/community-testing.db.tar.gz",
+ "community-testing/os/i686/community-testing.db.tar.gz.old",
+ "community-testing/os/i686/community-testing.files",
+ "community-testing/os/i686/community-testing.files.tar.gz",
+ "community-testing/os/x86_64")
+
+ def testBadInput(self):
+ for i in self.bad:
+ self.assertRaises(NonValidFile,pkginfo_from_filename,i)
+
+class pkginfoFromRsyncOutput(unittest.TestCase):
+ example_package_list=(Package(),Package(),Package())
+ example_package_list[0].package_info={ "name" : "alex",
+ "version" : "2.3.4",
+ "release" : "1",
+ "arch" : "i686",
+ "license" : False,
+ "location": "community-staging/os/i686/alex-2.3.4-1-i686.pkg.tar.xz",
+ "depends" : False,}
+ example_package_list[1].package_info={ "name" : "any2dvd",
+ "version" : "0.34",
+ "release" : "4",
+ "arch" : "any",
+ "license" : False,
+ "location": "community/os/any/any2dvd-0.34-4-any.pkg.tar.xz",
+ "depends" : False,}
+ example_package_list[2].package_info={ "name" : "gmime22",
+ "version" : "2.2.26",
+ "release" : "1",
+ "arch" : "x86_64",
+ "license" : False,
+ "location": "community/os/x86_64/gmime22-2.2.26-1-x86_64.pkg.tar.xz",
+ "depends" : False,}
+
+ try:
+ output_file = open("rsync_output_sample")
+ rsync_out= output_file.read()
+ output_file.close()
+ except IOError:
+     rsync_out = ""  # missing rsync_output_sample; the tests below will report it
+ pkglist = pkginfo_from_rsync_output(rsync_out)
+
+ def testOutputArePackages(self):
+ if not self.pkglist:
+ self.fail("not pkglist:" + str(self.pkglist))
+ for pkg in self.pkglist:
+ self.assertIsInstance(pkg,Package)
+
+ def testPackageInfo(self):
+ if not self.pkglist:
+ self.fail("Pkglist doesn't exist: " + str(self.pkglist))
+ self.assertEqual(self.pkglist,self.example_package_list)
+
+class generateRsyncBlacklist(unittest.TestCase):
+ example_package_list=(Package(),Package(),Package())
+ example_package_list[0].package_info={ "name" : "alex",
+ "version" : "2.3.4",
+ "release" : "1",
+ "arch" : "i686",
+ "license" : False,
+ "location": "community-staging/os/i686/alex-2.3.4-1-i686.pkg.tar.xz",
+ "depends" : False,}
+ example_package_list[1].package_info={ "name" : "any2dvd",
+ "version" : "0.34",
+ "release" : "4",
+ "arch" : "any",
+ "license" : False,
+ "location": "community/os/any/any2dvd-0.34-4-any.pkg.tar.xz",
+ "depends" : False,}
+ example_package_list[2].package_info={ "name" : "gmime22",
+ "version" : "2.2.26",
+ "release" : "1",
+ "arch" : "x86_64",
+ "license" : False,
+ "location": "community/os/x86_64/gmime22-2.2.26-1-x86_64.pkg.tar.xz",
+ "depends" : False,}
+
+ def testListado(self):
+ self.assertEqual(listado("blacklist_sample"),["alex","gmime22"])
+
+ def testExcludeFiles(self):
+ a=rsyncBlacklist_from_blacklist(self.example_package_list,
+ listado("blacklist_sample"),
+ False)
+ b=[self.example_package_list[0]["location"],self.example_package_list[2]["location"]]
+ self.assertEqual(a,b)
+
+class pkginfo_from_descKnownValues(unittest.TestCase):
+ pkgsample=Package()
+ pkgsample.package_info={"name" : "binutils",
+ "version" : "2.21",
+ "release" : "4",
+ "arch" : "x86_64",
+ "license" : "GPL",
+ "location": "binutils-2.21-4-x86_64.pkg.tar.xz",
+ "depends" : False,}
+ fsock=open("desc")
+ pkggen=pkginfo_from_desc(fsock.read())
+ fsock.close()
+ def testPkginfoFromDesc(self):
+ if self.pkggen is None:
+ self.fail("return value is None")
+ self.assertEqual(self.pkgsample,self.pkggen)
+
+class pkginfo_from_db(unittest.TestCase):
+ archdb = os.path.join("./workdir")
+ example_package_list=(Package(),Package(),Package())
+ example_package_list[0].package_info={ "name" : "acl",
+ "version" : "2.2.49",
+ "release" : "2",
+ "arch" : "x86_64",
+ "license" : ("LGPL",),
+ "location": "acl-2.2.49-2-x86_64.pkg.tar.xz",
+ "depends" : ("attr>=2.4.41"),}
+ example_package_list[1].package_info={ "name" : "glibc",
+ "version" : "2.13",
+ "release" : "4",
+ "arch" : "x86_64",
+ "license" : ("GPL","LGPL"),
+ "location": "glibc-2.13-4-x86_64.pkg.tar.xz",
+ "depends" : ("linux-api-headers>=2.6.37","tzdata",),}
+ example_package_list[2].package_info={ "name" : "",
+ "version" : "2.2.26",
+ "release" : "1",
+ "arch" : "x86_64",
+ "license" : False,
+ "location": "",
+ "depends" : False,}
+
+
+if __name__ == "__main__":
+ unittest.main()
+
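
Assuming the repm package is importable and the fixtures above (rsync_output_sample, blacklist_sample, desc) sit in the working directory, the suite should run under the stock unittest runner:

    cd test && python -m unittest test_filter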
diff --git a/testing2x b/testing2x
deleted file mode 100755
index 369857f..0000000
--- a/testing2x
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/bin/bash
-
-. "$(dirname $0)/config"
-. "$(dirname $0)/db-functions"
-
-if [ $# -lt 1 ]; then
- msg "usage: ${0##*/} <pkgname|pkgbase> ..."
- exit 1
-fi
-
-# Lock everything to reduce possibility of interfering task between the different repo-updates
-script_lock
-for repo in ${TESTING_REPO} ${STABLE_REPOS[@]}; do
- for pkgarch in ${ARCHES[@]}; do
- repo_lock ${repo} ${pkgarch} || exit 1
- done
-done
-
-declare -A pkgs
-
-for pkgbase in $*; do
- if [ ! -d "${WORKDIR}/${pkgbase}" ]; then
- arch_svn export -q "${SVNREPO}/${pkgbase}/repos" "${WORKDIR}/${pkgbase}" >/dev/null
-
- found_source=false
- for pkgarch in ${ARCHES[@]} 'any'; do
- svnrepo_from="${WORKDIR}/${pkgbase}/${TESTING_REPO}-${pkgarch}"
- if [ -r "${svnrepo_from}/PKGBUILD" ]; then
- found_source=true
- break
- fi
- done
- ${found_source} || die "${pkgbase} not found in [${TESTING_REPO}]"
- found_target=false
- for pkgarch in ${ARCHES[@]} 'any'; do
- for repo in ${STABLE_REPOS[@]}; do
- svnrepo_to="${WORKDIR}/${pkgbase}/${repo}-${pkgarch}"
- if [ -r "${svnrepo_to}/PKGBUILD" ]; then
- found_target=true
- pkgs[${repo}]+="${pkgbase} "
- break 2
- fi
- done
- done
- ${found_target} || die "${pkgbase} not found in any of these repos: ${STABLE_REPOS[@]}"
- fi
-done
-
-for pkgarch in ${ARCHES[@]}; do
- repo_unlock ${TESTING_REPO} ${pkgarch}
-done
-for repo in ${STABLE_REPOS[@]}; do
- for pkgarch in ${ARCHES[@]}; do
- repo_unlock ${repo} ${pkgarch}
- done
- if [ -n "${pkgs[${repo}]}" ]; then
- "$(dirname $0)/db-move" ${TESTING_REPO} "${repo}" ${pkgs[${repo}]}
- fi
-done
-
-script_unlock