-rw-r--r--  .gitignore                                      6
-rw-r--r--  COPYING                                       674
-rw-r--r--  TODO                                           10
-rw-r--r--  config                                         34
-rw-r--r--  config.local.import-community                   5
-rw-r--r--  config.local.import-packages                    5
-rw-r--r--  config.local.parabola                          14
-rwxr-xr-x  cron-jobs/check_archlinux/check_packages.py    2
-rwxr-xr-x  cron-jobs/make_repo_torrents                   70
-rw-r--r--  cron-jobs/makepkg.conf                        147
-rwxr-xr-x  cron-jobs/sourceballs                           6
-rwxr-xr-x  cron-jobs/update-web-db                        78
l---------  cron-jobs/update-web-files-db                   1
-rwxr-xr-x  db-check-nonfree-in-db                         28
-rwxr-xr-x  db-check-package-libraries                    193
-rwxr-xr-x  db-check-unsigned-packages                     38
-rwxr-xr-x  db-check-unsigned-packages.py                  96
-rw-r--r--  db-functions                                   40
-rwxr-xr-x  db-import                                     288
-rwxr-xr-x  db-import-pick-mirror                          25
-rw-r--r--  db-import.conf                                 23
-rwxr-xr-x  db-init                                         6
-rwxr-xr-x  db-move                                        52
-rwxr-xr-x  db-remove                                      15
-rwxr-xr-x  db-update                                      31
-rwxr-xr-x  make_individual_torrent                        52
-rw-r--r--  test/lib/common.inc                            37
-rwxr-xr-x  test/test.d/create-filelists.sh                 2
-rwxr-xr-x  test/test.d/db-update.sh                        6
-rwxr-xr-x  test/test.d/testing2x.sh                        2
-rwxr-xr-x  testing2x                                      61
-rw-r--r--  tests-xihh/__init__.py                          0
-rw-r--r--  tests-xihh/blacklist_sample                     2
-rw-r--r--  tests-xihh/core.db.tar.gz                     bin 0 -> 1345 bytes
-rw-r--r--  tests-xihh/depends                              4
-rw-r--r--  tests-xihh/desc                                39
-rw-r--r--  tests-xihh/rsync_output_sample                 14
-rw-r--r--  tests-xihh/test_filter.py                     196
38 files changed, 1906 insertions, 396 deletions
diff --git a/.gitignore b/.gitignore
index f47d96a..98a5228 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,9 @@
*~
+*.pyc
/config.local
test/packages/*/*.pkg.tar.?z
+\#*#
+.#*
+yftime
+src*
+pkg*
diff --git a/COPYING b/COPYING
new file mode 100644
index 0000000..94a9ed0
--- /dev/null
+++ b/COPYING
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+ <program> Copyright (C) <year> <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/TODO b/TODO
new file mode 100644
index 0000000..9dd4b52
--- /dev/null
+++ b/TODO
@@ -0,0 +1,10 @@
+* Test Suite for clean_repo.py
+
+ - Review all repo
+ - Remove all blacklisted packages
+ - Get pending list right
+ - Extract licenses all right
+
+* Fix db-move
+
+ - Make it use abslibre
diff --git a/config b/config
index 0fc62fd..79b795c 100644
--- a/config
+++ b/config
@@ -1,20 +1,22 @@
#!/hint/bash
-FTP_BASE="/srv/ftp"
-SVNREPO=''
-SVNUSER=''
+case "$USER" in
+ db-import-packages) _name=import-packages;;
+ db-import-community) _name=import-community;;
+ *) _name=parabola;;
+esac
+
+FTP_BASE="/srv/repo/main"
PKGREPOS=()
PKGPOOL=''
SRCPOOL=''
-TESTING_REPO=''
-STABLE_REPOS=()
-CLEANUP_DESTDIR="/var/tmp"
+CLEANUP_DESTDIR="/srv/repo/private/${_name}/package-cleanup"
CLEANUP_DRYRUN=false
# Time in days to keep moved packages
CLEANUP_KEEP=30
-SOURCE_CLEANUP_DESTDIR="/var/tmp"
+SOURCE_CLEANUP_DESTDIR="/srv/repo/private/${_name}/source-cleanup"
SOURCE_CLEANUP_DRYRUN=false
# Time in days to keep moved sourcepackages
SOURCE_CLEANUP_KEEP=14
@@ -24,8 +26,8 @@ REQUIRE_SIGNATURE=true
LOCK_DELAY=10
LOCK_TIMEOUT=300
-STAGING="$HOME/staging"
-export TMPDIR="/var/tmp"
+[ -n "${STAGING:-}" ] || STAGING="$HOME/staging/unknown/staging"
+export TMPDIR="${TMPDIR:-/tmp}"
ARCHES=(i686 x86_64)
DBEXT=".db.tar.gz"
FILESEXT=".files.tar.gz"
@@ -33,12 +35,18 @@ PKGEXT=".pkg.tar.?z"
SRCEXT=".src.tar.gz"
# Allowed licenses: get sourceballs only for licenses in this array
-ALLOWED_LICENSES=('GPL' 'GPL1' 'GPL2' 'LGPL' 'LGPL1' 'LGPL2' 'LGPL2.1')
+# Empty (commented out) to get sourceballs for all packages
+#ALLOWED_LICENSES=('GPL' 'GPL1' 'GPL2' 'LGPL' 'LGPL1' 'LGPL2' 'LGPL2.1')
+
+# Where to send error emails, and who they are from
+LIST="dev@lists.parabola.nu"
+FROM="dbscripts+${_name}@$(hostname -f)"
# Where to send error emails, and who they are from
-LIST="arch-dev-public@archlinux.org"
-#LIST="aaronmgriffin@gmail.com"
-FROM="repomaint@archlinux.org"
+LIST="maintenance@lists.parabola.nu"
+FROM="repo@parabola.nu"
# Override default config with config.local
[ -f "$(dirname "${BASH_SOURCE[0]}")/config.local" ] && . "$(dirname "${BASH_SOURCE[0]}")/config.local"
+[ -f "$(dirname "${BASH_SOURCE[0]}")/config.local.${_name}" ] && . "$(dirname "${BASH_SOURCE[0]}")/config.local.${_name}"
+unset _name
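The rewritten config selects a per-user profile: it maps the invoking $USER to a _name and then sources config.local.${_name} on top of config.local. A minimal sketch of that resolution, assuming the same usernames matched above (the echoed output is illustrative only):

    for USER in db-import-packages db-import-community repo; do
        case "$USER" in
            db-import-packages)  _name=import-packages;;
            db-import-community) _name=import-community;;
            *)                   _name=parabola;;
        esac
        echo "$USER -> config.local.$_name"
    done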
diff --git a/config.local.import-community b/config.local.import-community
new file mode 100644
index 0000000..393fd57
--- /dev/null
+++ b/config.local.import-community
@@ -0,0 +1,5 @@
+#!/hint/bash
+
+PKGREPOS=({community,multilib}{,-testing,-staging})
+PKGPOOL='pool/community'
+SRCPOOL='sources/community'
diff --git a/config.local.import-packages b/config.local.import-packages
new file mode 100644
index 0000000..c699b28
--- /dev/null
+++ b/config.local.import-packages
@@ -0,0 +1,5 @@
+#!/hint/bash
+
+PKGREPOS=('core' 'extra' 'testing' 'staging' {kde,gnome}-unstable)
+PKGPOOL='pool/packages'
+SRCPOOL='sources/packages'
diff --git a/config.local.parabola b/config.local.parabola
new file mode 100644
index 0000000..648dfbb
--- /dev/null
+++ b/config.local.parabola
@@ -0,0 +1,14 @@
+#!/hint/bash
+
+PKGREPOS=(
+ # Main repos
+ libre{,-testing}
+ libre-multilib{,-testing}
+ # Community project repos
+ {nonsystemd,nonprism}{,-testing}
+ pcr kernels cross java
+ # User repos
+ '~smv' '~xihh' '~brendan' '~lukeshu' '~emulatorman' '~aurelien' '~jorginho' '~coadde' '~drtan'
+)
+PKGPOOL='pool/parabola'
+SRCPOOL='sources/parabola'
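The PKGREPOS arrays in the new config.local files rely on bash brace expansion, so the short patterns expand to the full repo lists when the file is sourced. For example, the import-community pattern expands as follows (a sketch; runs in any bash shell):

    # Prints, one per line: community community-testing community-staging
    #                       multilib  multilib-testing  multilib-staging
    printf '%s\n' {community,multilib}{,-testing,-staging}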
diff --git a/cron-jobs/check_archlinux/check_packages.py b/cron-jobs/check_archlinux/check_packages.py
index d233bf6..ac0194f 100755
--- a/cron-jobs/check_archlinux/check_packages.py
+++ b/cron-jobs/check_archlinux/check_packages.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/env python2
#
# check_archlinux.py
#
diff --git a/cron-jobs/make_repo_torrents b/cron-jobs/make_repo_torrents
new file mode 100755
index 0000000..2eb0978
--- /dev/null
+++ b/cron-jobs/make_repo_torrents
@@ -0,0 +1,70 @@
+#!/bin/bash
+# Copyright (C) 2014 Joseph Graham <joseph@t67.eu>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+# This script finds any updated packages and calls
+# `make_individual_torrent' for each of them.
+
+username=$( id -un )
+
+case "${username}" in
+ repo | root )
+ true
+ ;;
+ * )
+ echo "This script must be run as repo user or root user."
+ echo "ByeBye!"
+ exit 1
+ ;;
+esac
+
+# pacman doesn't support multiple different packages of the same name,
+# so it's OK to just stuff all the torrents into a single directory.
+script_directory="$(dirname "$(readlink -e "$0")")/.."
+. "$(dirname "$(readlink -e "$0")")/../config"
+public_location="$FTP_BASE/"
+torrent_location="$FTP_BASE/torrents/"
+
+cd "${torrent_location}"
+
+# Find any directories that might have packages in them
+find "${public_location}" -name 'os' -type 'd' |
+while read dir
+do
+ # Find any packages
+ find "${dir}" -name '*\.pkg\.tar\.xz' |
+ while read pkg
+ do
+ pkg_name="${pkg##*/}"
+
+ if [[ -h "${pkg}" ]] # check if it's a symbolic link
+ then
+ # We get the target of the symlink
+ pkg=$( readlink -f "${pkg}" )
+ fi
+
+ # If a .torrent file does not already exist for this package, we call
+ # `make_individual_torrent' to make it.
+ if ! [[ -f "${torrent_location}${pkg_name}.torrent" ]]
+ then
+ "$script_directory/make_individual_torrent" "${pkg}" "${public_location}"
+ fi
+ done
+done
+
+if [[ "${username}" == root ]]
+then
+ chown repo *
+fi
diff --git a/cron-jobs/makepkg.conf b/cron-jobs/makepkg.conf
deleted file mode 100644
index d8512fb..0000000
--- a/cron-jobs/makepkg.conf
+++ /dev/null
@@ -1,147 +0,0 @@
-#
-# /etc/makepkg.conf
-#
-
-#########################################################################
-# SOURCE ACQUISITION
-#########################################################################
-#
-#-- The download utilities that makepkg should use to acquire sources
-# Format: 'protocol::agent'
-DLAGENTS=('ftp::/usr/bin/curl -sS -fC - --ftp-pasv --retry 3 --retry-delay 3 -o %o %u'
- 'http::/usr/bin/curl -sS -fLC - --retry 3 --retry-delay 3 -o %o %u'
- 'https::/usr/bin/curl -sS -fLC - --retry 3 --retry-delay 3 -o %o %u'
- 'rsync::/usr/bin/rsync -q --no-motd -z %u %o'
- 'scp::/usr/bin/scp -q -C %u %o')
-
-# Other common tools:
-# /usr/bin/snarf
-# /usr/bin/lftpget -c
-# /usr/bin/wget
-
-#-- The package required by makepkg to download VCS sources
-# Format: 'protocol::package'
-VCSCLIENTS=('bzr::bzr'
- 'git::git'
- 'hg::mercurial'
- 'svn::subversion')
-
-#########################################################################
-# ARCHITECTURE, COMPILE FLAGS
-#########################################################################
-#
-CARCH="x86_64"
-CHOST="x86_64-unknown-linux-gnu"
-
-#-- Compiler and Linker Flags
-# -march (or -mcpu) builds exclusively for an architecture
-# -mtune optimizes for an architecture, but builds for whole processor family
-CPPFLAGS="-D_FORTIFY_SOURCE=2"
-CFLAGS="-march=x86-64 -mtune=generic -O2 -pipe -fstack-protector-strong"
-CXXFLAGS="-march=x86-64 -mtune=generic -O2 -pipe -fstack-protector-strong"
-LDFLAGS="-Wl,-O1,--sort-common,--as-needed,-z,relro"
-#-- Make Flags: change this for DistCC/SMP systems
-#MAKEFLAGS="-j2"
-#-- Debugging flags
-DEBUG_CFLAGS="-g -fvar-tracking-assignments"
-DEBUG_CXXFLAGS="-g -fvar-tracking-assignments"
-
-#########################################################################
-# BUILD ENVIRONMENT
-#########################################################################
-#
-# Defaults: BUILDENV=(!distcc color !ccache check !sign)
-# A negated environment option will do the opposite of the comments below.
-#
-#-- distcc: Use the Distributed C/C++/ObjC compiler
-#-- color: Colorize output messages
-#-- ccache: Use ccache to cache compilation
-#-- check: Run the check() function if present in the PKGBUILD
-#-- sign: Generate PGP signature file
-#
-BUILDENV=(!distcc color !ccache check !sign)
-#
-#-- If using DistCC, your MAKEFLAGS will also need modification. In addition,
-#-- specify a space-delimited list of hosts running in the DistCC cluster.
-#DISTCC_HOSTS=""
-#
-#-- Specify a directory for package building.
-#BUILDDIR=/tmp/makepkg
-
-#########################################################################
-# GLOBAL PACKAGE OPTIONS
-# These are default values for the options=() settings
-#########################################################################
-#
-# Default: OPTIONS=(strip docs !libtool !staticlibs emptydirs zipman purge !optipng !upx !debug)
-# A negated option will do the opposite of the comments below.
-#
-#-- strip: Strip symbols from binaries/libraries
-#-- docs: Save doc directories specified by DOC_DIRS
-#-- libtool: Leave libtool (.la) files in packages
-#-- staticlibs: Leave static library (.a) files in packages
-#-- emptydirs: Leave empty directories in packages
-#-- zipman: Compress manual (man and info) pages in MAN_DIRS with gzip
-#-- purge: Remove files specified by PURGE_TARGETS
-#-- upx: Compress binary executable files using UPX
-#-- optipng: Optimize PNG images with optipng
-#-- debug: Add debugging flags as specified in DEBUG_* variables
-#
-OPTIONS=(strip docs !libtool !staticlibs emptydirs zipman purge !optipng !upx !debug)
-
-#-- File integrity checks to use. Valid: md5, sha1, sha256, sha384, sha512
-INTEGRITY_CHECK=(md5)
-#-- Options to be used when stripping binaries. See `man strip' for details.
-STRIP_BINARIES="--strip-all"
-#-- Options to be used when stripping shared libraries. See `man strip' for details.
-STRIP_SHARED="--strip-unneeded"
-#-- Options to be used when stripping static libraries. See `man strip' for details.
-STRIP_STATIC="--strip-debug"
-#-- Manual (man and info) directories to compress (if zipman is specified)
-MAN_DIRS=({usr{,/local}{,/share},opt/*}/{man,info})
-#-- Doc directories to remove (if !docs is specified)
-DOC_DIRS=(usr/{,local/}{,share/}{doc,gtk-doc} opt/*/{doc,gtk-doc})
-#-- Files to be removed from all packages (if purge is specified)
-PURGE_TARGETS=(usr/{,share}/info/dir .packlist *.pod)
-
-#########################################################################
-# PACKAGE OUTPUT
-#########################################################################
-#
-# Default: put built package and cached source in build directory
-#
-#-- Destination: specify a fixed directory where all packages will be placed
-#PKGDEST=/home/packages
-#-- Source cache: specify a fixed directory where source files will be cached
-#SRCDEST=/home/sources
-#-- Source packages: specify a fixed directory where all src packages will be placed
-#SRCPKGDEST=/home/srcpackages
-#-- Log files: specify a fixed directory where all log files will be placed
-#LOGDEST=/home/makepkglogs
-#-- Packager: name/email of the person or organization building packages
-#PACKAGER="John Doe <john@doe.com>"
-#-- Specify a key to use for package signing
-#GPGKEY=""
-
-#########################################################################
-# COMPRESSION DEFAULTS
-#########################################################################
-#
-COMPRESSGZ=(gzip -c -f -n)
-COMPRESSBZ2=(bzip2 -c -f)
-COMPRESSXZ=(xz -c -z -)
-COMPRESSLRZ=(lrzip -q)
-COMPRESSLZO=(lzop -q)
-COMPRESSZ=(compress -c -f)
-
-#########################################################################
-# EXTENSION DEFAULTS
-#########################################################################
-#
-# WARNING: Do NOT modify these variables unless you know what you are
-# doing.
-#
-PKGEXT='.pkg.tar.xz'
-SRCEXT='.src.tar.gz'
-
-# vim: set ft=sh ts=2 sw=2 et:
diff --git a/cron-jobs/sourceballs b/cron-jobs/sourceballs
index a1030e9..c02912a 100755
--- a/cron-jobs/sourceballs
+++ b/cron-jobs/sourceballs
@@ -77,9 +77,9 @@ for repo in "${PKGREPOS[@]}"; do
continue
fi
- # Get the sources from svn
+ # Get the sources from xbs
mkdir -p -m0770 "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}"
- arch_svn export -q "${SVNREPO}/${pkgbase}/repos/${repo}-${pkgarch}" \
+ cp -a "$(xbs releasepath "${pkgbase}" "${repo}" "${pkgarch}")" \
"${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null 2>&1
if [ $? -ge 1 ]; then
failedpkgs+=("${pkgbase}-${pkgver}${SRCEXT}")
@@ -88,7 +88,7 @@ for repo in "${PKGREPOS[@]}"; do
# Build the actual source package
pushd "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null
- makepkg --nocolor --allsource --ignorearch --skippgpcheck --config "${dirname}/makepkg.conf" >"${WORKDIR}/${pkgbase}.log" 2>&1
+ SRCPKGDEST=. makepkg --nocolor --allsource --ignorearch --skippgpcheck >"${WORKDIR}/${pkgbase}.log" 2>&1
if [ $? -eq 0 ] && [ -f "${pkgbase}-${pkgver}${SRCEXT}" ]; then
mv_acl "${pkgbase}-${pkgver}${SRCEXT}" "${FTP_BASE}/${SRCPOOL}/${pkgbase}-${pkgver}${SRCEXT}"
# Avoid creating the same source package for every arch
diff --git a/cron-jobs/update-web-db b/cron-jobs/update-web-db
deleted file mode 100755
index c8bf74a..0000000
--- a/cron-jobs/update-web-db
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/bin/bash
-
-. "$(dirname "$(readlink -e "$0")")/../config"
-. "$(dirname "$(readlink -e "$0")")/../db-functions"
-
-# setup paths
-SPATH="/srv/http/archweb"
-ENVPATH="/srv/http/archweb-env/bin/activate"
-
-# having "more important repos" last should make [core] trickle to the top of
-# the updates list each hour rather than being overwhelmed by big [extra] and
-# [community] updates
-REPOS=('community-testing' 'multilib-testing' 'multilib' 'community' 'extra' 'testing' 'core')
-LOGOUT="/tmp/archweb_update.log"
-
-# figure out what operation to perform
-cmd="${0##*/}"
-if [[ $cmd != "update-web-db" && $cmd != "update-web-files-db" ]]; then
- die "Invalid command name '%s' specified!" "$cmd"
-fi
-
-script_lock
-
-# run at nice 5. it can churn quite a bit of cpu after all.
-renice +5 -p $$ > /dev/null
-
-echo "$cmd: Updating DB at $(date)" >> "${LOGOUT}"
-
-# source our virtualenv if it exists
-if [ -f "$ENVPATH" ]; then
- . "$ENVPATH"
-fi
-
-case "$cmd" in
- update-web-db)
- dbfileext="${DBEXT}"
- flags=""
- ;;
- update-web-files-db)
- dbfileext="${FILESEXT}"
- flags="--filesonly"
- ;;
-esac
-
-# Lock the repos and get a copy of the db files to work on
-for repo in "${REPOS[@]}"; do
- for arch in "${ARCHES[@]}"; do
- repo_lock "${repo}" "${arch}" || exit 1
- dbfile="/srv/ftp/${repo}/os/${arch}/${repo}${dbfileext}"
- if [ -f "${dbfile}" ]; then
- mkdir -p "${WORKDIR}/${repo}/${arch}"
- cp "${dbfile}" "${WORKDIR}/${repo}/${arch}/${repo}${dbfileext}"
- fi
- repo_unlock "${repo}" "${arch}"
- done
-done
-
-# Run reporead on our db copy
-pushd "$SPATH" >/dev/null
-for repo in "${REPOS[@]}"; do
- for arch in "${ARCHES[@]}"; do
- dbcopy="${WORKDIR}/${repo}/${arch}/${repo}${dbfileext}"
- if [ -f "${dbcopy}" ]; then
- echo "Updating ${repo}-${arch}" >> "${LOGOUT}"
- ./manage.py reporead "${flags}" "${arch}" "${dbcopy}" >> "${LOGOUT}" 2>&1
- echo "" >> "${LOGOUT}"
- fi
- done
-done
-popd >/dev/null
-echo "" >> "${LOGOUT}"
-
-# rotate the file if it is getting big (> 10M), overwriting any old backup
-if [[ $(stat -c%s "${LOGOUT}") -gt 10485760 ]]; then
- mv "${LOGOUT}" "${LOGOUT}.old"
-fi
-
-script_unlock
diff --git a/cron-jobs/update-web-files-db b/cron-jobs/update-web-files-db
deleted file mode 120000
index 0c2c4fa..0000000
--- a/cron-jobs/update-web-files-db
+++ /dev/null
@@ -1 +0,0 @@
-update-web-db \ No newline at end of file
diff --git a/db-check-nonfree-in-db b/db-check-nonfree-in-db
new file mode 100755
index 0000000..3e6b273
--- /dev/null
+++ b/db-check-nonfree-in-db
@@ -0,0 +1,28 @@
+#!/usr/bin/env python2
+#-*- encoding: utf-8 -*-
+from filter import *
+import argparse
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ prog="db-check-nonfree-in-db",
+ description="Cleans nonfree files on repo",)
+
+ parser.add_argument("-k", "--blacklist-file", type=str,
+ help="File containing blacklisted names",
+ required=True,)
+
+ parser.add_argument("-b", "--database", type=str,
+                        help="database to clean",
+ required=True,)
+
+ args=parser.parse_args()
+
+ if not (args.blacklist_file and args.database):
+ parser.print_help()
+ exit(1)
+
+ blacklist=listado(args.blacklist_file)
+ pkgs=get_pkginfo_from_db(args.database)
+
+ print(" ".join([pkg["name"] for pkg in pkgs if pkg["name"] in blacklist]))
diff --git a/db-check-package-libraries b/db-check-package-libraries
new file mode 100755
index 0000000..612fc4f
--- /dev/null
+++ b/db-check-package-libraries
@@ -0,0 +1,193 @@
+#!/usr/bin/env python3
+# Copyright (C) 2012 Michał Masłowski <mtjm@mtjm.eu>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+
+"""
+Check which libraries are provided or required by a package, store
+this in a database, update and list broken packages.
+
+Dependencies:
+
+- Python 3.2 or later with SQLite 3 support
+
+- ``bsdtar``
+
+- ``readelf``
+"""
+
+
+import os.path
+import re
+import sqlite3
+import subprocess
+import tempfile
+
+
+#: Regexp matching an interesting dynamic entry.
+_DYNAMIC = re.compile(r"^\s*[0-9a-fx]+"
+ "\s*\((NEEDED|SONAME)\)[^:]*:\s*\[(.+)\]$")
+
+
+def make_db(path):
+ """Make a new, empty, library database at *path*."""
+ con = sqlite3.connect(path)
+ con.executescript("""
+create table provided(
+ library varchar not null,
+ package varchar not null
+);
+create table used(
+ library varchar not null,
+ package varchar not null
+);
+""")
+ con.close()
+
+
+def begin(database):
+ """Connect to *database* and start a transaction."""
+ con = sqlite3.connect(database)
+ con.execute("begin exclusive")
+ return con
+
+
+def add_provided(con, package, libraries):
+ """Write that *package* provides *libraries*."""
+ for library in libraries:
+ con.execute("insert into provided (package, library) values (?,?)",
+ (package, library))
+
+
+def add_used(con, package, libraries):
+ """Write that *package* uses *libraries*."""
+ for library in libraries:
+ con.execute("insert into used (package, library) values (?,?)",
+ (package, library))
+
+
+def remove_package(con, package):
+ """Remove all entries for a package."""
+ con.execute("delete from provided where package=?", (package,))
+ con.execute("delete from used where package=?", (package,))
+
+
+def add_package(con, package):
+ """Add entries from a named *package*."""
+ # Extract to a temporary directory. This could be done more
+ # efficiently, since there is no need to store more than one file
+ # at once.
+ with tempfile.TemporaryDirectory() as temp:
+ tar = subprocess.Popen(("bsdtar", "xf", package, "-C", temp))
+ tar.communicate()
+ with open(os.path.join(temp, ".PKGINFO")) as pkginfo:
+ for line in pkginfo:
+ if line.startswith("pkgname ="):
+ pkgname = line[len("pkgname ="):].strip()
+ break
+ # Don't list previously removed libraries.
+ remove_package(con, pkgname)
+ provided = set()
+ used = set()
+ # Search for ELFs.
+ for dirname, dirnames, filenames in os.walk(temp):
+ assert dirnames is not None # unused, avoid pylint warning
+ for file_name in filenames:
+ path = os.path.join(dirname, file_name)
+ with open(path, "rb") as file_object:
+ if file_object.read(4) != b"\177ELF":
+ continue
+ readelf = subprocess.Popen(("readelf", "-d", path),
+ stdout=subprocess.PIPE)
+ for line in readelf.communicate()[0].split(b"\n"):
+ match = _DYNAMIC.match(line.decode("ascii"))
+ if match:
+ if match.group(1) == "SONAME":
+ provided.add(match.group(2))
+ elif match.group(1) == "NEEDED":
+ used.add(match.group(2))
+ else:
+ raise AssertionError("unknown entry type "
+ + match.group(1))
+ add_provided(con, pkgname, provided)
+ add_used(con, pkgname, used)
+
+
+def init(arguments):
+ """Initialize."""
+ make_db(arguments.database)
+
+
+def add(arguments):
+ """Add packages."""
+ con = begin(arguments.database)
+ for package in arguments.packages:
+ add_package(con, package)
+ con.commit()
+ con.close()
+
+
+def remove(arguments):
+ """Remove packages."""
+ con = begin(arguments.database)
+ for package in arguments.packages:
+ remove_package(con, package)
+ con.commit()
+ con.close()
+
+
+def check(arguments):
+ """List broken packages."""
+ con = begin(arguments.database)
+ available = set(row[0] for row
+ in con.execute("select library from provided"))
+ for package, library in con.execute("select package, library from used"):
+ if library not in available:
+ print(package, "needs", library)
+ con.close()
+
+
+def main():
+ """Get arguments and run the command."""
+ from argparse import ArgumentParser
+ parser = ArgumentParser(prog="db-check-package-libraries",
+ description="Check packages for "
+ "provided/needed libraries")
+ parser.add_argument("-d", "--database", type=str,
+ help="Database file to use",
+ default="package-libraries.sqlite")
+ subparsers = parser.add_subparsers()
+ subparser = subparsers.add_parser(name="init",
+ help="initialize the database")
+ subparser.set_defaults(command=init)
+ subparser = subparsers.add_parser(name="add",
+ help="add packages to database")
+ subparser.add_argument("packages", nargs="+", type=str,
+ help="package files to add")
+ subparser.set_defaults(command=add)
+ subparser = subparsers.add_parser(name="remove",
+ help="remove packages from database")
+ subparser.add_argument("packages", nargs="+", type=str,
+ help="package names to remove")
+ subparser.set_defaults(command=remove)
+ subparser = subparsers.add_parser(name="check",
+ help="list broken packages")
+ subparser.set_defaults(command=check)
+ arguments = parser.parse_args()
+ arguments.command(arguments)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/db-check-unsigned-packages b/db-check-unsigned-packages
new file mode 100755
index 0000000..0fc053b
--- /dev/null
+++ b/db-check-unsigned-packages
@@ -0,0 +1,38 @@
+#!/bin/bash
+# Copyright (C) 2012 Michał Masłowski <mtjm@mtjm.eu>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+set -e
+
+# Output a list of repo/package-name-and-version pairs representing
+# unsigned packages available for architecture $1 and built for the
+# architectures given in the remaining arguments (usually $1 or any;
+# the default is to list all).
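+#
+# Example (a sketch): list unsigned x86_64 and "any" packages:
+#   db-check-unsigned-packages x86_64 x86_64 any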
+
+. "$(dirname "$(readlink -e "$0")")/config"
+. "$(dirname "$(readlink -e "$0")")/db-functions"
+
+if [ $# -lt 1 ]; then
+ msg "usage: %s <architecture>" "${0##*/}"
+ exit 1
+fi
+
+arch=$1
+shift
+
+for repo in "${PKGREPOS[@]}"
+do
+ db="${FTP_BASE}/${repo}/os/${arch}/${repo}.db"
+ [ -f "$db" ] && "$(dirname "$(readlink -e "$0")")/db-check-unsigned-packages.py" "$repo" "$@" < "$db"
+done
diff --git a/db-check-unsigned-packages.py b/db-check-unsigned-packages.py
new file mode 100755
index 0000000..80cff51
--- /dev/null
+++ b/db-check-unsigned-packages.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python3
+# Copyright (C) 2012 Michał Masłowski <mtjm@mtjm.eu>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+
+"""
+Output a list of repo/package-name-and-version pairs representing
+unsigned packages found in the database read from standard input, for
+the repo named in the first argument and for the architectures listed
+in the remaining arguments (usually the database's own architecture or
+any; the default is to list all).
+
+If the --keyset argument is passed, print the key fingerprint of every
+signed package.
+"""
+
+
+import base64
+import subprocess
+import sys
+import tarfile
+
+
+def main():
+ """Do the job."""
+ check_keys = False
+ if "--keyset" in sys.argv:
+ sys.argv.remove("--keyset")
+ check_keys = True
+ repo = sys.argv[1]
+ pkgarches = frozenset(name.encode("utf-8") for name in sys.argv[2:])
+ packages = []
+ keys = []
+ with tarfile.open(fileobj=sys.stdin.buffer) as archive:
+ for entry in archive:
+ if entry.name.endswith("/desc"):
+ content = archive.extractfile(entry)
+ skip = False
+ is_arch = False
+ key = None
+ for line in content:
+ if is_arch:
+ is_arch = False
+ if pkgarches and line.strip() not in pkgarches:
+ skip = True # different architecture
+ break
+ if line == b"%PGPSIG%\n":
+ skip = True # signed
+ key = b""
+ if check_keys:
+ continue
+ else:
+ break
+ if line == b"%ARCH%\n":
+ is_arch = True
+ continue
+ if key is not None:
+ if line.strip():
+ key += line.strip()
+ else:
+ break
+ if check_keys and key:
+ key_binary = base64.b64decode(key)
+ keys.append(key_binary)
+ packages.append(repo + "/" + entry.name[:-5])
+ if skip:
+ continue
+ print(repo + "/" + entry.name[:-5])
+ if check_keys and keys:
+ # We have collected all signed package names in packages and
+ # all keys in keys. Let's now ask gpg to list all signatures
+ # and find which keys made them.
+ packets = subprocess.check_output(("gpg", "--list-packets"),
+ input=b"".join(keys))
+ i = 0
+ for line in packets.decode("latin1").split("\n"):
+ if line.startswith(":signature packet:"):
+ keyid = line[line.index("keyid ") + len("keyid "):]
+ print(packages[i], keyid)
+ i += 1
+
+
+if __name__ == "__main__":
+ main()
diff --git a/db-functions b/db-functions
index 217e62f..1d37123 100644
--- a/db-functions
+++ b/db-functions
@@ -304,7 +304,7 @@ check_pkgfile() {
fi
}
-check_pkgsvn() {
+check_pkgxbs() {
local pkgfile="${1}"
local _pkgbase="$(getpkgbase "${pkgfile}")"
[ $? -ge 1 ] && return 1
@@ -318,18 +318,11 @@ check_pkgsvn() {
in_array "${repo}" "${PKGREPOS[@]}" || return 1
- if [ ! -f "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" ]; then
- mkdir -p "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}"
- arch_svn export -q "${SVNREPO}/${_pkgbase}/repos/${repo}-${_pkgarch}/PKGBUILD" \
- "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" >/dev/null
- [ $? -ge 1 ] && return 1
- fi
-
- local svnver="$(. "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}"; get_full_version)"
- [ "${svnver}" == "${_pkgver}" ] || return 1
+ local xbsver="$(. "$(xbs releasepath "${_pkgbase}" "${repo}" "${_pkgarch}")/PKGBUILD"; get_full_version "${_pkgname}")"
+ [ "${xbsver}" == "${_pkgver}" ] || return 1
- local svnnames=($(. "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}"; echo "${pkgname[@]}"))
- in_array "${_pkgname}" "${svnnames[@]}" || return 1
+ local xbsnames=($(. "$(xbs releasepath "${_pkgbase}" "${repo}" "${_pkgarch}")/PKGBUILD"; echo "${pkgname[@]}"))
+ in_array "${_pkgname}" "${xbsnames[@]}" || return 1
return 0
}
@@ -340,7 +333,7 @@ check_splitpkgs() {
local pkgfiles=("${@}")
local pkgfile
local pkgdir
- local svnname
+ local xbsname
mkdir -p "${WORKDIR}/check_splitpkgs/"
pushd "${WORKDIR}/check_splitpkgs" >/dev/null
@@ -353,23 +346,16 @@ check_splitpkgs() {
mkdir -p "${repo}/${_pkgarch}/${_pkgbase}"
echo "${_pkgname}" >> "${repo}/${_pkgarch}/${_pkgbase}/staging"
- if [ ! -f "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" ]; then
- mkdir -p "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}"
- arch_svn export -q "${SVNREPO}/${_pkgbase}/repos/${repo}-${_pkgarch}/PKGBUILD" \
- "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" >/dev/null
- [ $? -ge 1 ] && return 1
- fi
-
- local svnnames=($(. "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}"; echo "${pkgname[@]}"))
- printf '%s\n' "${svnnames[@]}" >> "${repo}/${_pkgarch}/${_pkgbase}/svn"
+ local xbsnames=($(. "$(xbs releasepath "${_pkgbase}" "${repo}" "${_pkgarch}")/PKGBUILD"; echo "${pkgname[@]}"))
+ printf '%s\n' "${xbsnames[@]}" >> "${repo}/${_pkgarch}/${_pkgbase}/xbs"
done
popd >/dev/null
for pkgdir in "${WORKDIR}/check_splitpkgs/${repo}"/*/*; do
[ ! -d "${pkgdir}" ] && continue
sort -u "${pkgdir}/staging" -o "${pkgdir}/staging"
- sort -u "${pkgdir}/svn" -o "${pkgdir}/svn"
- if [ ! -z "$(comm -13 "${pkgdir}/staging" "${pkgdir}/svn")" ]; then
+ sort -u "${pkgdir}/xbs" -o "${pkgdir}/xbs"
+ if [ ! -z "$(comm -13 "${pkgdir}/staging" "${pkgdir}/xbs")" ]; then
return 1
fi
done
@@ -448,10 +434,11 @@ arch_repo_add() {
local arch=$2
local pkgs=("${@:3}")
+ printf -v pkgs_str -- '%q ' "${pkgs[@]}"
# package files might be relative to repo dir
pushd "${FTP_BASE}/${repo}/os/${arch}" >/dev/null
/usr/bin/repo-add -q "${repo}${DBEXT}" "${pkgs[@]}" \
- || error '%s' "repo-add ${repo}${DBEXT} ${pkgs[*]}"
+ || error 'repo-add %q %s' "${repo}${DBEXT}" "${pkgs_str% }"
popd >/dev/null
set_repo_permission "${repo}" "${arch}"
@@ -468,8 +455,9 @@ arch_repo_remove() {
error "No database found at '%s'" "${dbfile}"
return 1
fi
+ printf -v pkgs_str -- '%q ' "${pkgs[@]}"
/usr/bin/repo-remove -q "${dbfile}" "${pkgs[@]}" \
- || error '%s' "repo-remove ${dbfile} ${pkgs[*]}"
+ || error 'repo-remove %q %s' "${dbfile}" "${pkgs_str% }"
set_repo_permission "${repo}" "${arch}"
REPO_MODIFIED=1
diff --git a/db-import b/db-import
new file mode 100755
index 0000000..a8a073d
--- /dev/null
+++ b/db-import
@@ -0,0 +1,288 @@
+#!/bin/bash
+set -euE
+# Imports Arch-like repos, running them through a blacklist
+# License: GPLv3
+
+. "$(dirname "$(readlink -e "$0")")/config" # for: FTP_BASE DBEXT
+. "$(dirname "$(readlink -e "$0")")/db-import.conf" # for: IMPORTDIR IMPORTS
+. "$(librelib messages)"
+. "$(librelib blacklist)"
+
+# DBs = pacman DataBases
+
+# This replaces two scripts:
+# - abslibre : imports the ABS tree from Arch
+# - db-sync  : imports the pacman DBs from Arch
+
+# The flow here is:
+# 1. "${IMPORTDIR}/cache/${name}/dbs/" # Download the pacman databases
+# 2. "${IMPORTDIR}/cache/${name}/abs/" # Download the ABS tree
+# 3. "${IMPORTDIR}/clean/${name}/dbs/" # Run the pacman DBs through the blacklist
+# 4. "${IMPORTDIR}/clean/${name}/pkgs/" # Download the pkg files mentioned in "clean/${name}/dbs/"
+# 5. "${IMPORTDIR}/staging/${tag}" # Copy all the package files we just downloaded to here
+# 6. Run `db-update` with STAGING="${IMPORTDIR}/staging/${tag}"
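+#
+# For example (illustrative values only), with an IMPORTS entry like
+#   "archlinux rsync.example.org rsync.archlinux.org core-x86_64 extra-x86_64"
+# the tag "core-x86_64" would end up staged under
+#   "${IMPORTDIR}/staging/core-x86_64/"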
+
+# generic arguments to pass to rsync, borrowed from `abs`
+SYNCARGS='-mrtvlH --no-motd --no-p --no-o --no-g'
+
+main() {
+ blacklist-update
+
+ local importStr
+ for importStr in "${IMPORTS[@]}"; do
+ local importAry=($importStr)
+ local name=${importAry[0]}
+ local pkgmirror=${importAry[1]}
+ local absmirror=${importAry[2]}
+ local tags=("${importAry[@]:3}")
+
+ msg "Fetching remote package source: %s" "$name"
+ fetch_dbs "$name" "$pkgmirror"
+ fetch_abs "$name" "$absmirror" "${tags[@]}"
+ msg "Filtering blacklisted packages from remote package source: %s" "$name"
+ clean_dbs "$name" "${tags[@]}"
+ fetch_pkgs "$name" "${tags[@]}"
+ msg "Publishing changes from remote package source: %s" "$name"
+ publish "$name" "${tags[@]}"
+ done
+ return 0
+}
+
+fetch_dbs() {
+ local name=$1
+ local pkgmirror=$2
+
+ msg2 'Synchronizing package databases...'
+
+ mkdir -p -- "${IMPORTDIR}/cache/${name}/dbs"
+ # Grab just the .db files from $pkgmirror
+ rsync $SYNCARGS --delete-after \
+ --include="*/" \
+ --include="*.db" \
+ --include="*${DBEXT}" \
+ --exclude="*" \
+ "rsync://${pkgmirror}/" "${IMPORTDIR}/cache/${name}/dbs"
+}
+
+fetch_abs() {
+ local name=$1
+ local absmirror=$2
+ local tags=("${@:3}")
+
+ local fake_home
+ local absroot
+
+ # Sync the ABS tree from $absmirror
+ local arch
+ for arch in $(list_arches "${tags[@]}"); do
+ msg2 'Synchronizing %s ABS tree...' "$arch"
+
+ absroot="${IMPORTDIR}/cache/${name}/abs/${arch}"
+ mkdir -p -- "$absroot"
+
+ # Configure `abs` for this mirror
+ fake_home="${IMPORTDIR}/homes/${name}/${arch}"
+ mkdir -p -- "$fake_home"
+ {
+ printf "ABSROOT='%s'\n" "$absroot"
+ printf "SYNCSERVER='%s'\n" "$absmirror"
+ printf "ARCH='%s'\n" "$arch"
+ printf 'REPOS=(\n'
+ list_repos "$arch" "${tags[@]}"
+ printf ')\n'
+ } > "${fake_home}/.abs.conf"
+
+ # Run `abs`
+ HOME=$fake_home abs
+ done
+}
+
+clean_dbs() {
+ local name=$1
+ local tags=("${@:2}")
+
+ rm -rf -- "${IMPORTDIR}/clean/$name"
+
+ local tag
+ for tag in "${tags[@]}"; do
+ msg2 'Creating clean version of %s package database...' "$tag"
+
+ local cache="${IMPORTDIR}/cache/$name/dbs/$(db_file "$tag")"
+ local clean="${IMPORTDIR}/clean/$name/dbs/$(db_file "$tag")"
+ install -Dm644 "$cache" "$clean"
+
+ blacklist-cat | blacklist-get-pkg | xargs -d '\n' repo-remove "$clean"
+ done
+}
+
+fetch_pkgs() {
+ local name=$1
+ local tags=("${@:2}")
+
+ local repo arch dbfile whitelist
+
+ local tag
+ for tag in "${tags[@]}"; do
+ msg2 'Synchronizing package files for %s...' "$tag"
+ repo=${tag%-*}
+ arch=${tag##*-}
+
+ dbfile="${IMPORTDIR}/clean/$name/dbs/$(db_file "$tag")"
+ whitelist="${IMPORTDIR}/clean/$name/dbs/$tag.whitelist"
+
+ list_pkgs "$dbfile" > "$whitelist"
+
+ # fetch the architecture-specific packages
+ rsync $SYNCARGS --delete-after --delete-excluded \
+ --delay-updates \
+ --include-from=<(sed "s|\$|-$arch.tar.?z|" "$whitelist") \
+ --exclude='*' \
+ "rsync://${pkgmirror}/$(db_dir "$tag")/" \
+ "${IMPORTDIR}/clean/${name}/pkgs/${tag}/"
+
+ # fetch the architecture-independent packages
+ rsync $SYNCARGS --delete-after --delete-excluded \
+ --delay-updates \
+ --include-from=<(sed "s|\$|-any.tar.?z|" "$whitelist") \
+ --exclude='*' \
+ "rsync://${pkgmirror}/$(db_dir "$tag")/" \
+ "${IMPORTDIR}/clean/${name}/pkgs/${repo}-any/"
+ done
+}
+
+publish() {
+ local name=$1
+ local tags=("${@:2}")
+
+ local tag
+ for tag in "${tags[@]}"; do
+ msg2 'Publishing changes to %s...' "$tag"
+ publish_tag "$name" "$tag"
+ done
+}
+
+publish_tag() {
+ local name=$1
+ local tag=$2
+
+ local repo=${tag%-*}
+ local arch=${tag##*-}
+ local dir="${IMPORTDIR}/clean/${name}/pkgs/${tag}"
+
+ local found
+ local error=false
+ local files=()
+
+ local pkgid pkgarch
+ for pkgid in $(list_added_pkgs "$name" "$tag"); do
+ found=false
+
+ for pkgarch in "${arch}" any; do
+ file="${dir}/${pkgid}-${arch}".pkg.tar.?z
+ if ! $found && [[ -r $file ]]; then
+ files+=("$file")
+ found=true
+ fi
+ done
+
+ if ! $found; then
+ error 'Could not find package file for %s' "$pkgid"
+ error=true
+ fi
+ done
+
+ if $error; then
+ error 'Quitting...'
+ return 1
+ fi
+
+ mkdir -p -- "${IMPORTDIR}/staging/${tag}/${repo}"
+ cp -al -- "${files[@]}" "${IMPORTDIR}/staging/${tag}/${repo}/"
+ STAGING="${IMPORTDIR}/staging/${tag}" db-update
+
+ # XXX: db-remove wants pkgbase, not pkgname
+ list_removed_pkgs "$name" "$tag" | xargs -d '\n' db-remove "$repo" "$arch"
+}
+
+################################################################################
+
+# Usage: list_arches repo-arch...
+# Returns a list of the architectures mentioned in a list of "repo-arch" pairs.
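+# e.g. `list_arches core-i686 extra-x86_64 community-any` prints
+# "any", "i686" and "x86_64", one per line (illustrative example).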
+list_arches() {
+ local tags=("$@")
+ printf '%s\n' "${tags[@]##*-}" | sort -u
+}
+
+# Usage: list_repos arch repo-arch...
+# Returns a list of all the repositories mentioned for a given architecture in a
+# list of "repo-arch" pairs.
+list_repos() {
+ local arch=$1
+ local tags=("${@:2}")
+ printf '%s\n' "${tags[@]}" | sed -n "s/-$arch\$//p"
+}
+
+# Usage: db_dir repo-arch
+db_dir() {
+ local tag=$1
+ local repo=${tag%-*}
+ local arch=${tag##*-}
+ echo "${repo}/os/${arch}"
+}
+
+# Usage: db_file repo-arch
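+# e.g. with DBEXT=".db.tar.gz" (set in config), `db_file core-x86_64`
+# prints "core/os/x86_64/core.db.tar.gz" (illustrative example).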
+db_file() {
+ local tag=$1
+ local repo=${tag%-*}
+ local arch=${tag##*-}
+ echo "${repo}/os/${arch}/${repo}${DBEXT}"
+}
+
+# Usage: list_pkgs dbfile
+# Prints "$pkgname-$(get_full_version "$pkgname")" for every package in $dbfile
+list_pkgs() {
+ local dbfile=$1
+ bsdtar tf "$dbfile" | cut -d/ -f1
+}
+
+# Usage: list_pkgs | sep_ver
+# Separates the pkgname from the version (replaces the '-' with ' ') for the
+# list provided on stdin.
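+# e.g. "tmux-1.4-2" becomes "tmux 1.4-2" (illustrative example).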
+sep_ver() {
+ sed -r 's/-([^-]*-[^-]*)$/ \1/'
+}
+
+# Usage: list_removed_pkgs importsrc repo-arch
+# Prints "$pkgname-$(get_full_version "$pkgname")" for every removed package.
+list_removed_pkgs() {
+ local name=$1
+ local tag=$2
+
+ local old="${FTP_BASE}/$(db_file "$tag")"
+ local new="${IMPORTDIR}/clean/$name/dbs/$(db_file "$tag")"
+
+ # make a list of:
+ # pkgname oldver[ newver]
+ # It will include removed or updated packages (changed packages)
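+ # e.g. (illustrative): "foo 1.0-1" appears only in the old db, so it
+ # is a removal; "bar 1.0-1 1.0-2" appears in both, so it is an update
+ # and gets filtered out below.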
+ join -a1 \
+ <(list_pkgs "$old"|sep_ver|sort) \
+ <(list_pkgs "$new"|sep_ver|sort) |
+ grep -v ' .* ' | # remove updated packages
+ sed 's/ /-/' # re-combine the pkgname and version
+}
+
+# Usage: list_added_pkgs importsrc repo-arch
+# (the name is slightly a misnomer; it also includes updated packages)
+# Prints "$pkgname-$(get_full_version "$pkgname")" for every added or updated
+# package.
+list_added_pkgs() {
+ local name=$1
+ local tag=$2
+
+ local old="${FTP_BASE}/$(db_file "$tag")"
+ local new="${IMPORTDIR}/clean/$name/dbs/$(db_file "$tag")"
+
+ comm -13 <(list_pkgs "$old") <(list_pkgs "$new")
+}
+
+main "$@"
diff --git a/db-import-pick-mirror b/db-import-pick-mirror
new file mode 100755
index 0000000..4d01b95
--- /dev/null
+++ b/db-import-pick-mirror
@@ -0,0 +1,25 @@
+#!/usr/bin/env ruby
+
+require 'json'
+require 'net/http'
+
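+# Usage sketch (this is how db-import.conf invokes it):
+#   db-import-pick-mirror rsync https://www.archlinux.org/mirrors/status/tier/1/json/
+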
+protocol = ARGV[0]
+jsonurl = ARGV[1]
+
+data = JSON::parse(Net::HTTP.get(URI(jsonurl)))
+
+if data["version"] != 3
+ print "Data format version != 3"
+ exit 1
+end
+
+# Filter out URLs with incomplete information
+urls = data["urls"].select{|a| a.none?{|k,v|v.nil?}}
+rsync_urls = urls.select{|a| a["protocol"]==protocol}
+
+# By score ( (delay+speed)/completion )
+#best = rsync_urls.sort{|a,b| (a["score"] || Float::INFINITY) <=> (b["score"] || Float::INFINITY) }.first
+# By delay/completion
+best = rsync_urls.sort{|a,b| a["delay"]/a["completion_pct"] <=> b["delay"]/b["completion_pct"] }.first
+
+puts best["url"]
diff --git a/db-import.conf b/db-import.conf
new file mode 100644
index 0000000..e00e7b0
--- /dev/null
+++ b/db-import.conf
@@ -0,0 +1,23 @@
+#!/hint/bash
+
+IMPORTDIR=/srv/repo/import
+
+case "$USER" in
+ db-import-packages)
+ _archrepos=(
+ {core,extra,testing,staging}-{i686,x86_64}
+ {gnome,kde}-unstable-{i686,x86_64}
+ );;
+ db-import-community)
+ _archrepos=(
+ community{,-testing,-staging}-{i686,x86_64}
+ multilib{,-testing,-staging}-x86_64
+ );;
+esac
+
+_archpkgmirror=$(db-import-pick-mirror rsync https://www.archlinux.org/mirrors/status/tier/1/json/)
+
+# name pkgmirror absmirror repo-arch...
+IMPORTS=("archlinux ${_archpkgmirror} rsync.archlinux.org ${_archrepos[*]}")
+
+unset _archrepos _archpkgmirror
diff --git a/db-init b/db-init
new file mode 100755
index 0000000..e25dbff
--- /dev/null
+++ b/db-init
@@ -0,0 +1,6 @@
+#!/bin/bash
+# Creates the repo structure defined in config
+
+source "$(dirname "$(readlink -e "$0")")/config"
+
+mkdir -p -- "${FTP_BASE}"/{"${PKGPOOL}","${SRCPOOL}"} "${CLEANUP_DESTDIR}" "${SOURCE_CLEANUP_DESTDIR}" "${STAGING}"
diff --git a/db-move b/db-move
index 55121be..89a0ad6 100755
--- a/db-move
+++ b/db-move
@@ -24,23 +24,16 @@ for pkgarch in "${ARCHES[@]}"; do
repo_lock "${repo_from}" "${pkgarch}" || exit 1
done
-# check if packages to be moved exist in svn and ftp dir
-arch_svn checkout -q -N "${SVNREPO}" "${WORKDIR}/svn" >/dev/null
+# First loop is to check that all necessary files exist
for pkgbase in "${args[@]:2}"; do
- arch_svn up -q "${WORKDIR}/svn/${pkgbase}" >/dev/null
for pkgarch in "${ARCHES[@]}" 'any'; do
- svnrepo_from="${WORKDIR}/svn/${pkgbase}/repos/${repo_from}-${pkgarch}"
- if [ -r "${svnrepo_from}/PKGBUILD" ]; then
- pkgnames=($(. "${svnrepo_from}/PKGBUILD"; echo "${pkgname[@]}"))
+ xbsrepo_from="$(xbs releasepath "${pkgbase}" "${repo_from}" "${pkgarch}")"
+ if [ -r "${xbsrepo_from}/PKGBUILD" ]; then
+ pkgnames=($(. "${xbsrepo_from}/PKGBUILD"; echo "${pkgname[@]}"))
if [ ${#pkgnames[@]} -lt 1 ]; then
die "Could not read pkgname"
fi
- pkgver=$(. "${svnrepo_from}/PKGBUILD"; get_full_version)
- if [ -z "${pkgver}" ]; then
- die "Could not read pkgver"
- fi
-
if [ "${pkgarch}" == 'any' ]; then
tarches=("${ARCHES[@]}")
else
@@ -48,6 +41,10 @@ for pkgbase in "${args[@]:2}"; do
fi
for pkgname in "${pkgnames[@]}"; do
+ pkgver=$(. "${xbsrepo_from}/PKGBUILD"; get_full_version "${pkgname}")
+ if [ -z "${pkgver}" ]; then
+ die "Could not read pkgver"
+ fi
for tarch in "${tarches[@]}"; do
getpkgfile "${ftppath_from}/${tarch}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT} >/dev/null
done
@@ -63,37 +60,22 @@ msg "Moving packages from [%s] to [%s]..." "${repo_from}" "${repo_to}"
declare -A add_pkgs
declare -A remove_pkgs
for pkgbase in "${args[@]:2}"; do
- tag_list=""
- for pkgarch in "${ARCHES[@]}" 'any'; do
- svnrepo_from="${WORKDIR}/svn/${pkgbase}/repos/${repo_from}-${pkgarch}"
- svnrepo_to="${WORKDIR}/svn/${pkgbase}/repos/${repo_to}-${pkgarch}"
-
- if [ -f "${svnrepo_from}/PKGBUILD" ]; then
+ # move the package in xbs
+ arches=($(xbs move "${repo_from}" "${repo_to}" "${pkgbase}"))
+ # move the package in ftp
+ for pkgarch in "${arches[@]}"; do
+ xbsrepo_to="$(xbs releasepath "$pkgbase" "$repo_to" "$pkgarch")"
+ if true; then # to add an indent level to make merging easier
if [ "${pkgarch}" == 'any' ]; then
tarches=("${ARCHES[@]}")
else
tarches=("${pkgarch}")
fi
msg2 "%s (%s)" "${pkgbase}" "${tarches[*]}"
- pkgnames=($(. "${svnrepo_from}/PKGBUILD"; echo "${pkgname[@]}"))
- pkgver=$(. "${svnrepo_from}/PKGBUILD"; get_full_version)
-
- if [ -d "${svnrepo_to}" ]; then
- for file in $(arch_svn ls "${svnrepo_to}"); do
- arch_svn rm -q "${svnrepo_to}/$file@"
- done
- else
- mkdir "${svnrepo_to}"
- arch_svn add -q "${svnrepo_to}"
- fi
-
- for file in $(arch_svn ls "${svnrepo_from}"); do
- arch_svn mv -q -r HEAD "${svnrepo_from}/$file@" "${svnrepo_to}/"
- done
- arch_svn rm --force -q "${svnrepo_from}"
- tag_list+=", $pkgarch"
+ pkgnames=($(. "${xbsrepo_to}/PKGBUILD"; echo "${pkgname[@]}"))
for pkgname in "${pkgnames[@]}"; do
+ pkgver=$(. "${xbsrepo_to}/PKGBUILD"; get_full_version "${pkgname}")
for tarch in "${tarches[@]}"; do
pkgpath=$(getpkgfile "${ftppath_from}/${tarch}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT})
pkgfile="${pkgpath##*/}"
@@ -108,8 +90,6 @@ for pkgbase in "${args[@]:2}"; do
done
fi
done
- tag_list="${tag_list#, }"
- arch_svn commit -q "${WORKDIR}/svn/${pkgbase}" -m "${0##*/}: moved ${pkgbase} from [${repo_from}] to [${repo_to}] (${tag_list})"
done
for tarch in "${ARCHES[@]}"; do
diff --git a/db-remove b/db-remove
index 7e7fcb8..dcbe4b4 100755
--- a/db-remove
+++ b/db-remove
@@ -12,9 +12,6 @@ repo="$1"
arch="$2"
pkgbases=("${@:3}")
-ftppath="$FTP_BASE/$repo/os"
-svnrepo="$repo-$arch"
-
if ! check_repo_permission "$repo"; then
die "You don't have permission to remove packages from %s" "${repo}"
fi
@@ -32,14 +29,14 @@ done
remove_pkgs=()
for pkgbase in "${pkgbases[@]}"; do
msg "Removing %s from [%s]..." "$pkgbase" "$repo"
- arch_svn checkout -q "${SVNREPO}/${pkgbase}" "${WORKDIR}/svn/${pkgbase}" >/dev/null
- if [ -d "${WORKDIR}/svn/$pkgbase/repos/$svnrepo" ]; then
- remove_pkgs+=($(. "${WORKDIR}/svn/$pkgbase/repos/$svnrepo/PKGBUILD"; echo ${pkgname[@]}))
- arch_svn rm --force -q "${WORKDIR}/svn/$pkgbase/repos/$svnrepo"
- arch_svn commit -q "${WORKDIR}/svn/$pkgbase" -m "${0##*/}: $pkgbase removed by $(id -un)"
+ path="$(xbs releasepath "$pkgbase" "$repo" "$arch")"
+ if [ -d "$path" ]; then
+ remove_pkgs+=($(. "$path/PKGBUILD"; echo "${pkgname[@]}"))
+ xbs unrelease "$pkgbase" "$repo" "$arch"
else
- warning "%s not found in %s" "$pkgbase" "$svnrepo"
+ warning "%s not found in %s for %s" \
+ "$pkgbase" "$(xbs name)" "$repo-$arch"
warning "Removing only %s from the repo" "$pkgbase"
warning "If it was a split package you have to remove the others yourself!"
remove_pkgs+=("$pkgbase")
diff --git a/db-update b/db-update
index 358c534..c55869e 100755
--- a/db-update
+++ b/db-update
@@ -45,8 +45,8 @@ for repo in "${repos[@]}"; do
if "${REQUIRE_SIGNATURE}" && ! pacman-key -v "${pkg}.sig" >/dev/null 2>&1; then
die "Package %s does not have a valid signature" "${repo}/${pkg##*/}"
fi
- if ! check_pkgsvn "${pkg}" "${repo}"; then
- die "Package %s is not consistent with svn repository" "${repo}/${pkg##*/}"
+ if ! check_pkgxbs "${pkg}" "${repo}"; then
+ die "Package %s is not consistent with %s" "${repo}/${pkg##*/}" "$(xbs name)"
fi
if ! check_pkgrepos "${pkg}"; then
die "Package %s already exists in another repository" "${repo}/${pkg##*/}"
@@ -69,10 +69,12 @@ for repo in "${repos[@]}"; do
fi
done
+dirs=()
for repo in "${repos[@]}"; do
msg "Updating [%s]..." "${repo}"
any_pkgs=($(getpkgfiles "${STAGING}/${repo}/"*-any${PKGEXT} 2>/dev/null))
for pkgarch in "${ARCHES[@]}"; do
+ add_dirs=()
add_pkgs=()
arch_pkgs=($(getpkgfiles "${STAGING}/${repo}/"*-"${pkgarch}"${PKGEXT} 2>/dev/null))
for pkg in "${arch_pkgs[@]}" "${any_pkgs[@]}"; do
@@ -90,11 +92,17 @@ for repo in "${repos[@]}"; do
if [ -f "$FTP_BASE/${PKGPOOL}/${pkgfile}.sig" ]; then
ln -s "../../../${PKGPOOL}/${pkgfile}.sig" "$FTP_BASE/$repo/os/${pkgarch}"
fi
+ add_dirs+=("${STAGING}/abslibre/$(getpkgarch "$FTP_BASE/$PKGPOOL/$pkgfile")/$repo/$(getpkgbase "$FTP_BASE/$PKGPOOL/$pkgfile")")
add_pkgs+=("${pkgfile}")
done
+ for add_dir in "${add_dirs[@]}"; do
+ (cd "${add_dir}" && xbs release-server "${repo}" "${pkgarch}") ||
+ error 'cd %q && xbs release-server %q %q' "${add_dir}" "${repo}" "${pkgarch}"
+ done
if [ ${#add_pkgs[@]} -ge 1 ]; then
arch_repo_add "${repo}" "${pkgarch}" "${add_pkgs[@]}"
fi
+ dirs+=("${add_dirs[@]}")
done
done
@@ -103,3 +111,22 @@ for repo in "${repos[@]}"; do
repo_unlock "${repo}" "${pkgarch}"
done
done
+
+cd "${STAGING}"
+
+# Remove left over XBS files
+rm -rf -- "${dirs[@]}"
+dirname -z -- "${dirs[@]}" |
+ xargs -0 realpath -zm --relative-to="${STAGING}" -- |
+ xargs -0 rmdir -p -- 2>/dev/null
+
+# Stage generated source files
+while read -r file; do
+ pub="${FTP_BASE}/${file}"
+ if [[ -f "$pub" ]]; then
+ warning "file already exists: %s" "${file}"
+ else
+ mkdir -p -- "${pub%/*}"
+ mv -vn "$file" "$pub"
+ fi
+done < <(find other sources -type f 2>/dev/null)
diff --git a/make_individual_torrent b/make_individual_torrent
new file mode 100755
index 0000000..0a7e778
--- /dev/null
+++ b/make_individual_torrent
@@ -0,0 +1,52 @@
+#!/bin/bash
+# Copyright (C) 2014 Joseph Graham <joseph@t67.eu>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+# This script is called by `make_repo_torrents' to make a torrent. It
+# depends on `mktorrent'. It takes the following args:
+# $1 - path of package
+# $2 - public location
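+#
+# Example (hypothetical paths):
+#   make_individual_torrent /srv/repo/pool/packages/foo-1.0-1-x86_64.pkg.tar.xz /srv/repo/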
+
+# Comma-separated list of trackers, no spaces
+# t67.eu is run by Xylon, hackcoop by fauno & friends
+trackers='http://t67.eu:6969/announce,http://tracker.hackcoop.com.ar/announce'
+
+# This mirror is used as the webseed. Which mirror we pick doesn't
+# really matter: pacman2pacman rewrites it on the client machine, so
+# it won't normally be used anyway.
+seed_url='http://repo.parabolagnulinux.org/'
+
+if [[ -z "${1}" ]]
+then
+ echo "Error. First arg must be the path of the package."
+ exit 1
+fi
+
+if [[ -z "${2}" ]]
+then
+ echo "Error. Second arg must be the public location."
+ exit 1
+fi
+
+pkg="${1}"
+public_location="${2}"
+
+pkg_name="${pkg##*/}"
+
+# URL of the actual package for the webseed
+webseed="${seed_url}${pkg#${public_location}}"
+
+mktorrent -a "${trackers}" "${pkg}" -w "${webseed}" >/dev/null ||
+echo "Error making torrent for \"${pkg}\""
diff --git a/test/lib/common.inc b/test/lib/common.inc
index bef8749..1831602 100644
--- a/test/lib/common.inc
+++ b/test/lib/common.inc
@@ -46,21 +46,19 @@ oneTimeSetUp() {
if ! "${build}"; then
if [ "${pkgarch[0]}" == 'any' ]; then
- sudo extra-x86_64-build || die 'extra-x86_64-build failed'
+ sudo libremakepkg || die 'libremakepkg failed'
else
for a in "${pkgarch[@]}"; do
if in_array "$a" "${ARCH_BUILD[@]}"; then
- sudo "extra-${a}-build" || die "extra-${a}-build failed"
+ sudo setarch "$a" libremakepkg -n "$a" || die "setarch ${a} libremakepkg -n ${a} failed"
+ for p in "${pkgname[@]}"; do
+ cp "${p}-${pkgversion}-${a}"${PKGEXT} "$(dirname "${BASH_SOURCE[0]})/../packages/${d##*/}")"
+ done
+ else
+ warning "skipping arch %s" "$a"
fi
done
fi
- for a in "${pkgarch[@]}"; do
- if in_array "$a" "${ARCH_BUILD[@]}"; then
- for p in "${pkgname[@]}"; do
- cp "${p}-${pkgversion}-${a}"${PKGEXT} "$(dirname "${BASH_SOURCE[0]}")/../packages/${d##*/}"
- done
- fi
- done
fi
popd >/dev/null
done
@@ -82,6 +80,7 @@ setUp() {
PKGREPOS=('core' 'extra' 'testing')
PKGPOOL='pool/packages'
+ SRCPOOL='pool/sources'
mkdir -p "${TMP}/"{ftp,tmp,staging,{package,source}-cleanup,svn-packages-{copy,repo}}
for r in "${PKGREPOS[@]}"; do
@@ -105,11 +104,21 @@ setUp() {
arch_svn commit -q -m"initial commit of ${pkg}" "${TMP}/svn-packages-copy"
done
+ mkdir -p "${TMP}/home/.config/libretools"
+ export XDG_CONFIG_HOME="${TMP}/home/.config"
+ printf '%s\n' \
+ 'SVNURL=foo' \
+ "SVNREPO=\"${TMP}/svn-packages-copy\"" \
+ "ARCHES=($(printf '%q ' "${BUILD_ARCHES[@]}"))" \
+ > "$XDG_CONFIG_HOME/libretools/xbs-abs.conf"
+ printf '%s\n' 'BUILDSYSTEM=abs' > "$XDG_CONFIG_HOME/xbs.conf"
+
cat <<eot > "$(dirname "${BASH_SOURCE[0]}")/../../config.local"
FTP_BASE="${TMP}/ftp"
SVNREPO="file://${TMP}/svn-packages-repo"
PKGREPOS=("${PKGREPOS[@]}")
PKGPOOL="${PKGPOOL}"
+ SRCPOOL="${SRCPOOL}"
TESTING_REPO='testing'
STABLE_REPOS=('core' 'extra')
CLEANUP_DESTDIR="${TMP}/package-cleanup"
@@ -139,7 +148,7 @@ releasePackage() {
local pkgname
pushd "${TMP}/svn-packages-copy/${pkgbase}/trunk/" >/dev/null
- archrelease "${repo}-${arch}" >/dev/null 2>&1
+ xbs release "${repo}" "${arch}" >/dev/null 2>&1
pkgver=$(. PKGBUILD; get_full_version)
pkgname=($(. PKGBUILD; echo "${pkgname[@]}"))
popd >/dev/null
@@ -167,12 +176,12 @@ checkAnyPackageDB() {
for arch in "${ARCH_BUILD[@]}"; do
[ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}" ] || fail "${repo}/os/${arch}/${pkg} is not a symlink"
- [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}")" == "${FTP_BASE}/${PKGPOOL}/${pkg}" ] \
+ [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}")" == "$(readlink -e "${FTP_BASE}/${PKGPOOL}/${pkg}")" ] \
|| fail "${repo}/os/${arch}/${pkg} does not link to ${PKGPOOL}/${pkg}"
if "${REQUIRE_SIGNATURE}"; then
[ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig" ] || fail "${repo}/os/${arch}/${pkg}.sig is not a symlink"
- [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig")" == "${FTP_BASE}/${PKGPOOL}/${pkg}.sig" ] \
+ [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig")" == "$(readlink -e "${FTP_BASE}/${PKGPOOL}/${pkg}.sig")" ] \
|| fail "${repo}/os/${arch}/${pkg}.sig does not link to ${PKGPOOL}/${pkg}.sig"
fi
@@ -208,7 +217,7 @@ checkPackageDB() {
[ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}" ] || fail "${repo}/os/${arch}/${pkg} not a symlink"
[ -r "${STAGING}/${repo}/${pkg}" ] && fail "${repo}/${pkg} found in staging dir"
- [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}")" == "${FTP_BASE}/${PKGPOOL}/${pkg}" ] \
+ [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}")" == "$(readlink -e "${FTP_BASE}/${PKGPOOL}/${pkg}")" ] \
|| fail "${repo}/os/${arch}/${pkg} does not link to ${PKGPOOL}/${pkg}"
if "${REQUIRE_SIGNATURE}"; then
@@ -216,7 +225,7 @@ checkPackageDB() {
[ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig" ] || fail "${repo}/os/${arch}/${pkg}.sig is not a symlink"
[ -r "${STAGING}/${repo}/${pkg}.sig" ] && fail "${repo}/${pkg}.sig found in staging dir"
- [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig")" == "${FTP_BASE}/${PKGPOOL}/${pkg}.sig" ] \
+ [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}.sig")" == "$(readlink -e "${FTP_BASE}/${PKGPOOL}/${pkg}.sig")" ] \
|| fail "${repo}/os/${arch}/${pkg}.sig does not link to ${PKGPOOL}/${pkg}.sig"
fi
diff --git a/test/test.d/create-filelists.sh b/test/test.d/create-filelists.sh
index 20dafc6..837c432 100755
--- a/test/test.d/create-filelists.sh
+++ b/test/test.d/create-filelists.sh
@@ -59,7 +59,7 @@ testCreateSplitFileLists() {
../db-update
for pkgbase in "${pkgs[@]}"; do
- pkgnames=($(source "${TMP}/svn-packages-copy/${pkgbase}/trunk/PKGBUILD"; echo ${pkgname[@]}))
+ pkgnames=($(source "${TMP}/svn-packages-copy/${pkgbase}/trunk/PKGBUILD"; echo "${pkgname[@]}"))
for pkgname in "${pkgnames[@]}"; do
for arch in "${ARCH_BUILD[@]}"; do
if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra${FILESEXT}" | grep "usr/bin/${pkgname}" &>/dev/null; then
diff --git a/test/test.d/db-update.sh b/test/test.d/db-update.sh
index 540eccf..5d3c833 100755
--- a/test/test.d/db-update.sh
+++ b/test/test.d/db-update.sh
@@ -80,7 +80,7 @@ testUpdateAnyPackage() {
pushd "${TMP}/svn-packages-copy/pkg-any-a/trunk/" >/dev/null
sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
arch_svn commit -q -m"update pkg to pkgrel=2" >/dev/null
- sudo extra-i686-build
+ sudo libremakepkg
mv pkg-any-a-1-2-any.pkg.tar.xz "${pkgdir}/pkg-any-a/"
popd >/dev/null
@@ -98,7 +98,7 @@ testUpdateAnyPackageToDifferentRepositoriesAtOnce() {
pushd "${TMP}/svn-packages-copy/pkg-any-a/trunk/" >/dev/null
sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
arch_svn commit -q -m"update pkg to pkgrel=2" >/dev/null
- sudo extra-i686-build
+ sudo libremakepkg
mv pkg-any-a-1-2-any.pkg.tar.xz "${pkgdir}/pkg-any-a/"
popd >/dev/null
@@ -130,7 +130,7 @@ testUpdateSameAnyPackageToDifferentRepositories() {
../db-update >/dev/null 2>&1 && (fail 'Adding an existing package to another repository should fail'; return 1)
local arch
- for arch in i686 x86_64; do
+ for arch in "${ARCH_BUILD[@]}"; do
( [ -r "${FTP_BASE}/testing/os/${arch}/testing${DBEXT%.tar.*}" ] \
&& bsdtar -xf "${FTP_BASE}/testing/os/${arch}/testing${DBEXT%.tar.*}" -O | grep "${pkgbase}" &>/dev/null) \
&& fail "${pkgbase} should not be in testing/os/${arch}/testing${DBEXT%.tar.*}"
diff --git a/test/test.d/testing2x.sh b/test/test.d/testing2x.sh
index c611ce4..8232490 100755
--- a/test/test.d/testing2x.sh
+++ b/test/test.d/testing2x.sh
@@ -10,7 +10,7 @@ testTesting2xAnyPackage() {
pushd "${TMP}/svn-packages-copy/pkg-any-a/trunk/" >/dev/null
sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
arch_svn commit -q -m"update pkg to pkgrel=2" >/dev/null
- sudo extra-i686-build
+ sudo libremakepkg
mv pkg-any-a-1-2-any.pkg.tar.xz "${pkgdir}/pkg-any-a/"
popd >/dev/null
diff --git a/testing2x b/testing2x
deleted file mode 100755
index 901c1b0..0000000
--- a/testing2x
+++ /dev/null
@@ -1,61 +0,0 @@
-#!/bin/bash
-
-. "$(dirname "$(readlink -e "$0")")/config"
-. "$(dirname "$(readlink -e "$0")")/db-functions"
-
-if [ $# -lt 1 ]; then
- msg "usage: %s <pkgname|pkgbase> ..." "${0##*/}"
- exit 1
-fi
-
-# Lock everything to reduce possibility of interfering task between the different repo-updates
-script_lock
-for repo in "${TESTING_REPO}" "${STABLE_REPOS[@]}"; do
- for pkgarch in "${ARCHES[@]}"; do
- repo_lock "${repo}" "${pkgarch}" || exit 1
- done
-done
-
-declare -A pkgs
-
-for pkgbase in "$@"; do
- if [ ! -d "${WORKDIR}/${pkgbase}" ]; then
- arch_svn export -q "${SVNREPO}/${pkgbase}/repos" "${WORKDIR}/${pkgbase}" >/dev/null
-
- found_source=false
- for pkgarch in "${ARCHES[@]}" 'any'; do
- svnrepo_from="${WORKDIR}/${pkgbase}/${TESTING_REPO}-${pkgarch}"
- if [ -r "${svnrepo_from}/PKGBUILD" ]; then
- found_source=true
- break
- fi
- done
- "${found_source}" || die "%s not found in [%s]" "${pkgbase}" "${TESTING_REPO}"
- found_target=false
- for pkgarch in "${ARCHES[@]}" 'any'; do
- for repo in "${STABLE_REPOS[@]}"; do
- svnrepo_to="${WORKDIR}/${pkgbase}/${repo}-${pkgarch}"
- if [ -r "${svnrepo_to}/PKGBUILD" ]; then
- found_target=true
- pkgs[${repo}]+="${pkgbase} "
- break 2
- fi
- done
- done
- "${found_target}" || die "%s not found in any of these repos: " "${pkgbase}" "${STABLE_REPOS[*]}"
- fi
-done
-
-for pkgarch in "${ARCHES[@]}"; do
- repo_unlock "${TESTING_REPO}" "${pkgarch}"
-done
-for repo in "${STABLE_REPOS[@]}"; do
- for pkgarch in "${ARCHES[@]}"; do
- repo_unlock "${repo}" "${pkgarch}"
- done
- if [ -n "${pkgs[${repo}]}" ]; then
- "$(dirname "$(readlink -e "$0")")/db-move" "${TESTING_REPO}" "${repo}" "${pkgs[${repo}]}"
- fi
-done
-
-script_unlock
diff --git a/tests-xihh/__init__.py b/tests-xihh/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests-xihh/__init__.py
diff --git a/tests-xihh/blacklist_sample b/tests-xihh/blacklist_sample
new file mode 100644
index 0000000..2a02af6
--- /dev/null
+++ b/tests-xihh/blacklist_sample
@@ -0,0 +1,2 @@
+alex:alex-libre: Aquí va un comentario
+gmime22 ::Non free dependencies \ No newline at end of file
diff --git a/tests-xihh/core.db.tar.gz b/tests-xihh/core.db.tar.gz
new file mode 100644
index 0000000..5eb2081
--- /dev/null
+++ b/tests-xihh/core.db.tar.gz
Binary files differ
diff --git a/tests-xihh/depends b/tests-xihh/depends
new file mode 100644
index 0000000..7ff3ad4
--- /dev/null
+++ b/tests-xihh/depends
@@ -0,0 +1,4 @@
+%DEPENDS%
+glibc>=2.13
+zlib
+
diff --git a/tests-xihh/desc b/tests-xihh/desc
new file mode 100644
index 0000000..abba644
--- /dev/null
+++ b/tests-xihh/desc
@@ -0,0 +1,39 @@
+%FILENAME%
+binutils-2.21-4-x86_64.pkg.tar.xz
+
+%NAME%
+binutils
+
+%VERSION%
+2.21-4
+
+%DESC%
+A set of programs to assemble and manipulate binary and object files
+
+%GROUPS%
+base
+
+%CSIZE%
+3412892
+
+%ISIZE%
+17571840
+
+%MD5SUM%
+4e666f87c78998f4839f33dc06d2043a
+
+%URL%
+http://www.gnu.org/software/binutils/
+
+%LICENSE%
+GPL
+
+%ARCH%
+x86_64
+
+%BUILDDATE%
+1297240369
+
+%PACKAGER%
+Allan McRae <allan@archlinux.org>
+
diff --git a/tests-xihh/rsync_output_sample b/tests-xihh/rsync_output_sample
new file mode 100644
index 0000000..72d9cd0
--- /dev/null
+++ b/tests-xihh/rsync_output_sample
@@ -0,0 +1,14 @@
+dr-xr-sr-x 4096 2010/09/11 11:37:10 .
+-rw-r--r-- 11 2011/02/08 00:00:01 lastsync
+drwxrwxr-x 15 2010/09/11 11:28:50 community-staging
+drwxrwxr-x 30 2010/09/11 11:28:50 community-staging/os
+drwxrwxr-x 8192 2011/02/07 17:00:01 community-staging/os/i686
+lrwxrwxrwx 52 2010/12/23 16:51:01 community-staging/os/i686/alex-2.3.4-1-i686.pkg.tar.xz -> ../../../pool/community/alex-2.3.4-1-i686.pkg.tar.xz
+lrwxrwxrwx 27 2011/02/07 14:02:54 community-staging/os/i686/community-staging.db -> community-staging.db.tar.gz
+-rw-rw-r-- 2237 2011/02/07 14:02:54 community-staging/os/i686/community-staging.db.tar.gz
+-rw-rw-r-- 3209 2011/02/07 14:00:13 community-staging/os/i686/community-staging.db.tar.gz.old
+drwxrwxr-x 15 2009/07/22 15:07:56 community
+drwxrwxr-x 40 2009/08/04 15:57:42 community/os
+drwxrwsr-x 36864 2011/02/03 05:00:01 community/os/any
+-rw-rw-r-- 303336 2010/07/16 10:06:28 community/os/any/any2dvd-0.34-4-any.pkg.tar.xz
+-rw-rw-r-- 221664 2010/03/28 15:55:48 community/os/x86_64/gmime22-2.2.26-1-x86_64.pkg.tar.xz
diff --git a/tests-xihh/test_filter.py b/tests-xihh/test_filter.py
new file mode 100644
index 0000000..d8006f9
--- /dev/null
+++ b/tests-xihh/test_filter.py
@@ -0,0 +1,196 @@
+# -*- encoding: utf-8 -*-
+""" """
+
+__author__ = "Joshua Ismael Haase Hernández <hahj87@gmail.com>"
+__version__ = "$Revision: 1.1 $"
+__date__ = "$Date: 2011/02/08 $"
+__copyright__ = "Copyright (c) 2011 Joshua Ismael Haase Hernández"
+__license__ = "GPL3+"
+
+from repm.config import *
+from repm.filter import *
+import unittest
+
+class pkginfo_from_file_KnownValues(unittest.TestCase):
+ # (filename, name, version, release, arch)
+ # filename is location
+ known=(
+ ("community-testing/os/i686/inputattach-1.24-3-i686.pkg.tar.xz","inputattach","1.24","3","i686"),
+ ("community-testing/os/i686/ngspice-22-1-i686.pkg.tar.xz","ngspice","22","1","i686"),
+ ("community-testing/os/i686/tmux-1.4-2-i686.pkg.tar.xz","tmux","1.4","2","i686"),
+ ("community-testing/os/i686/tor-0.2.1.29-2-i686.pkg.tar.xz","tor","0.2.1.29","2","i686"),
+ ("../../../pool/community/tor-0.2.1.29-2-i686.pkg.tar.xz","tor","0.2.1.29","2","i686"),
+ ("community-testing/os/x86_64/inputattach-1.24-3-x86_64.pkg.tar.xz","inputattach","1.24","3","x86_64"),
+ ("../../../pool/community/inputattach-1.24-3-x86_64.pkg.tar.xz","inputattach","1.24","3","x86_64"),
+ ("tor-0.2.1.29-2-x86_64.pkg.tar.xz","tor","0.2.1.29","2","x86_64"),
+ )
+
+ def generate_results(self, example_tuple, attr):
+ location, name, version, release, arch = example_tuple
+ return pkginfo_from_filename(location)[attr], locals()[attr]
+
+ def testReturnPackageObject(self):
+ for i in self.known:
+ location, name, version, release, arch = i
+ self.assertIsInstance(pkginfo_from_filename(location),Package)
+
+ def testNames(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="name")
+ self.assertEqual(k, v)
+
+ def testVersions(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="version")
+ self.assertEqual(k, v)
+
+ def testArchs(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="arch")
+ self.assertEqual(k, v)
+
+ def testReleases(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="release")
+ self.assertEqual(k, v)
+
+ def testLocations(self):
+ for i in self.known:
+ k,v = self.generate_results(example_tuple=i,attr="location")
+ self.assertEqual(k, v)
+
+class pkginfo_from_file_BadInput(unittest.TestCase):
+ bad=("community-testing/os/i686/community-testing.db",
+ "community-testing/os/i686/community-testing.db.tar.gz",
+ "community-testing/os/i686/community-testing.db.tar.gz.old",
+ "community-testing/os/i686/community-testing.files",
+ "community-testing/os/i686/community-testing.files.tar.gz",
+ "community-testing/os/x86_64")
+
+ def testBadInput(self):
+ for i in self.bad:
+ self.assertRaises(NonValidFile,pkginfo_from_filename,i)
+
+class pkginfoFromRsyncOutput(unittest.TestCase):
+ example_package_list=(Package(),Package(),Package())
+ example_package_list[0].package_info={ "name" : "alex",
+ "version" : "2.3.4",
+ "release" : "1",
+ "arch" : "i686",
+ "license" : False,
+ "location": "community-staging/os/i686/alex-2.3.4-1-i686.pkg.tar.xz",
+ "depends" : False,}
+ example_package_list[1].package_info={ "name" : "any2dvd",
+ "version" : "0.34",
+ "release" : "4",
+ "arch" : "any",
+ "license" : False,
+ "location": "community/os/any/any2dvd-0.34-4-any.pkg.tar.xz",
+ "depends" : False,}
+ example_package_list[2].package_info={ "name" : "gmime22",
+ "version" : "2.2.26",
+ "release" : "1",
+ "arch" : "x86_64",
+ "license" : False,
+ "location": "community/os/x86_64/gmime22-2.2.26-1-x86_64.pkg.tar.xz",
+ "depends" : False,}
+
+ try:
+ output_file = open("rsync_output_sample")
+ rsync_out= output_file.read()
+ output_file.close()
+ except IOError: print("There is no rsync_output_sample file")
+
+ pkglist = pkginfo_from_rsync_output(rsync_out)
+
+ def testOutputArePackages(self):
+ if not self.pkglist:
+ self.fail("not pkglist:" + str(self.pkglist))
+ for pkg in self.pkglist:
+ self.assertIsInstance(pkg,Package)
+
+ def testPackageInfo(self):
+ if not self.pkglist:
+ self.fail("Pkglist doesn't exist: " + str(self.pkglist))
+ self.assertEqual(self.pkglist,self.example_package_list)
+
+class generateRsyncBlacklist(unittest.TestCase):
+ example_package_list=(Package(),Package(),Package())
+ example_package_list[0].package_info={ "name" : "alex",
+ "version" : "2.3.4",
+ "release" : "1",
+ "arch" : "i686",
+ "license" : False,
+ "location": "community-staging/os/i686/alex-2.3.4-1-i686.pkg.tar.xz",
+ "depends" : False,}
+ example_package_list[1].package_info={ "name" : "any2dvd",
+ "version" : "0.34",
+ "release" : "4",
+ "arch" : "any",
+ "license" : False,
+ "location": "community/os/any/any2dvd-0.34-4-any.pkg.tar.xz",
+ "depends" : False,}
+ example_package_list[2].package_info={ "name" : "gmime22",
+ "version" : "2.2.26",
+ "release" : "1",
+ "arch" : "x86_64",
+ "license" : False,
+ "location": "community/os/x86_64/gmime22-2.2.26-1-x86_64.pkg.tar.xz",
+ "depends" : False,}
+
+ def testListado(self):
+ self.assertEqual(listado("blacklist_sample"),["alex","gmime22"])
+
+ def testExcludeFiles(self):
+ a=rsyncBlacklist_from_blacklist(self.example_package_list,
+ listado("blacklist_sample"),
+ False)
+ b=[self.example_package_list[0]["location"],self.example_package_list[2]["location"]]
+ self.assertEqual(a,b)
+
+class pkginfo_from_descKnownValues(unittest.TestCase):
+ pkgsample=Package()
+ pkgsample.package_info={"name" : "binutils",
+ "version" : "2.21",
+ "release" : "4",
+ "arch" : "x86_64",
+ "license" : "GPL",
+ "location": "binutils-2.21-4-x86_64.pkg.tar.xz",
+ "depends" : False,}
+ fsock=open("desc")
+ pkggen=pkginfo_from_desc(fsock.read())
+ fsock.close()
+ def testPkginfoFromDesc(self):
+ if self.pkggen is None:
+ self.fail("return value is None")
+ self.assertEqual(self.pkgsample,self.pkggen)
+
+class pkginfo_from_db(unittest.TestCase):
+ archdb = os.path.join("./workdir")
+ example_package_list=(Package(),Package(),Package())
+ example_package_list[0].package_info={ "name" : "acl",
+ "version" : "2.2.49",
+ "release" : "2",
+ "arch" : "x86_64",
+ "license" : ("LGPL",),
+ "location": "acl-2.2.49-2-x86_64.pkg.tar.xz",
+ "depends" : ("attr>=2.4.41"),}
+ example_package_list[1].package_info={ "name" : "glibc",
+ "version" : "2.13",
+ "release" : "4",
+ "arch" : "x86_64",
+ "license" : ("GPL","LGPL"),
+ "location": "glibc-2.13-4-x86_64.pkg.tar.xz",
+ "depends" : ("linux-api-headers>=2.6.37","tzdata",),}
+ example_package_list[2].package_info={ "name" : "",
+ "version" : "2.2.26",
+ "release" : "1",
+ "arch" : "x86_64",
+ "license" : False,
+ "location": "",
+ "depends" : False,}
+
+
+if __name__ == "__main__":
+ unittest.main()
+